Prepare for v0.16.21 release #11538
Workflow file for this run
name: EKS (ENI)
# Any change in triggers needs to be reflected in the concurrency group.
on:
  ### FOR TESTING PURPOSES
  # This workflow runs in the context of `main`, and ignores changes to
  # workflow files in PRs. For testing changes to this workflow from a PR:
  # - Make sure the PR uses a branch from the base repository (requires write
  #   privileges). It will not work with a branch from a fork (missing secrets).
  # - Uncomment the `pull_request` event below, commit separately with a `DO
  #   NOT MERGE` message, and push to the PR. As long as the commit is present,
  #   any push to the PR will trigger this workflow.
  # - Don't forget to remove the `DO NOT MERGE` commit once satisfied. The run
  #   will disappear from the PR checks: please provide a direct link to the
  #   successful workflow run (can be found from Actions tab) in a comment.
  #
  pull_request: {}
  ###
  # pull_request_target: {}
  # Run every 6 hours (i.e. at 01:00, 07:00, 13:00, and 19:00 UTC)
  schedule:
    - cron: '0 1/6 * * *'
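# Allow a single run per PR at a time; scheduled runs share the 'scheduled' group.
# A newly triggered run cancels any run still in progress for the same group.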
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || 'scheduled' }}
  cancel-in-progress: true
env:
  region: us-east-2
  eksctl_version: v0.147.0
  # renovate: datasource=github-releases depName=cilium/cilium
  cilium_version: v1.16.4
  kubectl_version: v1.23.6
jobs:
  installation-and-connectivity:
    name: EKS Installation and Connectivity Test
    if: ${{ github.repository == 'cilium/cilium-cli' }}
    runs-on: ubuntu-24.04
    permissions:
      # To be able to request the JWT from GitHub's OIDC provider
      id-token: write
    timeout-minutes: 60
    steps:
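      # The cluster name embeds the repository, run ID, and run attempt, making it unique per run.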
      - name: Set cluster name
        run: |
          echo "clusterName=${{ github.repository_owner }}-${{ github.event.repository.name }}-${{ github.run_id }}-${{ github.run_attempt }}" >> $GITHUB_ENV
      - name: Checkout
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
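      # Install a pinned kubectl release and verify the binary against its published sha256 checksum.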
      - name: Install kubectl
        run: |
          curl -sLO "https://dl.k8s.io/release/${{ env.kubectl_version }}/bin/linux/amd64/kubectl"
          curl -sLO "https://dl.k8s.io/${{ env.kubectl_version }}/bin/linux/amd64/kubectl.sha256"
          echo "$(cat kubectl.sha256) kubectl" | sha256sum --check
          sudo install -o root -g root -m 0755 kubectl /usr/local/bin/kubectl
          kubectl version --client
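      # eksctl is used to create the EKS cluster below and to delete it in the cleanup step.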
      - name: Install eksctl CLI
        run: |
          curl -LO "https://github.com/weaveworks/eksctl/releases/download/${{ env.eksctl_version }}/eksctl_$(uname -s)_amd64.tar.gz"
          sudo tar xzvfC eksctl_$(uname -s)_amd64.tar.gz /usr/bin
          rm eksctl_$(uname -s)_amd64.tar.gz
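      # Authenticate to AWS by assuming the configured role through GitHub's OIDC provider
      # (this is why the job requests the id-token: write permission above).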
      - name: Set up AWS CLI credentials
        uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2
        with:
          role-to-assume: ${{ secrets.AWS_PR_ASSUME_ROLE }}
          aws-region: ${{ env.region }}
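      # Persist the credentials exported by the previous step into the AWS CLI configuration files.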
      - name: Run aws configure
        run: |
          aws configure set aws_access_key_id ${{ env.AWS_ACCESS_KEY_ID }}
          aws configure set aws_secret_access_key ${{ env.AWS_SECRET_ACCESS_KEY }}
          aws configure set aws_session_token ${{ env.AWS_SESSION_TOKEN }}
          aws configure set default.region ${{ env.AWS_REGION }}
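      # For PR-triggered runs, look up the head SHA and PR number via the GitHub API;
      # otherwise fall back to github.sha for both outputs.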
      - name: Set up job variables
        id: vars
        run: |
          env
          if [ ${{ github.event.issue.pull_request || github.event.pull_request }} ]; then
            PR_API_JSON=$(curl \
              -H "Accept: application/vnd.github.v3+json" \
              -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
              ${{ github.event.issue.pull_request.url || github.event.pull_request.url }})
            SHA=$(echo "$PR_API_JSON" | jq -r ".head.sha")
            OWNER=$(echo "$PR_API_JSON" | jq -r ".number")
          else
            SHA=${{ github.sha }}
            OWNER=${{ github.sha }}
          fi
          echo "sha=${SHA}" >> $GITHUB_OUTPUT
          echo "owner=${OWNER}" >> $GITHUB_OUTPUT
      - name: Create EKS cluster
        run: |
          cat <<EOF > eks-config.yaml
          apiVersion: eksctl.io/v1alpha5
          kind: ClusterConfig
          metadata:
            name: ${{ env.clusterName }}
            region: ${{ env.region }}
            tags:
              usage: "${{ github.repository_owner }}-${{ github.event.repository.name }}"
              owner: "${{ steps.vars.outputs.owner }}"
          managedNodeGroups:
          - name: ng-1
            instanceTypes:
              - t3.medium
              - t3a.medium
            desiredCapacity: 2
            spot: true
            privateNetworking: true
            volumeType: "gp3"
            volumeSize: 10
            taints:
              - key: "node.cilium.io/agent-not-ready"
                value: "true"
                effect: "NoExecute"
          EOF
          eksctl create cluster -f ./eks-config.yaml
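      # `uses: ./` runs the action defined at the root of this repository to install the cilium CLI.
      # The image-tag input is set to the SHA determined above (presumably so that artifacts built
      # for the commit under test are used).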
      - name: Install Cilium CLI
        uses: ./
        with:
          skip-build: 'true'
          image-tag: ${{ steps.vars.outputs.sha }}
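      # Note: on EKS the Cilium installation adds a blocking nodeSelector to the aws-node DaemonSet
      # (checked again at uninstall time below), which is why the DaemonSet is expected to keep
      # zero scheduled pods.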
      - name: Install Cilium and run tests
        timeout-minutes: 30
        run: |
          # Install Cilium
          cilium install \
            --version "${{ env.cilium_version }}" \
            --set cluster.name="${{ env.clusterName }}" \
            --wait=false \
            --set loadBalancer.l7.backend=envoy \
            --set tls.secretsBackend=k8s \
            --set bpf.monitorAggregation=none \
            --set socketLB.tracing=false
          # Enable Relay
          cilium hubble enable
          # Wait for cilium and hubble relay to be ready
          # NB: necessary to work against occasional flakes due to https://github.com/cilium/cilium-cli/issues/918
          cilium status --wait
          # Make sure the 'aws-node' DaemonSet exists but has no scheduled pods
          [[ $(kubectl -n kube-system get ds/aws-node -o jsonpath='{.status.currentNumberScheduled}') == 0 ]]
          # Port forward Relay
          cilium hubble port-forward&
          sleep 10s
          nc -nvz 127.0.0.1 4245
          # Run connectivity test
          cilium connectivity test --test-concurrency=3 --all-flows --collect-sysdump-on-failure --external-target amazon.com.
          kubectl delete namespace -l "app.kubernetes.io/name=cilium-cli"
          # Run performance test
          cilium connectivity perf --duration 1s
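      # Collect cluster state and a sysdump whenever the job did not succeed (failure or cancellation).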
      - name: Post-test information gathering
        if: ${{ !success() }}
        run: |
          echo "=== Retrieve cluster state ==="
          kubectl get pods --all-namespaces -o wide
          cilium status
          cilium sysdump --output-filename cilium-sysdump-out
        shell: bash {0} # Disable default fail-fast behaviour so that all commands run independently
      - name: Uninstall and make sure the 'aws-node' DaemonSet blocking nodeSelector was removed
        if: ${{ success() }}
        timeout-minutes: 5
        run: |
          cilium uninstall --wait
          # Make sure the 'aws-node' DaemonSet blocking nodeSelector was removed
          [[ ! $(kubectl -n kube-system get ds/aws-node -o jsonpath="{.spec.template.spec.nodeSelector['io\.cilium/aws-node-enabled']}") ]]
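      # Always delete the cluster, regardless of the outcome of the previous steps.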
      - name: Clean up EKS
        if: ${{ always() }}
        run: |
          eksctl delete cluster --name ${{ env.clusterName }}
        shell: bash {0} # Disable default fail-fast behaviour so that all commands run independently
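      # Upload the sysdump collected above when the job did not succeed.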
      - name: Upload artifacts
        if: ${{ !success() }}
        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
        with:
          name: cilium-sysdump-out.zip
          path: cilium-sysdump-out.zip
          retention-days: 5