End-to-end #2041
name: End-to-end
on:
  schedule:
    # Run at 4 and 10 GMT, basically "early morning" and "lunch"
    - cron: "0 4,10 * * *"
jobs:
  build-apps-image:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout compliantkubernetes-apps
        uses: actions/checkout@v3
        with:
          repository: elastisys/compliantkubernetes-apps
          ref: main
      - name: Log in to the Container registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ghcr.io/elastisys/compliantkubernetes-apps-pipeline
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
      - name: Build and push Docker images
        uses: docker/build-push-action@v3
        with:
          context: pipeline
          push: true
          tags: ghcr.io/elastisys/compliantkubernetes-apps-pipeline:${{ github.sha }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-to: type=gha,mode=max
          cache-from: type=gha
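  # The apps job below runs inside the pipeline image built above, pulled by its ${{ github.sha }} tag.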
  apps:
    needs: [build-apps-image]
    runs-on: ubuntu-latest
    container: ghcr.io/elastisys/compliantkubernetes-apps-pipeline:${{ github.sha }}
    strategy:
      fail-fast: false
      matrix:
        cluster:
          - sc
          - wc
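    # The matrix runs the whole flow twice, once per cluster: sc (service cluster) and wc (workload cluster).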
    env:
      EXOSCALE_API_KEY: ${{ secrets.EXOSCALE_API_KEY }}
      EXOSCALE_API_SECRET: ${{ secrets.EXOSCALE_API_SECRET }}
      CK8S_CONFIG_PATH: ./apps/pipeline/config/exoscale
      CK8S_AUTO_APPROVE: true
    steps:
      - name: Checkout ck8s-kubespray
        uses: actions/checkout@v3
        with:
          repository: elastisys/compliantkubernetes-kubespray
          # TODO: Should we use a specific commit here?
          ref: main
          submodules: recursive
          path: compliantkubernetes-kubespray
      - name: Checkout compliantkubernetes-apps
        uses: actions/checkout@v3
        with:
          repository: elastisys/compliantkubernetes-apps
          ref: main
          path: apps
          fetch-depth: 0 # Fetches all tags
      - name: Generate and insert public SSH key
        run: |
          ssh-keygen -q -t rsa -N "" -f apps/pipeline/config/exoscale/id_rsa
          sed -i "s!PUBLIC_SSH_KEY_HERE!$(cat apps/pipeline/config/exoscale/id_rsa.pub)!g" apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/default.tfvars
      - name: Init terraform
        uses: docker://hashicorp/terraform:1.2.9
        with:
          args: -chdir="/github/workspace/compliantkubernetes-kubespray/kubespray/contrib/terraform/exoscale" init
      # Note: This container doesn't run bash, so we cannot use env vars in the arguments
      - name: Run terraform
        continue-on-error: true
        id: runterraform0
        uses: docker://hashicorp/terraform:1.2.9
        with:
          args: -chdir="/github/workspace/compliantkubernetes-kubespray/kubespray/contrib/terraform/exoscale" apply -auto-approve -var-file /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/default.tfvars -state /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/terraform.tfstate
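      # Terraform apply occasionally fails transiently; the first attempt uses continue-on-error
      # and the retry below reruns apply against the same state file only if it failed.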
      - name: Run terraform - retry 1
        id: runterraform1
        if: steps.runterraform0.outcome == 'failure'
        uses: docker://hashicorp/terraform:1.2.9
        with:
          args: -chdir="/github/workspace/compliantkubernetes-kubespray/kubespray/contrib/terraform/exoscale" apply -auto-approve -var-file /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/default.tfvars -state /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/terraform.tfstate
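      # DNS is only managed from the sc run: the ingress load balancer IP from the Terraform state
      # is substituted into the Route53 change batch before the records are created.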
      - name: Prepare DNS
        if: matrix.cluster == 'sc'
        run: |
          ingress_controller_lb_ip_address="$(jq -r .outputs.ingress_controller_lb_ip_address.value $CK8S_CONFIG_PATH/pipeline-${{ matrix.cluster }}-config/terraform.tfstate)"
          sed -i "s/ip-address/${ingress_controller_lb_ip_address}/g" apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
          sed -i "s/ACTION/CREATE/g" apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
          cat apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
      - name: Create DNS records
        if: matrix.cluster == 'sc'
        uses: docker://amazon/aws-cli:2.1.26
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_DNS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_DNS_SECRET_ACCESS_KEY }}
          # See https://github.com/aws/aws-cli/issues/5262
          AWS_EC2_METADATA_DISABLED: true
        with:
          args: route53 change-resource-record-sets --hosted-zone-id Z2STJRQSJO5PZ0 --change-batch file://apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
      - name: Run kubespray
        continue-on-error: true
        id: runkubespray0
        uses: docker://quay.io/kubespray/kubespray:v2.23.0
        env:
          ANSIBLE_CONFIG: ./compliantkubernetes-kubespray/kubespray/ansible.cfg
        with:
          args: /bin/bash -c "cd compliantkubernetes-kubespray/kubespray && ansible-playbook -i ../../$CK8S_CONFIG_PATH/pipeline-${{ matrix.cluster }}-config/inventory.ini --private-key ../../$CK8S_CONFIG_PATH/id_rsa --become --become-user=root cluster.yml"
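      # Same retry pattern as for Terraform: rerun the playbook once if the first attempt failed.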
      - name: Run kubespray - retry 1
        id: runkubespray1
        if: steps.runkubespray0.outcome == 'failure'
        uses: docker://quay.io/kubespray/kubespray:v2.23.0
        env:
          ANSIBLE_CONFIG: ./compliantkubernetes-kubespray/kubespray/ansible.cfg
        with:
          args: /bin/bash -c "cd compliantkubernetes-kubespray/kubespray && ansible-playbook -i ../../$CK8S_CONFIG_PATH/pipeline-${{ matrix.cluster }}-config/inventory.ini --private-key ../../$CK8S_CONFIG_PATH/id_rsa --become --become-user=root cluster.yml"
      - name: Import PGP key and configure GPG agent
        run: ./apps/pipeline/setup-pgp.bash
        env:
          PGP_KEY: ${{ secrets.PGP_KEY }}
          PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }}
      - name: Prepare for apps
        env:
          S3_ACCESS_KEY: ${{ secrets.EXOSCALE_API_KEY }}
          S3_SECRET_KEY: ${{ secrets.EXOSCALE_API_SECRET }}
        run: |
          # Set public LB IP in the kubeconfig
          control_plane_lb_ip_address="$(jq -r .outputs.control_plane_lb_ip_address.value $CK8S_CONFIG_PATH/pipeline-${{ matrix.cluster }}-config/terraform.tfstate)"
          yq write --inplace "$CK8S_CONFIG_PATH/pipeline-${{ matrix.cluster }}-config/artifacts/admin.conf" clusters[0].cluster.server https://${control_plane_lb_ip_address}:6443
          # Copy to correct location and encrypt
          mkdir "$CK8S_CONFIG_PATH/.state"
          cp "$CK8S_CONFIG_PATH/pipeline-${{ matrix.cluster }}-config/artifacts/admin.conf" "$CK8S_CONFIG_PATH/.state/kube_config_${{ matrix.cluster }}.yaml"
          sops --config "$CK8S_CONFIG_PATH/.sops.yaml" -e -i "$CK8S_CONFIG_PATH/.state/kube_config_${{ matrix.cluster }}.yaml"
          # Encrypt and set necessary secrets
          sops --config "$CK8S_CONFIG_PATH/.sops.yaml" -e -i "$CK8S_CONFIG_PATH/secrets.yaml"
          sops --config "$CK8S_CONFIG_PATH/.sops.yaml" --set "[\"objectStorage\"] {\"s3\": {\"accessKey\": \"${S3_ACCESS_KEY}\", \"secretKey\": \"${S3_SECRET_KEY}\"}}" "$CK8S_CONFIG_PATH/secrets.yaml"
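      # Assumption: `ck8s init both` generates the apps configuration for both the service and
      # workload clusters before anything is installed.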
      - name: Initialize apps
        env:
          CK8S_ENVIRONMENT_NAME: pipeline-exoscale
          CK8S_CLOUD_PROVIDER: exoscale
          CK8S_FLAVOR: dev
        run: ./apps/bin/ck8s init both
        id: initialize-apps
      - name: Create buckets
        if: matrix.cluster == 'sc'
        env:
          EXOSCALE_API_KEY: ${{ secrets.EXOSCALE_API_KEY }}
          EXOSCALE_API_SECRET: ${{ secrets.EXOSCALE_API_SECRET }}
        run: |
          ./apps/scripts/S3/generate-s3cfg.sh exoscale "${EXOSCALE_API_KEY}" "${EXOSCALE_API_SECRET}" sos-ch-gva-2.exo.io ch-gva-2 > s3cfg.ini
          ./apps/scripts/S3/entry.sh --s3cfg s3cfg.ini create
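      # Rook/Ceph provides the cluster storage; apps are only installed if both the bootstrap
      # and the wait-for-rook check below succeed (see the `if:` guard on "Install apps").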
      - name: Bootstrap rook
        env:
          KUBECONFIG: /__w/compliantkubernetes-apps/compliantkubernetes-apps/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/artifacts/admin.conf
          CK8S_CONFIG_PATH: /__w/compliantkubernetes-apps/compliantkubernetes-apps/apps/pipeline/config/exoscale
          CK8S_APPS_PIPELINE: "true"
          CLUSTER: ${{ matrix.cluster }}
        run: |
          ./scripts/pipeline/bootstrap-rook.sh
        working-directory: ./compliantkubernetes-kubespray/rook
        id: bootstrap-rook
      - name: Test rook
        env:
          KUBECONFIG: /__w/compliantkubernetes-apps/compliantkubernetes-apps/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/artifacts/admin.conf
          CK8S_CONFIG_PATH: /__w/compliantkubernetes-apps/compliantkubernetes-apps/apps/pipeline/config/exoscale
          CK8S_APPS_PIPELINE: "true"
          CLUSTER: ${{ matrix.cluster }}
        run: |
          ./scripts/pipeline/wait-for-rook.sh ${{ matrix.cluster }}
        working-directory: ./compliantkubernetes-kubespray/rook
        id: test-rook
      - name: Gather configs
        if: always() && steps.initialize-apps.outcome == 'success' && matrix.cluster == 'sc'
        shell: bash
        working-directory: ./apps/pipeline/config/exoscale
        run: |
          cp defaults/common-config.yaml common-defaults.yaml
          cp defaults/sc-config.yaml sc-defaults.yaml
          cp defaults/wc-config.yaml wc-defaults.yaml
          tar -czf configs.tar.gz common-defaults.yaml common-config.yaml sc-defaults.yaml sc-config.yaml wc-defaults.yaml wc-config.yaml
        id: gather-configs
      - name: Upload configs
        if: always() && steps.gather-configs.outcome == 'success'
        uses: actions/upload-artifact@v3
        with:
          name: compliantkubernetes-apps-pipeline-config
          path: ./apps/pipeline/config/exoscale/configs.tar.gz
      - name: Install apps
        if: steps.bootstrap-rook.outcome == 'success' && steps.test-rook.outcome == 'success'
        run: ./apps/pipeline/apply-${{ matrix.cluster }}.bash
        shell: bash
        id: install-apps
      - name: Wait for OpenSearch to be ready
        if: steps.install-apps.outcome == 'success' && matrix.cluster == 'wc'
        run: ./apps/pipeline/opensearch.bash
        continue-on-error: true
        shell: bash
        id: opensearch
      - name: Test apps
        if: always() && (steps.install-apps.outcome != 'cancelled' && steps.install-apps.outcome != 'skipped')
        run: ./apps/bin/ck8s test ${{ matrix.cluster }} apps --logging-enabled
        env:
          PIPELINE: "true"
        id: test-apps
      - name: Finalise deploy rook
        env:
          KUBECONFIG: /__w/compliantkubernetes-apps/compliantkubernetes-apps/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/artifacts/admin.conf
          CK8S_CONFIG_PATH: /__w/compliantkubernetes-apps/compliantkubernetes-apps/apps/pipeline/config/exoscale
          CK8S_APPS_PIPELINE: "true"
          CLUSTER: ${{ matrix.cluster }}
        run: |
          ./scripts/pipeline/finalise-deploy-rook.sh
        working-directory: ./compliantkubernetes-kubespray/rook
        id: finalise-rook
      - name: Upload logs
        if: steps.install-apps.outcome == 'failure' || steps.test-apps.outcome == 'failure'
        uses: actions/upload-artifact@v3
        with:
          name: compliantkubernetes-apps-logs
          path: ./logs
      - name: Upload events
        if: steps.install-apps.outcome == 'failure' || steps.test-apps.outcome == 'failure'
        uses: actions/upload-artifact@v3
        with:
          name: compliantkubernetes-apps-events
          path: ./events
      - name: Upload rook-ceph logs
        if: steps.finalise-rook.outcome == 'failure' || steps.bootstrap-rook.outcome == 'failure' || steps.test-rook.outcome == 'failure' || steps.install-apps.outcome == 'failure'
        uses: actions/upload-artifact@v3
        with:
          name: rook-ceph-logs
          path: ./compliantkubernetes-kubespray/rook/logs
      - name: Upload rook-ceph events
        if: steps.finalise-rook.outcome == 'failure' || steps.bootstrap-rook.outcome == 'failure' || steps.test-rook.outcome == 'failure' || steps.install-apps.outcome == 'failure'
        uses: actions/upload-artifact@v3
        with:
          name: rook-ceph-events
          path: ./compliantkubernetes-kubespray/rook/events
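      # Cleanup: these steps run with `if: always()` so the Exoscale infrastructure, DNS records,
      # and buckets are removed even when earlier steps fail.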
      - name: Destroy terraform
        continue-on-error: true
        id: destroyterraform0
        if: always()
        uses: docker://hashicorp/terraform:1.2.9
        with:
          args: -chdir="/github/workspace/compliantkubernetes-kubespray/kubespray/contrib/terraform/exoscale" destroy -auto-approve -var-file /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/default.tfvars -state /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/terraform.tfstate
      - name: Destroy terraform - retry 1
        id: destroyterraform1
        if: always() && steps.destroyterraform0.outcome == 'failure'
        uses: docker://hashicorp/terraform:1.2.9
        with:
          args: -chdir="/github/workspace/compliantkubernetes-kubespray/kubespray/contrib/terraform/exoscale" destroy -auto-approve -var-file /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/default.tfvars -state /github/workspace/apps/pipeline/config/exoscale/pipeline-${{ matrix.cluster }}-config/terraform.tfstate
      - name: Prepare DNS cleanup
        if: always() && (matrix.cluster == 'sc')
        run: |
          sed -i "s/CREATE/DELETE/g" apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
          cat apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
      - name: Delete DNS records
        if: always() && (matrix.cluster == 'sc')
        uses: docker://amazon/aws-cli:2.1.26
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_DNS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_DNS_SECRET_ACCESS_KEY }}
          AWS_EC2_METADATA_DISABLED: true
        with:
          args: route53 change-resource-record-sets --hosted-zone-id Z2STJRQSJO5PZ0 --change-batch file://apps/pipeline/config/exoscale/dns-${{ matrix.cluster }}.json
      - name: Delete buckets
        if: always() && (matrix.cluster == 'sc')
        env:
          EXOSCALE_API_KEY: ${{ secrets.EXOSCALE_API_KEY }}
          EXOSCALE_API_SECRET: ${{ secrets.EXOSCALE_API_SECRET }}
        run: |
          ./apps/scripts/S3/entry.sh --s3cfg s3cfg.ini delete
  clean-apps-image:
    needs: apps
    if: always()
    runs-on: ubuntu-latest
    steps:
      - name: Checkout compliantkubernetes-apps
        uses: actions/checkout@v3
        with:
          repository: elastisys/compliantkubernetes-apps
          ref: main
      - name: Cleanup docker image
        run: ./pipeline/cleanup-docker-image.bash
        env:
          GITHUB_ACTOR: ${{ github.actor }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  alert:
    runs-on: ubuntu-latest
    needs: [clean-apps-image]
    if: always() && github.ref == 'refs/heads/main' # Make sure this runs even if some jobs fail, and only on main
    steps:
      - id: get-sha
        run: |
          echo "sha=$( curl https://api.github.com/repos/elastisys/compliantkubernetes-apps/git/ref/heads/main | jq .object.sha | tr -d '"')" >> $GITHUB_OUTPUT
      - id: get-sha-short
        run: |
          echo "sha=$( echo ${{ steps.get-sha.outputs.sha }} | cut -c -8)" >> $GITHUB_OUTPUT
      - id: result
        run: |
          curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/jobs \
            --header 'Authorization: token ${{ secrets.GITHUB_TOKEN }}' \
            --header 'Accept: application/vnd.github.v3+json' > response.json
          cat response.json | jq -r '.jobs[] | select(.conclusion=="failure") | .id' > jobs.failed
          if [ -s jobs.failed ]
          then
            while read i
            do
              cat response.json | jq --arg i $i -r '[.jobs[] | select(.id=='$i') | "The JOB: ", .name, " failed at STEPS: ", ([(.steps[] | select(.conclusion=="failure") | .name)] | join("; "))] | join("")' >> steps.failed
            done < jobs.failed
            export RESULT="$(cat steps.failed)"
          else
            export RESULT="All good."
          fi
          RESULT="${RESULT//'%'/'%25'}"
          RESULT="${RESULT//$'\n'/'%0A'}"
          RESULT="${RESULT//$'\r'/'%0D'}"
          echo "result=$(echo "$RESULT")" >> $GITHUB_OUTPUT
      - uses: technote-space/workflow-conclusion-action@v1
      - name: Send Slack alert
        uses: elastisys/action-slack@v3
        with:
          status: custom
          fields: repo,message,commit,author,action,workflow,job,took,ref
          custom_payload: |
            {
              attachments: [{
                color: `${process.env.WORKFLOW_CONCLUSION}` === 'success' ? 'good' : `${process.env.WORKFLOW_CONCLUSION}` === 'failure' ? 'danger' : 'warning',
                text: `Pipeline: ${process.env.AS_WORKFLOW}\n\ncompliantkubernetes-apps commit: <https://github.com/elastisys/compliantkubernetes-apps/commit/${{ steps.get-sha.outputs.sha }}|${{ steps.get-sha-short.outputs.sha }}>\n\n Status: ${process.env.WORKFLOW_CONCLUSION}\nTook: ${process.env.AS_TOOK}\nDetails: ${{ steps.result.outputs.result }}`,
              }]
            }
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK }} # required
        if: always() # Notify for both successes and failures