diff --git a/.dockerignore b/.dockerignore index 20691a16..f98de6cc 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,8 +5,18 @@ docs /infrastructure /blueprint test +/tools/deployment-cli-tools .github .git .vscode /deployment -skaffold.yaml \ No newline at end of file +skaffold.yaml +*.egg-info +__pycache__ +.hypothesis +.coverage +.pytest_cache +/application-templates +/deployment-configuration +/cloud-harness +.openapi-generator \ No newline at end of file diff --git a/.gitignore b/.gitignore index 93e1eff4..90f643c0 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,5 @@ skaffold.yaml .overrides /deployment.yaml .hypothesis -__pycache__ \ No newline at end of file +__pycache__ +.env \ No newline at end of file diff --git a/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py b/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py index c2fdc99e..2306cc16 100644 --- a/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py +++ b/applications/jupyterhub/src/harness_jupyter/harness_jupyter/jupyterhub.py @@ -1,5 +1,3 @@ -import imp -import logging import sys import urllib.parse @@ -8,11 +6,9 @@ from cloudharness.applications import get_configuration from cloudharness.auth.quota import get_user_quotas from cloudharness.utils.config import CloudharnessConfig as conf +from cloudharness import log as logging, set_debug - -handler = logging.StreamHandler(sys.stdout) -handler.setLevel(logging.DEBUG) -logging.getLogger().addHandler(handler) +set_debug() def custom_options_form(spawner, abc): # let's skip the profile selection form for now @@ -104,7 +100,7 @@ def change_pod_manifest(self: KubeSpawner): # check user quotas application_config = get_configuration("jupyterhub") - + logging.info("Cloudharness: changing pod manifest") user_quotas = get_user_quotas( application_config=application_config, user_id=self.user.name) @@ -131,6 +127,7 @@ def change_pod_manifest(self: KubeSpawner): if 'subdomain' in 
harness and harness['subdomain'] == subdomain: ws_image = getattr(self, "ws_image", None) + logging.info("Subdomain is", subdomain) if ws_image: # try getting the image + tag from values.yaml ch_conf = conf.get_configuration() @@ -144,17 +141,20 @@ def change_pod_manifest(self: KubeSpawner): else: if app['name'] != 'jupyterhub': # Would use the hub image in that case, which we don't want. ws_image = harness['deployment']['image'] + logging.info(f'Use spacific app image: {ws_image}') if ws_image: - logging.info(f'Change image to {ws_image}') + logging.info(f'Use image: {ws_image}') self.image = ws_image if registry['name'] in self.image and registry['secret']: self.image_pull_secrets = registry['secret'] if 'jupyterhub' in harness and harness['jupyterhub']: if 'args' in harness['jupyterhub']: + logging.info("Setting custom args") self.args = harness['jupyterhub']['args'] if harness['jupyterhub'].get('mountUserVolume', True): + logging.info("Setting user volume affinity") set_user_volume_affinity(self) else: self.volume_mounts = [] @@ -164,15 +164,18 @@ def change_pod_manifest(self: KubeSpawner): self.http_timeout = 60 * 5 # 5 minutes if 'spawnerExtraConfig' in harness['jupyterhub']: + logging.info("Setting custom spawner config") try: for k, v in harness['jupyterhub']['spawnerExtraConfig'].items(): if k != 'node_selectors': + logging.info(f"Setting {k} to {v}") setattr(self, k, v) # check if there are node selectors, if so apply them to the pod node_selectors = harness['jupyterhub']['spawnerExtraConfig'].get('node_selectors') if node_selectors: for node_selector in node_selectors: + logging.info("Setting node selector", node_selector["key"]) ns = dict( matchExpressions=[ dict( @@ -200,6 +203,7 @@ def change_pod_manifest(self: KubeSpawner): logging.error("Error loading Spawner extra configuration", exc_info=True) # set user quota cpu/mem usage if value has a "value" else don't change the value + logging.info("Setting user quota cpu/mem usage") set_key_value(self, 
key="cpu_guarantee", value=user_quotas.get("quota-ws-guaranteecpu")) set_key_value(self, key="cpu_limit", value=user_quotas.get("quota-ws-maxcpu")) set_key_value(self, key="mem_guarantee", value=user_quotas.get("quota-ws-guaranteemem"), unit="G") @@ -215,6 +219,7 @@ def change_pod_manifest(self: KubeSpawner): # this will execute jupyter.change_pod_manifest(self=self) if 'applicationHook' in harness['jupyterhub']: func_name = harness['jupyterhub']['applicationHook'].split('.') + logging.info(f"Executing application hook {func_name}") module = __import__('.'.join(func_name[:-1])) f = getattr(module, func_name[-1]) f(self=self) diff --git a/deployment-configuration/codefresh-build-template.yaml b/deployment-configuration/codefresh-build-template.yaml index c225d623..f3e4377c 100644 --- a/deployment-configuration/codefresh-build-template.yaml +++ b/deployment-configuration/codefresh-build-template.yaml @@ -1,10 +1,8 @@ type: build stage: build -tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} diff --git a/deployment-configuration/codefresh-template-dev.yaml b/deployment-configuration/codefresh-template-dev.yaml index fc450345..e11bcf6d 100644 --- a/deployment-configuration/codefresh-template-dev.yaml +++ b/deployment-configuration/codefresh-template-dev.yaml @@ -33,7 +33,9 @@ steps: working_directory: . 
commands: - bash cloud-harness/install.sh - - harness-deployment $PATHS -t ${{CF_BUILD_ID}} -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -n ${{NAMESPACE}} -e $ENV $PARAMS + - harness-deployment $PATHS -d ${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -n ${{NAMESPACE}} --write-env -e $ENV $PARAMS + - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export + - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: commands: - 'helm template ./deployment/helm --debug -n ${{NAMESPACE}}' @@ -73,7 +75,7 @@ steps: kube_context: ${{CLUSTER_NAME}} namespace: ${{NAMESPACE}} chart_version: ${{CF_BUILD_ID}} - cmd_ps: --wait --timeout 600s + cmd_ps: --wait --timeout 600s --create-namespace custom_value_files: - ./deployment/helm/values.yaml build_test_images: diff --git a/deployment-configuration/codefresh-template-test.yaml b/deployment-configuration/codefresh-template-test.yaml index 1b04d326..6511924c 100644 --- a/deployment-configuration/codefresh-template-test.yaml +++ b/deployment-configuration/codefresh-template-test.yaml @@ -32,7 +32,9 @@ steps: working_directory: . 
commands: - bash cloud-harness/install.sh - - harness-deployment $PATHS -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} -t ${{CF_BUILD_ID}} -d ${{CF_SHORT_REVISION}}.${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -e $ENV -N $PARAMS + - harness-deployment $PATHS -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} -d ${{CF_SHORT_REVISION}}.${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} -e $ENV --write-env -N $PARAMS + - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export + - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: commands: - "helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}}" @@ -60,13 +62,6 @@ steps: stage: unittest type: parallel steps: {} - prepare_cluster: - stage: deploy - image: codefresh/kubectl - fail_fast: false - commands: - - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl create ns test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} deployment: stage: deploy type: helm @@ -79,7 +74,7 @@ steps: kube_context: ${{CLUSTER_NAME}} namespace: test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} chart_version: ${{CF_BUILD_ID}} - cmd_ps: --timeout 600s + cmd_ps: --timeout 600s --create-namespace custom_value_files: - ./deployment/helm/values.yaml build_test_images: diff --git a/deployment/Dockerfile b/deployment/Dockerfile new file mode 100644 index 00000000..05f1e561 --- /dev/null +++ b/deployment/Dockerfile @@ -0,0 +1,2 @@ +FROM codefresh/cf-docker-puller +RUN docker images diff --git a/deployment/codefresh-test.yaml b/deployment/codefresh-test.yaml index e327e359..131065d0 100644 --- a/deployment/codefresh-test.yaml +++ b/deployment/codefresh-test.yaml @@ -33,8 +33,10 @@ steps: commands: - bash cloud-harness/install.sh - harness-deployment . 
-n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} - -t ${{CF_BUILD_ID}} -d ${{CF_SHORT_REVISION}}.${{DOMAIN}} -r ${{REGISTRY}} -rs - ${{REGISTRY_SECRET}} -e test -N -i samples + -d ${{CF_SHORT_REVISION}}.${{DOMAIN}} -r ${{REGISTRY}} -rs ${{REGISTRY_SECRET}} + -e test --write-env -N -i samples + - cat deployment/.env >> ${{CF_VOLUME_PATH}}/env_vars_to_export + - cat ${{CF_VOLUME_PATH}}/env_vars_to_export prepare_deployment_view: commands: - helm template ./deployment/helm --debug -n test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} @@ -52,33 +54,45 @@ steps: cloudharness-base: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: infrastructure/base-images/cloudharness-base/Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/cloudharness-base title: Cloudharness base working_directory: ./. + tag: '${{CLOUDHARNESS_BASE_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{CLOUDHARNESS_BASE_TAG_EXISTS}}', '{{CLOUDHARNESS_BASE_TAG_EXISTS}}') + == true + forceNoCache: includes('${{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_BASE_TAG_FORCE_BUILD}}') + == false cloudharness-frontend-build: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: infrastructure/base-images/cloudharness-frontend-build/Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/cloudharness-frontend-build title: Cloudharness frontend build working_directory: ./. 
+ tag: '${{CLOUDHARNESS_FRONTEND_BUILD_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}', + '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_EXISTS}}') == true + forceNoCache: includes('${{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}', + '{{CLOUDHARNESS_FRONTEND_BUILD_TAG_FORCE_BUILD}}') == false build_static_images: title: Build static images type: parallel @@ -87,19 +101,25 @@ steps: cloudharness-flask: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/cloudharness-flask title: Cloudharness flask working_directory: ./infrastructure/common-images/cloudharness-flask + tag: '${{CLOUDHARNESS_FLASK_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{CLOUDHARNESS_FLASK_TAG_EXISTS}}', '{{CLOUDHARNESS_FLASK_TAG_EXISTS}}') + == true + forceNoCache: includes('${{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}', '{{CLOUDHARNESS_FLASK_TAG_FORCE_BUILD}}') + == false build_application_images: type: parallel stage: build @@ -107,177 +127,243 @@ steps: nfsserver: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/nfsserver title: Nfsserver working_directory: ./applications/nfsserver + tag: '${{NFSSERVER_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{NFSSERVER_TAG_EXISTS}}', '{{NFSSERVER_TAG_EXISTS}}') + == true + forceNoCache: includes('${{NFSSERVER_TAG_FORCE_BUILD}}', 
'{{NFSSERVER_TAG_FORCE_BUILD}}') + == false accounts: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/accounts title: Accounts working_directory: ./applications/accounts + tag: '${{ACCOUNTS_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{ACCOUNTS_TAG_EXISTS}}', '{{ACCOUNTS_TAG_EXISTS}}') + == true + forceNoCache: includes('${{ACCOUNTS_TAG_FORCE_BUILD}}', '{{ACCOUNTS_TAG_FORCE_BUILD}}') + == false samples: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CF_BUILD_ID}} - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CF_BUILD_ID}} + - CLOUDHARNESS_FRONTEND_BUILD=${{REGISTRY}}/cloudharness/cloudharness-frontend-build:${{CLOUDHARNESS_FRONTEND_BUILD_TAG}} + - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} image_name: cloudharness/samples title: Samples working_directory: ./applications/samples + tag: '${{SAMPLES_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{SAMPLES_TAG_EXISTS}}', '{{SAMPLES_TAG_EXISTS}}') + == true + forceNoCache: includes('${{SAMPLES_TAG_FORCE_BUILD}}', '{{SAMPLES_TAG_FORCE_BUILD}}') + == false samples-print-file: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - 
CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/samples-print-file title: Samples print file working_directory: ./applications/samples/tasks/print-file + tag: '${{SAMPLES_PRINT_FILE_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{SAMPLES_PRINT_FILE_TAG_EXISTS}}', '{{SAMPLES_PRINT_FILE_TAG_EXISTS}}') + == true + forceNoCache: includes('${{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}', '{{SAMPLES_PRINT_FILE_TAG_FORCE_BUILD}}') + == false samples-secret: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/samples-secret title: Samples secret working_directory: ./applications/samples/tasks/secret + tag: '${{SAMPLES_SECRET_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{SAMPLES_SECRET_TAG_EXISTS}}', '{{SAMPLES_SECRET_TAG_EXISTS}}') + == true + forceNoCache: includes('${{SAMPLES_SECRET_TAG_FORCE_BUILD}}', '{{SAMPLES_SECRET_TAG_FORCE_BUILD}}') + == false samples-sum: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/samples-sum title: Samples sum working_directory: 
./applications/samples/tasks/sum + tag: '${{SAMPLES_SUM_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{SAMPLES_SUM_TAG_EXISTS}}', '{{SAMPLES_SUM_TAG_EXISTS}}') + == true + forceNoCache: includes('${{SAMPLES_SUM_TAG_FORCE_BUILD}}', '{{SAMPLES_SUM_TAG_FORCE_BUILD}}') + == false common: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CF_BUILD_ID}} + - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} image_name: cloudharness/common title: Common working_directory: ./applications/common/server + tag: '${{COMMON_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{COMMON_TAG_EXISTS}}', '{{COMMON_TAG_EXISTS}}') + == true + forceNoCache: includes('${{COMMON_TAG_FORCE_BUILD}}', '{{COMMON_TAG_FORCE_BUILD}}') + == false workflows-send-result-event: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/workflows-send-result-event title: Workflows send result event working_directory: ./applications/workflows/tasks/send-result-event + tag: '${{WORKFLOWS_SEND_RESULT_EVENT_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}', + '{{WORKFLOWS_SEND_RESULT_EVENT_TAG_EXISTS}}') == true + forceNoCache: includes('${{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}', + 
'{{WORKFLOWS_SEND_RESULT_EVENT_TAG_FORCE_BUILD}}') == false workflows-extract-download: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/workflows-extract-download title: Workflows extract download working_directory: ./applications/workflows/tasks/extract-download + tag: '${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}', + '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_EXISTS}}') == true + forceNoCache: includes('${{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}', + '{{WORKFLOWS_EXTRACT_DOWNLOAD_TAG_FORCE_BUILD}}') == false workflows-notify-queue: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/workflows-notify-queue title: Workflows notify queue working_directory: ./applications/workflows/tasks/notify-queue + tag: '${{WORKFLOWS_NOTIFY_QUEUE_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}', + '{{WORKFLOWS_NOTIFY_QUEUE_TAG_EXISTS}}') == true + forceNoCache: includes('${{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}', + '{{WORKFLOWS_NOTIFY_QUEUE_TAG_FORCE_BUILD}}') == false workflows: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - 
REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CF_BUILD_ID}} + - CLOUDHARNESS_FLASK=${{REGISTRY}}/cloudharness/cloudharness-flask:${{CLOUDHARNESS_FLASK_TAG}} image_name: cloudharness/workflows title: Workflows working_directory: ./applications/workflows/server + tag: '${{WORKFLOWS_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{WORKFLOWS_TAG_EXISTS}}', '{{WORKFLOWS_TAG_EXISTS}}') + == true + forceNoCache: includes('${{WORKFLOWS_TAG_FORCE_BUILD}}', '{{WORKFLOWS_TAG_FORCE_BUILD}}') + == false tests_unit: stage: unittest type: parallel @@ -286,14 +372,7 @@ steps: title: Unit tests for samples commands: - pytest /usr/src/app/samples/test - image: '${{samples}}' - prepare_cluster: - stage: deploy - image: codefresh/kubectl - fail_fast: false - commands: - - kubectl config use-context ${{CLUSTER_NAME}} - - kubectl create ns test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} + image: '${{REGISTRY}}/cloudharness/samples:${{SAMPLES_TAG}}' deployment: stage: deploy type: helm @@ -306,7 +385,7 @@ steps: kube_context: '${{CLUSTER_NAME}}' namespace: test-${{NAMESPACE_BASENAME}}-${{CF_SHORT_REVISION}} chart_version: '${{CF_BUILD_ID}}' - cmd_ps: --timeout 600s + cmd_ps: --timeout 600s --create-namespace custom_value_files: - ./deployment/helm/values.yaml custom_values: @@ -319,34 +398,46 @@ steps: test-e2e: type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ image_name: cloudharness/test-e2e title: Test e2e working_directory: ./test/test-e2e + tag: '${{TEST_E2E_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{TEST_E2E_TAG_EXISTS}}', '{{TEST_E2E_TAG_EXISTS}}') + == true + forceNoCache: includes('${{TEST_E2E_TAG_FORCE_BUILD}}', '{{TEST_E2E_TAG_FORCE_BUILD}}') + == false test-api: 
type: build stage: build - tag: '${{CF_BUILD_ID}}' dockerfile: test/test-api/Dockerfile registry: '${{CODEFRESH_REGISTRY}}' buildkit: true build_arguments: - - TAG=${{CF_BUILD_ID}} - DOMAIN=${{DOMAIN}} - NOCACHE=${{CF_BUILD_ID}} - REGISTRY=${{REGISTRY}}/cloudharness/ - - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CF_BUILD_ID}} + - CLOUDHARNESS_BASE=${{REGISTRY}}/cloudharness/cloudharness-base:${{CLOUDHARNESS_BASE_TAG}} image_name: cloudharness/test-api title: Test api working_directory: ./. + tag: '${{TEST_API_TAG}}' + when: + condition: + any: + buildDoesNotExist: includes('${{TEST_API_TAG_EXISTS}}', '{{TEST_API_TAG_EXISTS}}') + == true + forceNoCache: includes('${{TEST_API_TAG_FORCE_BUILD}}', '{{TEST_API_TAG_FORCE_BUILD}}') + == false wait_deployment: stage: qa title: Wait deployment to be ready @@ -365,12 +456,13 @@ steps: stage: qa title: Api tests working_directory: /home/test - image: '${{test-api}}' + image: '${{REGISTRY}}/cloudharness/test-api:${{TEST_API_TAG}}' fail_fast: false commands: - echo $APP_NAME scale: samples_api_test: + title: samples api test volumes: - '${{CF_REPO_NAME}}/applications/samples:/home/test' - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' @@ -385,6 +477,7 @@ steps: --request-timeout=180000 --hypothesis-max-examples=2 --show-errors-tracebacks - pytest -v test/api common_api_test: + title: common api test volumes: - '${{CF_REPO_NAME}}/applications/common:/home/test' - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' @@ -394,6 +487,7 @@ steps: - st --pre-run cloudharness_test.apitest_init run api/openapi.yaml --base-url https://common.${{CF_SHORT_REVISION}}.${{DOMAIN}}/api -c all workflows_api_test: + title: workflows api test volumes: - '${{CF_REPO_NAME}}/applications/workflows:/home/test' - '${{CF_REPO_NAME}}/deployment/helm/values.yaml:/opt/cloudharness/resources/allvalues.yaml' @@ -412,12 +506,13 @@ steps: stage: qa title: 
End to end tests working_directory: /home/test - image: '${{test-e2e}}' + image: '${{REGISTRY}}/cloudharness/test-e2e:${{TEST_E2E_TAG}}' fail_fast: false commands: - yarn test scale: samples_e2e_test: + title: samples e2e test volumes: - '${{CF_REPO_NAME}}/applications/samples/test/e2e:/home/test/__tests__/samples' environment: diff --git a/docs/build-deploy/ci-cd/README.md b/docs/build-deploy/ci-cd/README.md new file mode 100644 index 00000000..77b59024 --- /dev/null +++ b/docs/build-deploy/ci-cd/README.md @@ -0,0 +1,20 @@ +# CI/CD with CloudHarness + +CloudHarness supports continuous deployment natively by creating the Helm Chart for you and with a few +ways to build containers. + +## CI/CD with Skaffold + +[Skaffold](https://skaffold.dev/) is a command line tool that simplifies build and deployment on a Kubernetes cluster. The configuration file `skaffold.yaml` is created when running +`harness-deployment`. + +All you need to do is: +- Configure the Kubernetes cluster in your local shell (e.g. `gcloud init` on Google Cloud) +- Configure the local Docker to be able to push to the remote registry +- Create the deployment specifying the registry and remote domain -- `harness-deployment ... -r myregistry -d mydomain` +- Run `skaffold build` +- Run `skaffold run` + +## CI/CD with Codefresh + +[Codefresh](https://codefresh.io/) is a nice platform for CI/CD; see details in the [dedicated document](./codefresh.md). \ No newline at end of file diff --git a/docs/build-deploy/ci-cd/codefresh.md b/docs/build-deploy/ci-cd/codefresh.md new file mode 100644 index 00000000..3e691334 --- /dev/null +++ b/docs/build-deploy/ci-cd/codefresh.md @@ -0,0 +1,88 @@ +# Continuous deployment with Codefresh + +Codefresh pipelines are generated as `deployment/codefresh-[ENV].yaml`. +These files can be loaded from codefresh as documented [here](https://codefresh.io/docs/docs/configure-ci-cd-pipeline/pipelines/). 
+ +The pipeline will take care of building the images from the source code and deploy the helm chart. +Log in to codefresh and run the pipeline associated to the repository. + +- To setup a new dev pipeline, indicate the remote yaml path `./deployment/codefresh-dev.yaml` +- To setup a new production pipeline, indicate the remote yaml path `./deployment/codefresh-prod.yaml` + +## Preconfigured pipelines + +- [codefresh-dev.yaml](../../../deployment-configuration/codefresh-template-dev.yaml). The main CI/CD pipeline. Includes build, deployment, testing and final tagging and push. Tagging and push is made upon approval. +- [codefresh-prod.yaml](../../../deployment-configuration/codefresh-template-prod.yaml). The Production deployment pipeline. The main idea is to reuse the builds tagged in the dev pipeline with specific configurations for the production environment. +- [codefresh-stage.yaml](../../../deployment-configuration/codefresh-template-stage.yaml). The Staging deployment pipeline. The main idea is to reuse the builds tagged in the dev pipeline with specific configurations for the staging environment. +- [codefresh-test.yaml](../../../deployment-configuration/codefresh-template-test.yaml). The Testing pipeline. It creates a new deployment from the current codebase and runs all tests. The deployment is deleted after the completion of the tests. See also the [testing documentation](../../testing.md). + +The templates for the predefined pipelines are in the [deployment-configuration](../../../deployment-configuration) directory. + +## Variables + +Variables are used to customize the deployment. + +### General variables +- **CLOUDHARNESS_BRANCH**. Specifies the cloudharness branch or tag to use. 
+ +### Build and deploy variables +- **DOMAIN*** - The deployment's base domain +- **CLUSTER_NAME*** - The cluster context name as configured in Codefresh +- **NAMESPACE*** - The Kubernetes namespace to use +- **REGISTRY*** - The base registry address +- **CODEFRESH_REGISTRY*** - the name of the registry as configured inside Codefresh +- **REGISTRY_SECRET** - define a secret to push on the registry (not mandatory) + +### Test variables + +- **SKIP_TESTS** - if defined, skips all tests + +### Releasing variables + +- **REGISTRY_PUBLISH_URL**. The base url of the registry to push to publish +- **DEPLOYMENT_PUBLISH_TAG**. The tag in which to publish +- **DEPLOYMENT_TAG**. Required for stage and prod pipelines, specifies the tag from a previous publish step. For instance, if the dev pipeline had `DEPLOYMENT_PUBLISH_TAG=1.0` then we need to set `DEPLOYMENT_TAG=1.0` in the stage and production steps. + +### Automatic variables + +- Codefresh variables. All variables starting with `CF_` are managed by codefresh +- **[APPNAME]_TAG**. This set of variables are created in the `prepare_deployment` step. The tag is derived from a hash of the content being built. +- **[APPNAME]_TAG_EXISTS**. This set of variables are created in the `prepare_deployment` step. If a manifest for `[APPNAME]_TAG` exists, `[APPNAME]_TAG_EXISTS` is set. This is used to implement smart caching. + +### Cache variables +**[APPNAME]_TAG_FORCE_BUILD**. Set it to force the build of the image named `[APPNAME]_TAG` + +## Update a deployment specification + +The deployment must be updated whenever new build artifacts are added/removed, or upon changes in the templates. + +To update a pipeline for a specific env, run +``` +harness-deployment . -e [ENV] +``` + +A file `./deployment/codefresh-dev.yaml` is created, provided that a file `./deployment/codefresh-[ENV].yaml` exists either as a preconfigured pipeline on cloudharness or as a custom pipeline in your application. 
+ +## Override Codefresh pipeline templates +Edit `./deployment-configuration/codefresh-template-[ENV].yaml` to override any helm chart values file. +Notice that Codefresh templates will be generated only if a specific environment file is defined. +By default, the *dev* and *prod* environments are defined. + +To override the single image build template, edit `deployment-configuration/codefresh-build-template.yaml` + + + +## Create and override the pipelines + +Create a file named `./deployment-configuration/codefresh-template-[ENV].yaml` to create a specific +pipeline. The file can include any additional step in addition to the predefined ones. + +If [ENV] is one of `dev`, `stage`, `prod`, `test`, you only need to specify the additional steps you want to add, or change the bit in the file you need to change. + +## Caching and conditional build + +The dev and test pipelines use content hash to avoid doing the same build twice. +The `.dockerignore` file is used to determine the content which is part of the hashing. + +Caching may be problematic if the build relies on data which is not in the repo and which has to be updated. +In this case, set the `[APPNAME]_TAG_FORCE_BUILD` variable to force a new build. \ No newline at end of file diff --git a/docs/build-deploy/codefresh.md b/docs/build-deploy/codefresh.md deleted file mode 100644 index bae357e8..00000000 --- a/docs/build-deploy/codefresh.md +++ /dev/null @@ -1,26 +0,0 @@ -# Continuous deployment with Codefresh - -Codefresh pipelines are generated as `deployment/codefresh-[ENV].yaml`. -These files can be loaded from codefresh as documented [here](https://codefresh.io/docs/docs/configure-ci-cd-pipeline/pipelines/). - -The pipeline will take care of building the images from the source code and deploy the helm chart. -Log in to codefresh and run the pipeline associated to the repository. 
- -- To setup a new dev pipeline, indicate the remote yaml path `./deployment/codefresh-dev.yaml` -- To setup a new production pipeline, indicate the remote yaml path `./deployment/codefresh-prod.yaml` - -## Update a deployment specification - -The deployment must be updated whenever new build artifacts are added/removed, or opon changes in the templates. - -In order to update the deployment, run -``` -harness-deployment . -e [ENV] -``` - -## Override Codefresh pipeline templates -Edit `./deployment-configuration/codefresh-template-[ENV].yaml` to override any helm chart values file. -Notice that Codefresh templates will be generated only if a specific environment file is defined. -By default, the *dev* and *prod* environments are defined. - -To override the single image build template, edit `deployment-configuration/codefresh-build-template.yaml` \ No newline at end of file diff --git a/libraries/cloudharness-common/cloudharness/auth/keycloak.py b/libraries/cloudharness-common/cloudharness/auth/keycloak.py index 06750ea2..18425911 100644 --- a/libraries/cloudharness-common/cloudharness/auth/keycloak.py +++ b/libraries/cloudharness-common/cloudharness/auth/keycloak.py @@ -5,7 +5,7 @@ import requests from keycloak import KeycloakAdmin, KeycloakOpenID -from keycloak.exceptions import KeycloakAuthenticationError +from keycloak.exceptions import KeycloakAuthenticationError, KeycloakGetError from cloudharness import log from cloudharness.middleware import get_authentication_token @@ -85,6 +85,16 @@ def get_token(username, password): client_secret_key=conf["webclient"]["secret"]) return keycloak_openid.token(username, password)['access_token'] +def is_uuid(s): + import uuid + try: + uuid.UUID(s) + return True + except ValueError: + return False + +class UserNotFound(KeycloakGetError): + pass class AuthClient(): __public_key = None @@ -401,11 +411,11 @@ def get_users(self, query=None, with_details=False) -> List[User]: return users @with_refreshtoken - def get_user(self, 
user_id, with_details=False): + def get_user(self, user_id, with_details=False) -> User: """ Get the user including the user groups - :param user_id: User id + :param user_id_or_username: User id or username :param with_details: Default False, when set to True all attributes of the group are also retrieved @@ -418,14 +428,25 @@ def get_user(self, user_id, with_details=False): :return: UserRepresentation + GroupRepresentation """ admin_client = self.get_admin_client() - user = admin_client.get_user(user_id) + if is_uuid(user_id): + try: + user = admin_client.get_user(user_id) + except KeycloakGetError as e: + raise UserNotFound(user_id) + + else: + found_users = admin_client.get_users({"username": user_id}) + if len(found_users) == 0: + raise UserNotFound(user_id) + user = admin_client.get_user(found_users[0]['id']) # Load full data + user.update({ "userGroups": admin_client.get_user_groups(user_id=user['id'], brief_representation=not with_details), 'realmRoles': admin_client.get_realm_roles_of_user(user['id']) }) return User.from_dict(user) - def get_current_user(self): + def get_current_user(self) -> User: """ Get the current user including the user groups diff --git a/libraries/cloudharness-common/cloudharness/auth/quota.py b/libraries/cloudharness-common/cloudharness/auth/quota.py index c4f2beb6..a17de36b 100644 --- a/libraries/cloudharness-common/cloudharness/auth/quota.py +++ b/libraries/cloudharness-common/cloudharness/auth/quota.py @@ -101,7 +101,7 @@ def get_user_quotas(application_config: ApplicationConfig =None, user_id: str=No Args: application_config (ApplicationConfig): the application config to use for getting the quotas - user_id (str): the Keycloak user id + user_id (str): the Keycloak user id or username to get the quotas for Returns: dict: key/value pairs of the user quota diff --git a/test/test-e2e/Dockerfile b/test/test-e2e/Dockerfile index 79fca289..48c8f855 100644 --- a/test/test-e2e/Dockerfile +++ b/test/test-e2e/Dockerfile @@ -29,7 +29,7 @@ 
WORKDIR /home/test USER test COPY package.json . COPY yarn.lock . -RUN yarn --timeout 99999999 +RUN yarn install --timeout 99999999 # Run everything after as non-privileged user. diff --git a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py index 28fa93a8..c491a131 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/codefresh.py +++ b/tools/deployment-cli-tools/ch_cli_tools/codefresh.py @@ -1,5 +1,6 @@ import os from os.path import join, relpath, exists, dirname +import requests import logging from cloudharness_model.models.api_tests_config import ApiTestsConfig @@ -9,7 +10,7 @@ from cloudharness_utils.testing.util import get_app_environment from .models import HarnessMainConfig, ApplicationTestConfig, ApplicationHarnessConfig from cloudharness_utils.constants import * -from .helm import KEY_APPS, KEY_TASK_IMAGES +from .helm import KEY_APPS, KEY_TASK_IMAGES, KEY_TEST_IMAGES, generate_tag_from_content from .utils import find_dockerfiles_paths, get_app_relative_to_base_path, guess_build_dependencies_from_dockerfile, \ get_image_name, get_template, dict_merge, app_name_from_path, clean_path from cloudharness_utils.testing.api import get_api_filename, get_schemathesis_command, get_urls_from_api_file @@ -32,6 +33,46 @@ def literal_presenter(dumper, data): yaml.add_representer(str, literal_presenter) +def write_env_file(helm_values: HarnessMainConfig, filename): + env = {} + logging.info("Create env file with image info %s", filename) + + def extract_tag(image_name): + return image_name.split(":")[1] if ":" in image_name else "latest" + + def check_image_exists(name, image): + tag = extract_tag(image) + chunks = image.split(":")[0].split("/") + registry = chunks[0] if "." in chunks[0] else "docker.io" + image_name = "/".join(chunks[1::] if "." 
in chunks[0] else chunks[0::]) + api_url = f"https://{registry}/v2/{image_name}/manifests/{tag}" + resp = requests.get(api_url) + if resp.status_code == 200: + # TODO the hash might be the same but not the parent's hash + env[app_specific_tag_variable(name) + "_EXISTS"] = 1 + else: + env[app_specific_tag_variable(name) + "_NEW"] = 1 + + for app in helm_values.apps.values(): + if app.harness and app.harness.deployment.image: + env[app_specific_tag_variable(app.name)] = extract_tag(app.harness.deployment.image) + check_image_exists(app.name, app.harness.deployment.image) + + for k, task_image in helm_values[KEY_TASK_IMAGES].items(): + env[app_specific_tag_variable(k)] = extract_tag(task_image) + check_image_exists(k, task_image) + + for k, task_image in helm_values[KEY_TEST_IMAGES].items(): + env[app_specific_tag_variable(k)] = extract_tag(task_image) + check_image_exists(k, task_image) + + logging.info("Writing env file %s", filename) + with open(filename, 'w') as f: + for k, v in env.items(): + f.write(f"{k}={v}\n") + + + def create_codefresh_deployment_scripts(root_paths, envs=(), include=(), exclude=(), template_name=CF_TEMPLATE_PATH, base_image_name=None, @@ -156,6 +197,7 @@ def codefresh_steps_from_base_path(base_path, build_step, fixed_context=None, in app_domain = get_app_domain( app_config) + app_domain steps[CD_API_TEST_STEP]['scale'][f"{app_name}_api_test"] = dict( + title=f"{app_name} api test", volumes=api_test_volumes(clean_path( dockerfile_relative_to_root)), environment=e2e_test_environment( @@ -171,6 +213,7 @@ def codefresh_steps_from_base_path(base_path, build_step, fixed_context=None, in if app_config.subdomain: steps[CD_E2E_TEST_STEP]['scale'][f"{app_name}_e2e_test"] = dict( + title=f"{app_name} e2e test", volumes=e2e_test_volumes( clean_path(dockerfile_relative_to_root), app_name), environment=e2e_test_environment(app_config) @@ -184,10 +227,11 @@ def add_unit_test_step(app_config: ApplicationHarnessConfig): app_name = app_config.name if 
test_config.unit.enabled and test_config.unit.commands: + tag = app_specific_tag_variable(app_name) steps[CD_UNIT_TEST_STEP]['steps'][f"{app_name}_ut"] = dict( title=f"Unit tests for {app_name}", commands=test_config.unit.commands, - image=r"${{%s}}" % app_name + image=image_tag_with_variables(app_name, tag, base_image_name), ) codefresh_steps_from_base_path(join(root_path, BASE_IMAGES_PATH), CD_BUILD_STEP_BASE, @@ -199,11 +243,16 @@ def add_unit_test_step(app_config: ApplicationHarnessConfig): root_path, APPS_PATH), CD_BUILD_STEP_PARALLEL) if CD_E2E_TEST_STEP in steps: + name = "test-e2e" codefresh_steps_from_base_path(join( - root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=("test-e2e",), publish=False) + root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), publish=False) + steps[CD_E2E_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name) + if CD_API_TEST_STEP in steps: + name = "test-api" codefresh_steps_from_base_path(join( - root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=("test-api",), fixed_context=relpath(root_path, os.getcwd()), publish=False) + root_path, TEST_IMAGES_PATH), CD_BUILD_STEP_TEST, include=(name,), fixed_context=relpath(root_path, os.getcwd()), publish=False) + steps[CD_API_TEST_STEP]["image"] = image_tag_with_variables(name, app_specific_tag_variable(name), base_name=base_image_name) if not codefresh: logging.warning( @@ -310,20 +359,13 @@ def api_test_volumes(app_relative_to_root): ] - - - - - - def codefresh_app_publish_spec(app_name, build_tag, base_name=None): title = app_name.capitalize().replace( '-', ' ').replace('/', ' ').replace('.', ' ').strip() step_spec = codefresh_template_spec( template_path=CF_TEMPLATE_PUBLISH_PATH, - candidate="${{REGISTRY}}/%s:%s" % (get_image_name( - app_name, base_name), build_tag or '${{DEPLOYMENT_TAG}}'), + candidate=image_tag_with_variables(app_name, build_tag, base_name), title=title, ) if not build_tag: @@ 
-331,9 +373,13 @@ def codefresh_app_publish_spec(app_name, build_tag, base_name=None): step_spec['tags'].append('latest') return step_spec +def image_tag_with_variables(app_name, build_tag, base_name=""): + return "${{REGISTRY}}/%s:${{%s}}" % (get_image_name( + app_name, base_name), build_tag or '${{DEPLOYMENT_TAG}}') + def app_specific_tag_variable(app_name): - return "${{ %s }}_${{DEPLOYMENT_PUBLISH_TAG}}" % app_name.replace('-', '_').upper() + return "%s_TAG" % app_name.replace('-', '_').upper().strip() def codefresh_app_build_spec(app_name, app_context_path, dockerfile_path="Dockerfile", base_name=None, helm_values: HarnessMainConfig = {}, dependencies=None): @@ -346,6 +392,9 @@ def codefresh_app_build_spec(app_name, app_context_path, dockerfile_path="Docker title=title, working_directory='./' + app_context_path, dockerfile=dockerfile_path) + + tag = app_specific_tag_variable(app_name) + build["tag"] = "${{%s}}" % tag specific_build_template_path = join(app_context_path, 'build.yaml') if exists(specific_build_template_path): @@ -360,7 +409,7 @@ def codefresh_app_build_spec(app_name, app_context_path, dockerfile_path="Docker build['build_arguments'].append('REGISTRY=${{REGISTRY}}/%s/' % base_name) def add_arg_dependencies(dependencies): - arg_dependencies = [f"{d.upper().replace('-', '_')}=${{{{REGISTRY}}}}/{get_image_name(d, base_name)}:{build['tag']}" for + arg_dependencies = [f"{d.upper().replace('-', '_')}={image_tag_with_variables(d, app_specific_tag_variable(d), base_name)}" for d in dependencies] build['build_arguments'].extend(arg_dependencies) @@ -373,5 +422,26 @@ def add_arg_dependencies(dependencies): helm_values.apps[values_key].harness.dependencies.build) except (KeyError, AttributeError): add_arg_dependencies(helm_values['task-images']) - + + when_condition = existing_build_when_condition(tag) + build["when"] = when_condition return build + +def existing_build_when_condition(tag): + """ + See 
https://codefresh.io/docs/docs/pipelines/conditional-execution-of-steps/#execute-steps-according-to-the-presence-of-a-variable + the _EXISTS variable is added in the preparation step + the _FORCE_BUILD variable may be added manually by the user to force the build of a specific image + """ + is_built = tag + "_EXISTS" + force_build = tag + "_FORCE_BUILD" + when_condition = { + "condition": { + "any": { + "buildDoesNotExist": "includes('${{%s}}', '{{%s}}') == true" % (is_built, is_built), + "forceNoCache": "includes('${{%s}}', '{{%s}}') == false" % (force_build, force_build), + } + } + } + + return when_condition diff --git a/tools/deployment-cli-tools/ch_cli_tools/helm.py b/tools/deployment-cli-tools/ch_cli_tools/helm.py index f33d79c3..f3681b8d 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/helm.py +++ b/tools/deployment-cli-tools/ch_cli_tools/helm.py @@ -5,12 +5,15 @@ import os import shutil import logging +from hashlib import sha1 import subprocess +from functools import cache import tarfile from docker import from_env as DockerClient + from . 
import HERE, CH_ROOT -from cloudharness_utils.constants import VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ +from cloudharness_utils.constants import TEST_IMAGES_PATH, VALUES_MANUAL_PATH, HELM_CHART_PATH, APPS_PATH, HELM_PATH, \ DEPLOYMENT_CONFIGURATION_PATH, BASE_IMAGES_PATH, STATIC_IMAGES_PATH from .utils import get_cluster_ip, get_image_name, env_variable, get_sub_paths, guess_build_dependencies_from_dockerfile, image_name_from_dockerfile_path, \ get_template, merge_configuration_directories, merge_to_yaml_file, dict_merge, app_name_from_path, \ @@ -24,6 +27,9 @@ KEY_DEPLOYMENT = 'deployment' KEY_APPS = 'apps' KEY_TASK_IMAGES = 'task-images' +KEY_TEST_IMAGES = 'test-images' + +DEFAULT_IGNORE = ('/tasks', '.dockerignore', '.hypothesis', "__pycache__", '.node_modules', 'dist', 'build', '.coverage') def deploy(namespace, output_path='./deployment'): @@ -75,6 +81,7 @@ def __init__(self, root_paths, tag='latest', registry='', local=True, domain=Non self.static_images = set() self.base_images = {} + self.all_images = {} def __init_deployment(self): """ @@ -117,6 +124,7 @@ def process_values(self) -> HarnessMainConfig: self.__init_base_images(base_image_name) self.__init_static_images(base_image_name) + helm_values[KEY_TEST_IMAGES] = self.__init_test_images(base_image_name) self.__process_applications(helm_values, base_image_name) @@ -164,8 +172,7 @@ def collect_app_values(self, app_base_path, base_image_name=None): continue app_key = app_name.replace('-', '_') - app_values = create_app_values_spec(app_name, app_path, tag=self.tag, registry=self.registry, env=self.env, - base_image_name=base_image_name, base_images=self.base_images) + app_values = self.create_app_values_spec(app_name, app_path, base_image_name=base_image_name) values[app_key] = dict_merge( values[app_key], app_values) if app_key in values else app_values @@ -176,8 +183,8 @@ def __init_static_images(self, base_image_name): for static_img_dockerfile in self.static_images: img_name = 
image_name_from_dockerfile_path(os.path.basename( static_img_dockerfile), base_name=base_image_name) - self.base_images[os.path.basename(static_img_dockerfile)] = image_tag( - img_name, self.registry, self.tag) + self.base_images[os.path.basename(static_img_dockerfile)] = self.image_tag( + img_name, build_context_path=static_img_dockerfile) def __assign_static_build_dependencies(self, helm_values): for static_img_dockerfile in self.static_images: @@ -199,13 +206,25 @@ def __init_base_images(self, base_image_name): for base_img_dockerfile in self.__find_static_dockerfile_paths(root_path): img_name = image_name_from_dockerfile_path( os.path.basename(base_img_dockerfile), base_name=base_image_name) - self.base_images[os.path.basename(base_img_dockerfile)] = image_tag( - img_name, self.registry, self.tag) + self.base_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + img_name, build_context_path=root_path) self.static_images.update(find_dockerfiles_paths( os.path.join(root_path, STATIC_IMAGES_PATH))) return self.base_images + + def __init_test_images(self, base_image_name): + test_images = {} + for root_path in self.root_paths: + for base_img_dockerfile in find_dockerfiles_paths(os.path.join(root_path, TEST_IMAGES_PATH)): + img_name = image_name_from_dockerfile_path( + os.path.basename(base_img_dockerfile), base_name=base_image_name) + test_images[os.path.basename(base_img_dockerfile)] = self.image_tag( + img_name, build_context_path=base_img_dockerfile) + + return test_images + def __find_static_dockerfile_paths(self, root_path): return find_dockerfiles_paths(os.path.join(root_path, BASE_IMAGES_PATH)) + find_dockerfiles_paths(os.path.join(root_path, STATIC_IMAGES_PATH)) @@ -235,7 +254,6 @@ def create_tls_certificate(self, helm_values): return helm_values['tls'] = self.domain.replace(".", "-") + "-tls" - bootstrap_file = 'bootstrap.sh' certs_parent_folder_path = os.path.join( self.output_path, 'helm', 'resources') @@ -381,6 +399,90 @@ def 
__clear_unused_db_configuration(self, harness_config): if database_type != db: del database_config[db] + def image_tag(self, image_name, build_context_path=None, dependencies=()): + tag = self.tag + if tag is None and not self.local: + logging.info(f"Generating tag for {image_name} from {build_context_path} and {dependencies}") + ignore_path = os.path.join(build_context_path, '.dockerignore') + ignore = set(DEFAULT_IGNORE) + if os.path.exists(ignore_path): + with open(ignore_path) as f: + ignore = ignore.union({line.strip() for line in f}) + logging.info(f"Ignoring {ignore}") + tag = generate_tag_from_content(build_context_path, ignore) + logging.info(f"Content hash: {tag}") + dependencies = dependencies or guess_build_dependencies_from_dockerfile(build_context_path) + tag = sha1((tag + "".join(self.all_images.get(n , '') for n in dependencies)).encode("utf-8")).hexdigest() + logging.info(f"Generated tag: {tag}") + app_name = image_name.split("/")[-1] # the image name can have a prefix + self.all_images[app_name] = tag + return self.registry + image_name + (f':{tag}' if tag else '') + + def create_app_values_spec(self, app_name, app_path, base_image_name=None): + logging.info('Generating values script for ' + app_name) + + specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml') + if os.path.exists(specific_template_path): + logging.info("Specific values template found: " + + specific_template_path) + values = get_template(specific_template_path) + else: + values = {} + + for e in self.env: + specific_template_path = os.path.join( + app_path, 'deploy', f'values-{e}.yaml') + if os.path.exists(specific_template_path): + logging.info( + "Specific environment values template found: " + specific_template_path) + with open(specific_template_path) as f: + values_env_specific = yaml.safe_load(f) + values = dict_merge(values, values_env_specific) + + if KEY_HARNESS in values and 'name' in values[KEY_HARNESS] and values[KEY_HARNESS]['name']: + 
logging.warning('Name is automatically set in applications: name %s will be ignored', + values[KEY_HARNESS]['name']) + + image_paths = [path for path in find_dockerfiles_paths( + app_path) if 'tasks/' not in path and 'subapps' not in path] + if len(image_paths) > 1: + logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name, + image_paths[0]) + if KEY_HARNESS in values and 'dependencies' in values[KEY_HARNESS] and 'build' in values[KEY_HARNESS]['dependencies']: + build_dependencies = values[KEY_HARNESS]['dependencies']['build'] + else: + build_dependencies = [] + + if len(image_paths) > 0: + image_name = image_name_from_dockerfile_path(os.path.relpath( + image_paths[0], os.path.dirname(app_path)), base_image_name) + + values['image'] = self.image_tag( + image_name, build_context_path=app_path, dependencies=build_dependencies) + elif KEY_HARNESS in values and values[KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('image', None) and values[ + KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('auto', False) and not values('image', None): + raise Exception(f"At least one Dockerfile must be specified on application {app_name}. 
" + f"Specify harness.deployment.image value if you intend to use a prebuilt image.") + + task_images_paths = [path for path in find_dockerfiles_paths( + app_path) if 'tasks/' in path] + values[KEY_TASK_IMAGES] = values.get(KEY_TASK_IMAGES, {}) + + if build_dependencies: + for build_dependency in values[KEY_HARNESS]['dependencies']['build']: + if build_dependency in self.base_images: + values[KEY_TASK_IMAGES][build_dependency] = self.base_images[build_dependency] + + for task_path in task_images_paths: + task_name = app_name_from_path(os.path.relpath( + task_path, os.path.dirname(app_path))) + img_name = image_name_from_dockerfile_path(task_name, base_image_name) + + values[KEY_TASK_IMAGES][task_name] = self.image_tag( + img_name, build_context_path=task_path, dependencies=values[KEY_TASK_IMAGES].keys()) + + return values + def get_included_with_dependencies(values, include): app_values = values['apps'].values() @@ -557,65 +659,9 @@ def values_set_legacy(values): values['resources'] = harness[KEY_DEPLOYMENT]['resources'] -def create_app_values_spec(app_name, app_path, tag=None, registry='', env=(), base_image_name=None, base_images=[]): - logging.info('Generating values script for ' + app_name) - - specific_template_path = os.path.join(app_path, 'deploy', 'values.yaml') - if os.path.exists(specific_template_path): - logging.info("Specific values template found: " + - specific_template_path) - values = get_template(specific_template_path) - else: - values = {} - - for e in env: - specific_template_path = os.path.join( - app_path, 'deploy', f'values-{e}.yaml') - if os.path.exists(specific_template_path): - logging.info( - "Specific environment values template found: " + specific_template_path) - with open(specific_template_path) as f: - values_env_specific = yaml.safe_load(f) - values = dict_merge(values, values_env_specific) - - if KEY_HARNESS in values and 'name' in values[KEY_HARNESS] and values[KEY_HARNESS]['name']: - logging.warning('Name is automatically set in 
applications: name %s will be ignored', - values[KEY_HARNESS]['name']) - - image_paths = [path for path in find_dockerfiles_paths( - app_path) if 'tasks/' not in path and 'subapps' not in path] - if len(image_paths) > 1: - logging.warning('Multiple Dockerfiles found in application %s. Picking the first one: %s', app_name, - image_paths[0]) - if len(image_paths) > 0: - image_name = image_name_from_dockerfile_path(os.path.relpath( - image_paths[0], os.path.dirname(app_path)), base_image_name) - values['image'] = image_tag(image_name, registry, tag) - elif KEY_HARNESS in values and values[KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('image', None) and values[ - KEY_HARNESS].get(KEY_DEPLOYMENT, {}).get('auto', False) and not values('image', None): - raise Exception(f"At least one Dockerfile must be specified on application {app_name}. " - f"Specify harness.deployment.image value if you intend to use a prebuilt image.") - - task_images_paths = [path for path in find_dockerfiles_paths( - app_path) if 'tasks/' in path] - values[KEY_TASK_IMAGES] = values.get(KEY_TASK_IMAGES, {}) - for task_path in task_images_paths: - task_name = app_name_from_path(os.path.relpath( - task_path, os.path.dirname(app_path))) - img_name = image_name_from_dockerfile_path(task_name, base_image_name) - - values[KEY_TASK_IMAGES][task_name] = image_tag(img_name, registry, tag) - - if KEY_HARNESS in values and 'dependencies' in values[KEY_HARNESS] and 'build' in values[KEY_HARNESS]['dependencies']: - for build_dependency in values[KEY_HARNESS]['dependencies']['build']: - if build_dependency in base_images: - values[KEY_TASK_IMAGES][build_dependency] = base_images[build_dependency] - - return values - - -def image_tag(image_name, registry, tag): - return registry + image_name + f':{tag}' if tag else '' +def generate_tag_from_content(content_path, ignore=()): + from dirhash import dirhash + return dirhash(content_path, 'sha1', ignore=ignore) def extract_env_variables_from_values(values, envs=tuple(), 
prefix=''): @@ -703,7 +749,7 @@ def validate_dependencies(values): d for d in app_values[KEY_HARNESS]['dependencies']['build']} not_found = { - d for d in build_dependencies if d not in values['task-images']} + d for d in build_dependencies if d not in values[KEY_TASK_IMAGES]} not_found = {d for d in not_found if d not in all_apps} if not_found: raise ValuesValidationException( diff --git a/tools/deployment-cli-tools/ch_cli_tools/preprocessing.py b/tools/deployment-cli-tools/ch_cli_tools/preprocessing.py index 138cf6b3..cee45c96 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/preprocessing.py +++ b/tools/deployment-cli-tools/ch_cli_tools/preprocessing.py @@ -32,7 +32,9 @@ def merge_appdir(root_path, base_path): ) merge_configuration_directories(artifacts[app_name], dest_path) merge_configuration_directories(base_path, dest_path) - + + + for root_path in root_paths: @@ -69,6 +71,14 @@ def merge_appdir(root_path, base_path): merge_appdir(root_path, base_path) merged = True + with open(join(merge_build_path, ".dockerignore"), "a") as dst: + + for root_path in root_paths: + ignore_file = join(root_path, ".dockerignore") + if os.path.exists(ignore_file): + with open(ignore_file) as src: + dst.write(src.read()) + return (root_paths + [merge_build_path]) if merged else root_paths def get_build_paths(root_paths, helm_values, merge_build_path=DEFAULT_MERGE_PATH): diff --git a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py index 7a9beae7..1e6742a5 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/skaffold.py +++ b/tools/deployment-cli-tools/ch_cli_tools/skaffold.py @@ -25,7 +25,7 @@ def create_skaffold_configuration(root_paths, helm_values: HarnessMainConfig, ou overrides = {} def remove_tag(image_name): - return image_name[0:-len(helm_values.tag)-1] + return image_name.split(":")[0] def get_image_tag(name): return f"{get_image_name(name, base_image_name)}" diff --git 
a/tools/deployment-cli-tools/ch_cli_tools/utils.py b/tools/deployment-cli-tools/ch_cli_tools/utils.py index 7ca14f8a..13d8b79f 100644 --- a/tools/deployment-cli-tools/ch_cli_tools/utils.py +++ b/tools/deployment-cli-tools/ch_cli_tools/utils.py @@ -3,6 +3,7 @@ import glob import subprocess import os +from functools import cache from os.path import join, dirname, isdir, basename, exists, relpath, sep, dirname as dn import json import collections @@ -350,11 +351,13 @@ def merge_app_directories(root_paths, destination) -> None: def to_python_module(name): return name.replace('-', '_') - +@cache def guess_build_dependencies_from_dockerfile(filename): dependencies = [] if not "Dockerfile" in filename: filename = join(filename, "Dockerfile") + if not os.path.exists(filename): + return dependencies with open(filename) as f: for line in f: if line.startswith("ARG") and not "=" in line: diff --git a/tools/deployment-cli-tools/harness-deployment b/tools/deployment-cli-tools/harness-deployment index 1c049016..a9cecabb 100644 --- a/tools/deployment-cli-tools/harness-deployment +++ b/tools/deployment-cli-tools/harness-deployment @@ -6,9 +6,10 @@ import os from ch_cli_tools.helm import create_helm_chart, hosts_info, deploy from ch_cli_tools.skaffold import create_skaffold_configuration, create_vscode_debug_configuration -from ch_cli_tools.codefresh import create_codefresh_deployment_scripts +from ch_cli_tools.codefresh import create_codefresh_deployment_scripts, write_env_file from ch_cli_tools.preprocessing import preprocess_build_overrides from ch_cli_tools.utils import merge_app_directories +from cloudharness_utils.constants import DEPLOYMENT_PATH HERE = os.path.dirname(os.path.realpath(__file__)).replace(os.path.sep, '/') ROOT = os.path.dirname(os.path.dirname(HERE)).replace(os.path.sep, '/') @@ -27,8 +28,8 @@ if __name__ == "__main__": help='Specify the applications to include and exclude the rest. 
' 'Omit to build images for all Docker files.') - parser.add_argument('-t', '--tag', dest='tag', action="store", default='latest', - help='Add a tag with the current version (default `latest`)') + parser.add_argument('-t', '--tag', dest='tag', action="store", default=None, + help='Add a tag with the current version (default no value, content hash is used)') parser.add_argument('-n', '--namespace', dest='namespace', action="store", default=None, help='Specify the namespace of the deployment (default taken from values.yaml)') @@ -58,6 +59,9 @@ if __name__ == "__main__": help='Deploy helm chart') parser.add_argument('-N', '--no-cd', dest='no_cd_gen', action="store_const", default=None, const=True, help=f'Do not generate ci/cd files') + parser.add_argument('-we', '--write-env', dest='write_env', action="store_const", default=None, const=True, + help=f'Write build env to .env file in {DEPLOYMENT_PATH}') + args, unknown = parser.parse_known_args(sys.argv[1:]) @@ -93,21 +97,23 @@ if __name__ == "__main__": namespace=args.namespace ) - root_paths = preprocess_build_overrides( + merged_root_paths = preprocess_build_overrides( root_paths=root_paths, helm_values=helm_values) if not args.no_cd_gen and envs: create_codefresh_deployment_scripts( - root_paths, + merged_root_paths, include=args.include, exclude=args.exclude, envs=envs, base_image_name=helm_values['name'], helm_values=helm_values) + + if args.write_env: + write_env_file(helm_values, os.path.join(root_paths[-1], DEPLOYMENT_PATH, ".env")) - create_skaffold_configuration(root_paths, helm_values) - create_vscode_debug_configuration([os.path.join(os.getcwd(), path) for path in args.paths], - helm_values) + create_skaffold_configuration(merged_root_paths, helm_values) + create_vscode_debug_configuration(root_paths, helm_values) hosts_info(helm_values) diff --git a/tools/deployment-cli-tools/requirements.txt b/tools/deployment-cli-tools/requirements.txt index 473311c9..622565b9 100644 --- 
a/tools/deployment-cli-tools/requirements.txt +++ b/tools/deployment-cli-tools/requirements.txt @@ -4,3 +4,4 @@ ruamel.yaml oyaml cloudharness_model cloudharness_utils +dirhash \ No newline at end of file diff --git a/tools/deployment-cli-tools/setup.py b/tools/deployment-cli-tools/setup.py index f9608be9..15fb7f01 100644 --- a/tools/deployment-cli-tools/setup.py +++ b/tools/deployment-cli-tools/setup.py @@ -27,7 +27,8 @@ 'six', 'cloudharness_model', 'cloudharness_utils', - 'fastapi-code-generator' + 'fastapi-code-generator', + 'dirhash' ] diff --git a/tools/deployment-cli-tools/tests/resources/.dockerignore b/tools/deployment-cli-tools/tests/resources/.dockerignore new file mode 100644 index 00000000..bba89d5b --- /dev/null +++ b/tools/deployment-cli-tools/tests/resources/.dockerignore @@ -0,0 +1,22 @@ +**/node_modules +.tox +docs +applications +/infrastructure +/blueprint +test +/tools/deployment-cli-tools +.github +.git +.vscode +/deployment +skaffold.yaml +*.egg-info +__pycache__ +.hypothesis +.coverage +.pytest_cache +/application-templates +/deployment-configuration +/cloud-harness +.openapi-generator \ No newline at end of file diff --git a/tools/deployment-cli-tools/tests/resources/applications/myapp/.dockerignore b/tools/deployment-cli-tools/tests/resources/applications/myapp/.dockerignore new file mode 100644 index 00000000..669be813 --- /dev/null +++ b/tools/deployment-cli-tools/tests/resources/applications/myapp/.dockerignore @@ -0,0 +1 @@ +*.ignored \ No newline at end of file diff --git a/tools/deployment-cli-tools/tests/test_codefresh.py b/tools/deployment-cli-tools/tests/test_codefresh.py index 3e46b65d..4e6439e4 100644 --- a/tools/deployment-cli-tools/tests/test_codefresh.py +++ b/tools/deployment-cli-tools/tests/test_codefresh.py @@ -119,7 +119,7 @@ def test_create_codefresh_configuration(): ) == 2, "Two unit test steps are expected" assert 'myapp_ut' in l1_steps[CD_UNIT_TEST_STEP]['steps'], "Myapp test step is expected" tstep = 
l1_steps[CD_UNIT_TEST_STEP]['steps']['myapp_ut'] - assert tstep['image'] == r"${{myapp}}", "The test image should be the one built for the current app" + assert tstep['image'] == r"${{REGISTRY}}/cloudharness/myapp:${{MYAPP_TAG}}", "The test image should be the one built for the current app" assert len( tstep['commands']) == 2, "Unit test commands are not properly loaded from the unit test configuration file" assert tstep['commands'][0] == "tox", "Unit test commands are not properly loaded from the unit test configuration file" diff --git a/tools/deployment-cli-tools/tests/test_helm.py b/tools/deployment-cli-tools/tests/test_helm.py index 0abd39f2..ed53ab86 100644 --- a/tools/deployment-cli-tools/tests/test_helm.py +++ b/tools/deployment-cli-tools/tests/test_helm.py @@ -273,3 +273,118 @@ def test_clear_all_dbconfig_if_nodb(): # But it is None db_config = values[KEY_APPS]['myapp'][KEY_HARNESS][KEY_DATABASE] assert db_config is None + +def test_tag_hash_generation(): + v1 = generate_tag_from_content(RESOURCES) + v2 = generate_tag_from_content(RESOURCES, ignore=['myapp']) + assert v1 != v2 + v3 = generate_tag_from_content(RESOURCES, ignore=['*/myapp/*']) + assert v3 != v1 + v4 = generate_tag_from_content(RESOURCES, ignore=['applications/myapp/*']) + assert v4 == v3 + v5 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*']) + assert v5 == v4 + + try: + fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt') + with open(fname, 'w') as f: + f.write('a') + + v6 = generate_tag_from_content(RESOURCES, ignore=['/applications/myapp/*']) + assert v6 == v5 + v7 = generate_tag_from_content(RESOURCES) + assert v7 != v1 + finally: + os.remove(fname) + +def test_collect_helm_values_auto_tag(): + def create(): + return create_helm_chart([CLOUDHARNESS_ROOT, RESOURCES], output_path=OUT, include=['samples', 'myapp'], + exclude=['events'], domain="my.local", + namespace='test', env='dev', local=False, tag=None, registry='reg') + + BASE_KEY = 
"cloudharness-base" + values = create() + + # Auto values are set by using the directory hash + assert 'reg/cloudharness/myapp:' in values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] + assert 'reg/cloudharness/myapp:' in values.apps['myapp'].harness.deployment.image + assert 'cloudharness/myapp-mytask' in values[KEY_TASK_IMAGES]['myapp-mytask'] + assert values[KEY_APPS]['myapp'][KEY_HARNESS]['deployment']['image'] == values.apps['myapp'].harness.deployment.image + v1 = values.apps['myapp'].harness.deployment.image + c1 = values["task-images"]["my-common"] + b1 = values["task-images"][BASE_KEY] + d1 = values["task-images"]["cloudharness-flask"] + + values = create() + assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing changed the hash value" + assert values["task-images"][BASE_KEY] == b1, "Base image should not change following the root .dockerignore" + + + try: + fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.txt') + with open(fname, 'w') as f: + f.write('a') + + values = create() + assert v1 != values.apps['myapp'].harness.deployment.image, "Adding the file changed the hash value" + v2 = values.apps['myapp'].harness.deployment.image + assert values["task-images"][BASE_KEY] == b1, "Application files should be ignored for base image following the root .dockerignore" + finally: + os.remove(fname) + + + try: + with open(fname, 'w') as f: + f.write('a') + + values = create() + assert v2 == values.apps['myapp'].harness.deployment.image, "Recreated an identical file, the hash value should be the same" + finally: + os.remove(fname) + + + fname = os.path.join(RESOURCES, 'applications', 'myapp', 'afile.ignored') + try: + with open(fname, 'w') as f: + f.write('a') + + values = create() + assert values["task-images"][BASE_KEY] == b1, "2: Application files should be ignored for base image following the root .dockerignore" + + assert v1 == values.apps['myapp'].harness.deployment.image, "Nothing should change the hash value as the 
file is ignored in the .dockerignore" + finally: + os.remove(fname) + + + + # Dependencies test: if a dependency is changed, the hash should change + fname = os.path.join(RESOURCES, 'infrastructure/common-images', 'my-common', 'afile') + + try: + with open(fname, 'w') as f: + f.write('a') + + values = create() + + assert c1 != values["task-images"]["my-common"], "If content of a static image is changed, the hash should change" + assert v1 != values.apps['myapp'].harness.deployment.image, "If a static image dependency is changed, the hash should change" + finally: + os.remove(fname) + + + fname = os.path.join(CLOUDHARNESS_ROOT, 'atestfile') + try: + with open(fname, 'w') as f: + f.write('a') + + values = create() + + assert b1 != values["task-images"][BASE_KEY], "Content for base image is changed, the hash should change" + assert d1 != values["task-images"]["cloudharness-flask"], "Content for base image is changed, the static image should change" + assert v1 != values.apps['myapp'].harness.deployment.image, "2 levels dependency: If a base image dependency is changed, the hash should change" + finally: + os.remove(fname) + + +