diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f6e92103b5..e2dd35c9e3 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -16,16 +16,20 @@ jobs: if: startsWith(github.repository, 'spinnaker/') runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 + with: + ## Temporary due to bug in qemu: https://github.com/docker/setup-qemu-action/issues/198 + image: tonistiigi/binfmt:qemu-v7.0.0-28 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - uses: actions/setup-java@v3 + uses: docker/setup-buildx-action@v3 + - uses: actions/setup-java@v4 with: - java-version: 11 + java-version: | + 17 distribution: 'zulu' cache: 'gradle' - name: Prepare build variables @@ -37,10 +41,23 @@ jobs: env: ORG_GRADLE_PROJECT_version: ${{ steps.build_variables.outputs.VERSION }} run: ./gradlew build --stacktrace ${{ steps.build_variables.outputs.REPO }}-web:installDist + - name: Build local slim container image for testing + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile.slim + load: true + platforms: local + tags: | + "${{ steps.build_variables.outputs.REPO }}:${{ steps.build_variables.outputs.VERSION }}-unvalidated" + - name: Test local slim container image + env: + FULL_DOCKER_IMAGE_NAME: "${{ steps.build_variables.outputs.REPO }}:${{ steps.build_variables.outputs.VERSION }}-unvalidated" + run: ./gradlew rosco-integration:test - name: Login to GAR # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: docker/login-action@v2 + uses: docker/login-action@v3 # use service account flow defined at: https://github.com/docker/login-action#service-account-based-authentication-1 with: registry: us-docker.pkg.dev @@ -49,7 +66,7 @@ jobs: - name: Build and publish slim container image # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.slim @@ -63,7 +80,7 @@ jobs: - name: Build and publish ubuntu container image # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.ubuntu diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index f62295bb96..a50faaddab 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -33,11 +33,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -48,7 +48,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹī¸ Command-line programs to run using the OS shell. 
# 📚 https://git.io/JvXDl @@ -62,4 +62,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yml index 462af94c7c..6e5dd57b07 100644 --- a/.github/workflows/pr.yml +++ b/.github/workflows/pr.yml @@ -10,16 +10,20 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 + with: + ## Temporary due to bug in qemu: https://github.com/docker/setup-qemu-action/issues/198 + image: tonistiigi/binfmt:qemu-v7.0.0-28 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - uses: actions/setup-java@v3 + uses: docker/setup-buildx-action@v3 + - uses: actions/setup-java@v4 with: - java-version: 11 + java-version: | + 17 distribution: 'zulu' cache: 'gradle' - name: Prepare build variables @@ -32,7 +36,7 @@ jobs: ORG_GRADLE_PROJECT_version: ${{ steps.build_variables.outputs.VERSION }} run: ./gradlew build ${{ steps.build_variables.outputs.REPO }}-web:installDist - name: Build slim container image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . platforms: linux/amd64,linux/arm64 @@ -43,7 +47,7 @@ jobs: "${{ env.CONTAINER_REGISTRY }}/${{ steps.build_variables.outputs.REPO }}:latest-slim" "${{ env.CONTAINER_REGISTRY }}/${{ steps.build_variables.outputs.REPO }}:${{ steps.build_variables.outputs.VERSION }}-slim" - name: Build ubuntu container image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . platforms: linux/amd64,linux/arm64 @@ -51,3 +55,16 @@ jobs: tags: | "${{ env.CONTAINER_REGISTRY }}/${{ steps.build_variables.outputs.REPO }}:latest-ubuntu" "${{ env.CONTAINER_REGISTRY }}/${{ steps.build_variables.outputs.REPO }}:${{ steps.build_variables.outputs.VERSION }}-ubuntu" + - name: Build local slim container image for testing + uses: docker/build-push-action@v6 + with: + context: . + file: Dockerfile.slim + load: true + platforms: local + tags: | + "${{ steps.build_variables.outputs.REPO }}:${{ steps.build_variables.outputs.VERSION }}" + - name: Test local slim container image + env: + FULL_DOCKER_IMAGE_NAME: "${{ steps.build_variables.outputs.REPO }}:${{ steps.build_variables.outputs.VERSION }}" + run: ./gradlew rosco-integration:test diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4bf9bc5434..c5c83bb45c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,18 +14,22 @@ jobs: release: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 with: - java-version: 11 + java-version: | + 17 distribution: 'zulu' cache: 'gradle' - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 + with: + ## Temporary due to bug in qemu: https://github.com/docker/setup-qemu-action/issues/198 + image: tonistiigi/binfmt:qemu-v7.0.0-28 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Assemble release info id: release_info env: @@ -67,7 +71,7 @@ jobs: - name: Login to Google Cloud # Only run this on repositories in the 'spinnaker' org, not on forks. 
if: startsWith(github.repository, 'spinnaker/') - uses: 'google-github-actions/auth@v1' + uses: 'google-github-actions/auth@v2' # use service account flow defined at: https://github.com/google-github-actions/upload-cloud-storage#authenticating-via-service-account-key-json with: credentials_json: '${{ secrets.GAR_JSON_KEY }}' @@ -75,7 +79,7 @@ jobs: # https://console.cloud.google.com/storage/browser/halconfig # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: 'google-github-actions/upload-cloud-storage@v1' + uses: 'google-github-actions/upload-cloud-storage@v2' with: path: 'halconfig/' glob: '*' # exclude directories as we tar.gz it first above @@ -84,7 +88,7 @@ jobs: - name: Login to GAR # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: docker/login-action@v2 + uses: docker/login-action@v3 # use service account flow defined at: https://github.com/docker/login-action#service-account-based-authentication-1 with: registry: us-docker.pkg.dev @@ -93,7 +97,7 @@ jobs: - name: Build and publish slim container image # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.slim @@ -106,7 +110,7 @@ jobs: - name: Build and publish ubuntu container image # Only run this on repositories in the 'spinnaker' org, not on forks. if: startsWith(github.repository, 'spinnaker/') - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v6 with: context: . file: Dockerfile.ubuntu @@ -117,7 +121,7 @@ jobs: "${{ env.CONTAINER_REGISTRY }}/${{ steps.build_variables.outputs.REPO }}:${{ steps.release_info.outputs.RELEASE_VERSION }}-${{ steps.build_variables.outputs.VERSION }}-unvalidated-ubuntu" - name: Create release if: steps.release_info.outputs.SKIP_RELEASE == 'false' - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: body: | ${{ steps.release_info.outputs.CHANGELOG }} diff --git a/Dockerfile.compile b/Dockerfile.compile index f98974504a..ded359c48a 100644 --- a/Dockerfile.compile +++ b/Dockerfile.compile @@ -1,8 +1,8 @@ -FROM alpine:3.16 +FROM alpine:3.20 RUN apk add --update \ - openjdk11 \ + openjdk17 \ && rm -rf /var/cache/apk LABEL maintainer="sig-platform@spinnaker.io" ENV GRADLE_USER_HOME /workspace/.gradle ENV GRADLE_OPTS -Xmx4g -CMD ./gradlew --no-daemon rosco-web:installDist -x test +CMD ./gradlew -PenableCrossCompilerPlugin=true --no-daemon rosco-web:installDist -x test diff --git a/Dockerfile.slim b/Dockerfile.slim index 759d909b0d..eff6ca6641 100644 --- a/Dockerfile.slim +++ b/Dockerfile.slim @@ -1,17 +1,18 @@ -FROM alpine:3.16 +FROM alpine:3.20 LABEL maintainer="sig-platform@spinnaker.io" ENV KUSTOMIZE_VERSION=3.8.6 ENV KUSTOMIZE4_VERSION=4.5.5 -ENV PACKER_VERSION=1.8.1 +ENV PACKER_VERSION=1.10.1 ENV HELMFILE_VERSION=0.153.1 ARG TARGETARCH +ARG PACKER_PLUGINS="amazon azure googlecompute" WORKDIR /packer -RUN apk --no-cache add --update bash wget curl openssl openjdk11-jre git openssh-client && \ +RUN apk --no-cache add --update bash wget curl openssl openjdk17-jre git openssh-client && \ wget https://releases.hashicorp.com/packer/${PACKER_VERSION}/packer_${PACKER_VERSION}_linux_${TARGETARCH}.zip && \ unzip packer_${PACKER_VERSION}_linux_${TARGETARCH}.zip && \ rm packer_${PACKER_VERSION}_linux_${TARGETARCH}.zip @@ -56,5 +57,16 @@ COPY 
rosco-web/config /opt/rosco COPY halconfig/packer /opt/rosco/config/packer RUN mkdir -p /opt/rosco/plugins && chown -R spinnaker:nogroup /opt/rosco/plugins USER spinnaker -CMD ["/opt/rosco/bin/rosco"] +# Install packer plugins (must be run as spinnaker user). To provide a github token (optional), run docker build with something like "--secret id=github_token,env=PACKER_GITHUB_API_TOKEN" +RUN for plugin in $PACKER_PLUGINS ; do \ + if [ -f /run/secrets/github_token ]; then \ + PACKER_GITHUB_API_TOKEN=$(cat /run/secrets/github_token) packer plugins install "github.com/hashicorp/$plugin"; \ + else \ + packer plugins install "github.com/hashicorp/$plugin"; \ + fi; \ +done + +HEALTHCHECK CMD curl http://localhost:8087/health | grep UP || exit 1 + +CMD ["/opt/rosco/bin/rosco"] diff --git a/Dockerfile.ubuntu b/Dockerfile.ubuntu index 080147d923..6f4e8904b8 100644 --- a/Dockerfile.ubuntu +++ b/Dockerfile.ubuntu @@ -1,16 +1,17 @@ -FROM ubuntu:bionic +FROM ubuntu:jammy LABEL maintainer="sig-platform@spinnaker.io" ENV KUSTOMIZE_VERSION=3.8.6 ENV KUSTOMIZE4_VERSION=4.5.5 -ENV PACKER_VERSION=1.8.1 +ENV PACKER_VERSION=1.10.1 ENV HELMFILE_VERSION=0.153.1 ARG TARGETARCH +ARG PACKER_PLUGINS="amazon azure googlecompute" WORKDIR /packer -RUN apt-get update && apt-get -y install openjdk-11-jre-headless wget unzip curl git openssh-client && \ +RUN apt-get update && apt-get -y install openjdk-17-jre-headless wget unzip curl git openssh-client && \ wget https://releases.hashicorp.com/packer/${PACKER_VERSION}/packer_${PACKER_VERSION}_linux_${TARGETARCH}.zip && \ unzip packer_${PACKER_VERSION}_linux_${TARGETARCH}.zip && \ rm packer_${PACKER_VERSION}_linux_${TARGETARCH}.zip @@ -48,10 +49,36 @@ RUN mkdir helmfile && \ mv ./helmfile/helmfile /usr/local/bin/helmfile && \ rm -rf ./helmfile +# Install AWS CLI 2 and the session manager plugin +RUN if [ "${TARGETARCH}" = "arm64" ]; then \ + curl "https://awscli.amazonaws.com/awscli-exe-linux-aarch64.zip" -o "awscliv2.zip" && \ + curl "https://s3.amazonaws.com/session-manager-downloads/plugin/latest/ubuntu_arm64/session-manager-plugin.deb" -o "session-manager-plugin.deb"; \ + else \ + curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \ + curl "https://s3.amazonaws.com/session-manager-downloads/plugin/latest/ubuntu_64bit/session-manager-plugin.deb" -o "session-manager-plugin.deb"; \ + fi && \ + unzip awscliv2.zip && \ + ./aws/install && \ + rm -rf ./awscliv2.zip ./aws && \ + dpkg -i session-manager-plugin.deb && \ + rm session-manager-plugin.deb + RUN adduser --system --uid 10111 --group spinnaker COPY rosco-web/build/install/rosco /opt/rosco COPY rosco-web/config /opt/rosco COPY halconfig/packer /opt/rosco/config/packer RUN mkdir -p /opt/rosco/plugins && chown -R spinnaker:nogroup /opt/rosco/plugins USER spinnaker + +# Install packer plugins (must be run as spinnaker user). 
To provide a github token (optional), run docker build with something like "--secret id=github_token,env=PACKER_GITHUB_API_TOKEN" +RUN for plugin in $PACKER_PLUGINS ; do \ + if [ -f /run/secrets/github_token ]; then \ + PACKER_GITHUB_API_TOKEN=$(cat /run/secrets/github_token) packer plugins install "github.com/hashicorp/$plugin"; \ + else \ + packer plugins install "github.com/hashicorp/$plugin"; \ + fi; \ +done + +HEALTHCHECK CMD curl http://localhost:8087/health | grep UP || exit 1 + CMD ["/opt/rosco/bin/rosco"] diff --git a/build.gradle b/build.gradle index fc0411fe27..8d407361ed 100644 --- a/build.gradle +++ b/build.gradle @@ -16,12 +16,18 @@ subprojects { dependencies { implementation enforcedPlatform("io.spinnaker.kork:kork-bom:$korkVersion") - annotationProcessor platform("io.spinnaker.kork:kork-bom:$korkVersion") + compileOnly "org.projectlombok:lombok" + + annotationProcessor enforcedPlatform("io.spinnaker.kork:kork-bom:$korkVersion") annotationProcessor "org.projectlombok:lombok" annotationProcessor("org.springframework.boot:spring-boot-configuration-processor") - testAnnotationProcessor platform("io.spinnaker.kork:kork-bom:$korkVersion") + + testCompileOnly "org.projectlombok:lombok" + + testAnnotationProcessor enforcedPlatform("io.spinnaker.kork:kork-bom:$korkVersion") testAnnotationProcessor "org.projectlombok:lombok" - testRuntimeOnly "org.junit.vintage:junit-vintage-engine" + + testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine" } test { @@ -44,6 +50,7 @@ subprojects { jvmArgs '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=8187' } } + } defaultTasks ':rosco-web:run' diff --git a/gradle.properties b/gradle.properties index 8b0215158d..b5eadd3008 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,7 +1,7 @@ -korkVersion=7.191.0 +korkVersion=7.251.0 org.gradle.parallel=true -spinnakerGradleVersion=8.31.0 -targetJava11=true +spinnakerGradleVersion=8.32.1 +targetJava17=true # To enable a composite reference to a project, set the # project property `'Composite=true'`. 
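Both Dockerfile.slim and Dockerfile.ubuntu above now install the Packer plugins at image build time and can read an optional GitHub token from a BuildKit secret, and the workflows hand the locally loaded image to the new rosco-integration tests through FULL_DOCKER_IMAGE_NAME. A minimal local sketch of that flow, assuming an illustrative rosco:local-test tag rather than the tags the workflows actually generate:

  # Optional: export a token so the packer plugin downloads are authenticated
  export PACKER_GITHUB_API_TOKEN=...        # placeholder; any valid GitHub token
  DOCKER_BUILDKIT=1 docker build -f Dockerfile.slim \
    --secret id=github_token,env=PACKER_GITHUB_API_TOKEN \
    -t rosco:local-test .
  # Point the integration tests at the image just built (they are skipped when this is unset)
  FULL_DOCKER_IMAGE_NAME=rosco:local-test ./gradlew rosco-integration:test

Without the secret, the plugin install in the Dockerfiles simply runs unauthenticated, which is what the CI workflows do.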
diff --git a/rosco-core/rosco-core.gradle b/rosco-core/rosco-core.gradle index f73a397a0d..8df286d943 100644 --- a/rosco-core/rosco-core.gradle +++ b/rosco-core/rosco-core.gradle @@ -1,23 +1,25 @@ dependencies { - compileOnly "org.projectlombok:lombok" - annotationProcessor "org.projectlombok:lombok" - api "io.spinnaker.kork:kork-artifacts" api "io.spinnaker.kork:kork-plugins" - api "org.codehaus.groovy:groovy" + api "org.apache.groovy:groovy" implementation "com.netflix.frigga:frigga" implementation "io.spinnaker.kork:kork-jedis" implementation "io.spinnaker.kork:kork-retrofit" + implementation "io.swagger.core.v3:swagger-annotations" implementation "io.spinnaker.kork:kork-swagger" implementation "io.spinnaker.kork:kork-web" implementation "com.squareup.retrofit2:converter-jackson" implementation "com.squareup.retrofit2:retrofit" implementation "io.reactivex:rxjava" implementation "org.apache.commons:commons-exec" - implementation "org.codehaus.groovy:groovy" + implementation "org.apache.groovy:groovy" implementation "org.springframework.boot:spring-boot-starter-web" implementation "redis.clients:jedis" testImplementation "org.spockframework:spock-core" testImplementation "org.objenesis:objenesis" + testImplementation "org.junit.jupiter:junit-jupiter-api" + testImplementation "org.springframework:spring-test" + testImplementation "org.springframework.boot:spring-boot-starter-test" + testImplementation project(":rosco-web") } diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/Bake.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/Bake.groovy index 2dfd70d950..9bcd83faa4 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/Bake.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/Bake.groovy @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonInclude import groovy.transform.CompileStatic import groovy.transform.EqualsAndHashCode import groovy.transform.ToString -import io.swagger.annotations.ApiModelProperty +import io.swagger.v3.oas.annotations.media.Schema import com.netflix.spinnaker.kork.artifacts.model.Artifact /** @@ -32,7 +32,7 @@ import com.netflix.spinnaker.kork.artifacts.model.Artifact @EqualsAndHashCode(includes = "id") @ToString(includeNames = true) class Bake { - @ApiModelProperty(value="The id of the bake job.") + @Schema(description ="The id of the bake job.") String id String ami String image_name diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeRequest.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeRequest.groovy index ca566168aa..b083bfd410 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeRequest.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeRequest.groovy @@ -25,7 +25,7 @@ import com.netflix.spinnaker.rosco.providers.util.packagespecific.NupkgPackageUt import com.netflix.spinnaker.rosco.providers.util.packagespecific.RpmPackageUtil import groovy.transform.CompileStatic import groovy.transform.Immutable -import io.swagger.annotations.ApiModelProperty +import io.swagger.v3.oas.annotations.media.Schema /** * A request to bake a new machine image. 
@@ -37,30 +37,30 @@ import io.swagger.annotations.ApiModelProperty class BakeRequest { // A generated uuid which will identify the request and be used as the jobId when running the bake - @ApiModelProperty(value = "A generated UUID which will be used to identify the effective packer bake", readOnly = true) - final String request_id = UUID.randomUUID().toString() + @Schema(description= "A generated UUID which will be used to identify the effective packer bake", accessMode = Schema.AccessMode.READ_ONLY) + String request_id = UUID.randomUUID().toString() String user - @ApiModelProperty("The package(s) to install, as a space-delimited string") @JsonProperty("package") @SerializedName("package") + @Schema(description="The package(s) to install, as a space-delimited string") @JsonProperty("package") @SerializedName("package") String package_name - @ApiModelProperty("The package(s) to install, as Spinnaker artifacts") + @Schema(description="The package(s) to install, as Spinnaker artifacts") List package_artifacts - @ApiModelProperty("The CI server") + @Schema(description="The CI server") String build_host - @ApiModelProperty("The CI job") + @Schema(description="The CI job") String job - @ApiModelProperty("The CI build number") + @Schema(description="The CI build number") String build_number - @ApiModelProperty("The commit hash of the CI build") + @Schema(description="The commit hash of the CI build") String commit_hash - @ApiModelProperty("The CI Build Url") + @Schema(description="The CI Build Url") String build_info_url - @ApiModelProperty("The target platform") + @Schema(description="The target platform") CloudProviderType cloud_provider_type Label base_label - @ApiModelProperty("The named base image to resolve from rosco's configuration") + @Schema(description="The named base image to resolve from rosco's configuration") String base_os String base_name - @ApiModelProperty("The explicit machine image to use, instead of resolving one from rosco's configuration") + @Schema(description="The explicit machine image to use, instead of resolving one from rosco's configuration") String base_ami VmType vm_type StoreType store_type @@ -69,17 +69,17 @@ class BakeRequest { String ami_suffix Boolean upgrade String instance_type - @ApiModelProperty("The image owner organization") + @Schema(description="The image owner organization") String organization - @ApiModelProperty("The explicit packer template to use, instead of resolving one from rosco's configuration") + @Schema(description="The explicit packer template to use, instead of resolving one from rosco's configuration") String template_file_name - @ApiModelProperty("A map of key/value pairs to add to the packer command") + @Schema(description="A map of key/value pairs to add to the packer command") Map extended_attributes - @ApiModelProperty("The name of a json file containing key/value pairs to add to the packer command (must be in the same location as the template file)") + @Schema(description="The name of a json file containing key/value pairs to add to the packer command (must be in the same location as the template file)") String var_file_name - @ApiModelProperty("The name of a configured account to use when baking the image") + @Schema(description="The name of a configured account to use when baking the image") String account_name String spinnaker_execution_id diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeStatus.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeStatus.groovy index 
15c2f3b389..01e29000ba 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeStatus.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/api/BakeStatus.groovy @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore import groovy.transform.CompileStatic import groovy.transform.EqualsAndHashCode import groovy.transform.ToString -import io.swagger.annotations.ApiModelProperty +import io.swagger.v3.oas.annotations.media.Schema /** * The state of a bake as returned by the Bakery API when a bake is created. Once complete it provides a link to the @@ -34,7 +34,7 @@ class BakeStatus implements Serializable { /** * The bake status id. */ - @ApiModelProperty(value="The id of the bake request.") + @Schema(description="The id of the bake request.") String id State state @@ -46,7 +46,7 @@ class BakeStatus implements Serializable { * * @see BakeryController#lookupBake */ - @ApiModelProperty(value="The id of the bake job. Can be passed to lookupBake() to retrieve the details of the newly-baked image.") + @Schema(description="The id of the bake job. Can be passed to lookupBake() to retrieve the details of the newly-baked image.") String resource_id @JsonIgnore diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocal.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocal.groovy index 3450b322fe..08ef94b702 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocal.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocal.groovy @@ -177,9 +177,7 @@ class JobExecutorLocal implements JobExecutor { bakeStatus.state = BakeStatus.State.RUNNING } - if (outputContent) { - bakeStatus.outputContent = outputContent - } + bakeStatus.outputContent = outputContent if (logsContent) { bakeStatus.logsContent = logsContent diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/AWSBakeryDefaultsBean.java b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/AWSBakeryDefaultsBean.java new file mode 100644 index 0000000000..acae6fa146 --- /dev/null +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/AWSBakeryDefaultsBean.java @@ -0,0 +1,39 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.aws.config; + +import com.netflix.spinnaker.rosco.api.BakeRequest; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@ConditionalOnProperty("aws.enabled") +public class AWSBakeryDefaultsBean { + @Bean + @ConfigurationProperties("aws.bakery-defaults") + public RoscoAWSConfiguration.AWSBakeryDefaults awsBakeryDefaults( + @Value("${aws.bakery-defaults.default-virtualization-type:hvm}") + BakeRequest.VmType defaultVirtualizationType) { + RoscoAWSConfiguration.AWSBakeryDefaults defaults = + new RoscoAWSConfiguration.AWSBakeryDefaults(); + defaults.setDefaultVirtualizationType(defaultVirtualizationType); + return defaults; + } +} diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/RoscoAWSConfiguration.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/RoscoAWSConfiguration.groovy index 95713a12f6..9a67044d1b 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/RoscoAWSConfiguration.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/aws/config/RoscoAWSConfiguration.groovy @@ -43,12 +43,6 @@ class RoscoAWSConfiguration { @Autowired AWSBakeHandler awsBakeHandler - @Bean - @ConfigurationProperties('aws.bakery-defaults') - AWSBakeryDefaults awsBakeryDefaults(@Value('${aws.bakery-defaults.default-virtualization-type:hvm}') BakeRequest.VmType defaultVirtualizationType) { - new AWSBakeryDefaults(defaultVirtualizationType: defaultVirtualizationType) - } - static class AWSBakeryDefaults { String awsAccessKey String awsSecretKey diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/AzureBakeryConfigurationBeans.java b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/AzureBakeryConfigurationBeans.java new file mode 100644 index 0000000000..32c9b78dfe --- /dev/null +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/AzureBakeryConfigurationBeans.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.azure.config; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@ConditionalOnProperty("azure.enabled") +public class AzureBakeryConfigurationBeans { + @Bean + @ConfigurationProperties("azure.bakery-defaults") + public RoscoAzureConfiguration.AzureBakeryDefaults azureBakeryDefaults() { + return new RoscoAzureConfiguration.AzureBakeryDefaults(); + } + + @Bean + @ConfigurationProperties("azure") + public RoscoAzureConfiguration.AzureConfigurationProperties azureConfigurationProperties() { + return new RoscoAzureConfiguration.AzureConfigurationProperties(); + } +} diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/RoscoAzureConfiguration.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/RoscoAzureConfiguration.groovy index 93ddcedb28..c1e9a7f1c8 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/RoscoAzureConfiguration.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/azure/config/RoscoAzureConfiguration.groovy @@ -40,12 +40,6 @@ class RoscoAzureConfiguration { @Autowired AzureBakeHandler azureBakeHandler - @Bean - @ConfigurationProperties('azure.bakery-defaults') - AzureBakeryDefaults azureBakeryDefaults() { - new AzureBakeryDefaults() - } - @PostConstruct void init() { cloudProviderBakeHandlerRegistry.register(BakeRequest.CloudProviderType.azure, azureBakeHandler) @@ -67,12 +61,6 @@ class RoscoAzureConfiguration { AzureOperatingSystemSettings baseImage } - @Bean - @ConfigurationProperties('azure') - AzureConfigurationProperties azureConfigurationProperties() { - new AzureConfigurationProperties() - } - static class AzureConfigurationProperties { List accounts = [] } diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/DockerBakeryDefaultsBean.java b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/DockerBakeryDefaultsBean.java new file mode 100644 index 0000000000..f013122a8d --- /dev/null +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/DockerBakeryDefaultsBean.java @@ -0,0 +1,32 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.docker.config; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@ConditionalOnProperty("docker.enabled") +public class DockerBakeryDefaultsBean { + @Bean + @ConfigurationProperties("docker.bakery-defaults") + public RoscoDockerConfiguration.DockerBakeryDefaults dockerBakeryDefaults() { + return new RoscoDockerConfiguration.DockerBakeryDefaults(); + } +} diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/RoscoDockerConfiguration.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/RoscoDockerConfiguration.groovy index 049269afd3..6f77be9327 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/RoscoDockerConfiguration.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/docker/config/RoscoDockerConfiguration.groovy @@ -42,12 +42,6 @@ class RoscoDockerConfiguration { @Autowired DockerBakeHandler dockerBakeHandler - @Bean - @ConfigurationProperties('docker.bakery-defaults') - DockerBakeryDefaults dockerBakeryDefaults() { - new DockerBakeryDefaults() - } - static class DockerBakeryDefaults { String targetRepository String templateFile diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandler.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandler.groovy index ae5990ce66..1ec2949cd4 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandler.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandler.groovy @@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicReference @Component public class GCEBakeHandler extends CloudProviderBakeHandler { - private static final String IMAGE_NAME_TOKEN = "googlecompute: A disk image was created:" + private static final String IMAGE_NAME_TOKEN = "googlecompute: A disk image was created" private final resolvedBakeryDefaults = new AtomicReference() diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/GCEBakeryConfigurationBeans.java b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/GCEBakeryConfigurationBeans.java new file mode 100644 index 0000000000..36c86dd905 --- /dev/null +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/GCEBakeryConfigurationBeans.java @@ -0,0 +1,44 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.google.config; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@ConditionalOnProperty("google.enabled") +public class GCEBakeryConfigurationBeans { + @Bean + @ConfigurationProperties("google.gce.bakery-defaults") + public RoscoGoogleConfiguration.GCEBakeryDefaults deprecatedGCEBakeryDefaults() { + return new RoscoGoogleConfiguration.GCEBakeryDefaults(); + } + + @Bean + @ConfigurationProperties("google.bakery-defaults") + public RoscoGoogleConfiguration.GCEBakeryDefaults gceBakeryDefaults() { + return new RoscoGoogleConfiguration.GCEBakeryDefaults(); + } + + @Bean + @ConfigurationProperties("google") + public RoscoGoogleConfiguration.GoogleConfigurationProperties googleConfigurationProperties() { + return new RoscoGoogleConfiguration.GoogleConfigurationProperties(); + } +} diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/RoscoGoogleConfiguration.groovy b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/RoscoGoogleConfiguration.groovy index 50d0d46795..004888a6ff 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/RoscoGoogleConfiguration.groovy +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/google/config/RoscoGoogleConfiguration.groovy @@ -43,18 +43,6 @@ class RoscoGoogleConfiguration { @Autowired GCEBakeHandler gceBakeHandler - @Bean - @ConfigurationProperties('google.gce.bakery-defaults') - GCEBakeryDefaults deprecatedGCEBakeryDefaults() { - new GCEBakeryDefaults() - } - - @Bean - @ConfigurationProperties('google.bakery-defaults') - GCEBakeryDefaults gceBakeryDefaults() { - new GCEBakeryDefaults() - } - @AutoClone(style = AutoCloneStyle.SIMPLE) @ToString(includeNames = true) static class GCEBakeryDefaults { @@ -88,12 +76,6 @@ class RoscoGoogleConfiguration { cloudProviderBakeHandlerRegistry.register(BakeRequest.CloudProviderType.gce, gceBakeHandler) } - @Bean - @ConfigurationProperties("google") - GoogleConfigurationProperties googleConfigurationProperties() { - new GoogleConfigurationProperties() - } - static class GoogleConfigurationProperties { List accounts = [] } diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/OracleBakeryConfigurationBeans.java b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/OracleBakeryConfigurationBeans.java new file mode 100644 index 0000000000..660b89bec1 --- /dev/null +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/OracleBakeryConfigurationBeans.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.oracle.config; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +@ConditionalOnProperty("oracle.enabled") +public class OracleBakeryConfigurationBeans { + @Bean + @ConfigurationProperties("oracle.bakery-defaults") + public OracleBakeryDefaults oracleBakeryDefaults() { + return new OracleBakeryDefaults(); + } + + @Bean + @ConfigurationProperties("oracle") + public OracleConfigurationProperties oracleConfigurationProperties() { + return new OracleConfigurationProperties(); + } +} diff --git a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/RoscoOracleConfiguration.java b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/RoscoOracleConfiguration.java index 6fcebaf3e1..7f92f870e7 100644 --- a/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/RoscoOracleConfiguration.java +++ b/rosco-core/src/main/groovy/com/netflix/spinnaker/rosco/providers/oracle/config/RoscoOracleConfiguration.java @@ -15,8 +15,6 @@ import javax.annotation.PostConstruct; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; @@ -41,16 +39,4 @@ public RoscoOracleConfiguration( void init() { cloudProviderBakeHandlerRegistry.register(BakeRequest.CloudProviderType.oracle, ociBakeHandler); } - - @Bean - @ConfigurationProperties("oracle.bakery-defaults") - public OracleBakeryDefaults oracleBakeryDefaults() { - return new OracleBakeryDefaults(); - } - - @Bean - @ConfigurationProperties("oracle") - public OracleConfigurationProperties oracleConfigurationProperties() { - return new OracleConfigurationProperties(); - } } diff --git a/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocalSpec.groovy b/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocalSpec.groovy index 1027203793..196fa0b143 100644 --- a/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocalSpec.groovy +++ b/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/jobs/local/JobExecutorLocalSpec.groovy @@ -3,7 +3,6 @@ package com.netflix.spinnaker.rosco.jobs.local import com.netflix.spectator.api.DefaultRegistry import com.netflix.spinnaker.rosco.api.BakeStatus import com.netflix.spinnaker.rosco.jobs.JobRequest -import com.netflix.spinnaker.rosco.jobs.JobExecutor import com.netflix.spinnaker.rosco.providers.util.TestDefaults import spock.lang.Specification import spock.lang.Subject @@ -11,43 +10,43 @@ import spock.lang.Unroll class JobExecutorLocalSpec extends Specification implements TestDefaults { - private static final String BASH_SCRIPT = '''\ - for i in {1..5}; do - echo "Output $i" - sleep 0.1 - echo "Error $i" >&2 - sleep 0.1 - done - echo "Final output" - '''.stripIndent() - private static final String EXPECTED_OUTPUT = '''\ - Output 1 - Output 2 - Output 3 - Output 4 - Output 5 - Final output - '''.stripIndent() - private static final String EXPECTED_LOGS = '''\ - 
Error 1 - Error 2 - Error 3 - Error 4 - Error 5 - '''.stripIndent() - private static final String COMBINED_OUTPUT = '''\ - Output 1 - Error 1 - Output 2 - Error 2 - Output 3 - Error 3 - Output 4 - Error 4 - Output 5 - Error 5 - Final output - '''.stripIndent() + private static final String BASH_SCRIPT = """\ +for i in {1..5}; do + echo "Output \$i" + sleep 0.1 + echo "Error \$i" >&2 + sleep 0.1 +done +echo "Final output" +""" + private static final String EXPECTED_OUTPUT = """\ +Output 1 +Output 2 +Output 3 +Output 4 +Output 5 +Final output +""" + private static final String EXPECTED_LOGS = """\ +Error 1 +Error 2 +Error 3 +Error 4 +Error 5 +""" + private static final String COMBINED_OUTPUT = """\ +Output 1 +Error 1 +Output 2 +Error 2 +Output 3 +Error 3 +Output 4 +Error 4 +Output 5 +Error 5 +Final output +""" @Unroll void 'job executor runs command and captures stdout and stderr with combineStdOutAndErr set to #combineStdOutAndErr'() { @@ -80,4 +79,29 @@ class JobExecutorLocalSpec extends Specification implements TestDefaults { true | COMBINED_OUTPUT | COMBINED_OUTPUT false | EXPECTED_OUTPUT | EXPECTED_LOGS } + + void 'job executor handles empty output'() { + def jobRequest = new JobRequest( + tokenizedCommand: ["true"], + jobId: SOME_JOB_ID, + combineStdOutAndErr: false) + + @Subject + def jobExecutorLocal = new JobExecutorLocal( + registry: new DefaultRegistry(), + timeoutMinutes: 1) + + when: + def jobId = jobExecutorLocal.startJob(jobRequest) + // Give the script time to run + 100 ms fudge factor + sleep(3000) + def bakeStatus = jobExecutorLocal.updateJob(jobId) + + then: + bakeStatus != null + bakeStatus.state == BakeStatus.State.COMPLETED + bakeStatus.result == BakeStatus.Result.SUCCESS + bakeStatus.outputContent == "" + bakeStatus.logsContent == "No output from command." + } } diff --git a/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandlerSpec.groovy b/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandlerSpec.groovy index 0e5b679355..9cf9cf30e1 100644 --- a/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandlerSpec.groovy +++ b/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/google/GCEBakeHandlerSpec.groovy @@ -164,16 +164,21 @@ class GCEBakeHandlerSpec extends Specification implements TestDefaults{ "Build 'googlecompute' finished.\n" + "\n" + "==> Builds finished. 
The artifacts of successful builds are:\n" + - "--> googlecompute: A disk image was created: kato-x12345678-trusty" + "--> googlecompute: ${packerLog}" - Bake bake = gceBakeHandler.scrapeCompletedBakeResults(REGION, "123", logsContent) + Bake bake = gceBakeHandler.scrapeCompletedBakeResults(REGION, bakeId, logsContent) then: with (bake) { - id == "123" + id == bakeId !ami - image_name == "kato-x12345678-trusty" + image_name == imageName } + + where: + packerLog | bakeId | imageName + "A disk image was created: kato-x12345678-trusty" | "123" | "kato-x12345678-trusty" + "A disk image was created in the test-gcp project: kato-x12345678-trusty-changed" | "456" | "kato-x12345678-trusty-changed" } void 'scraping returns null for missing image name'() { diff --git a/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/oracle/OCIBakeHandlerSpec.groovy b/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/oracle/OCIBakeHandlerSpec.groovy index 3a236b8ef9..00005a6c4e 100644 --- a/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/oracle/OCIBakeHandlerSpec.groovy +++ b/rosco-core/src/test/groovy/com/netflix/spinnaker/rosco/providers/oracle/OCIBakeHandlerSpec.groovy @@ -247,7 +247,7 @@ class OCIBakeHandlerSpec extends Specification implements TestDefaults { then: with(options) { cloudProvider == 'oracle' - baseImages.size == 2 + baseImages.size() == 2 baseImages[0].id == 'ubuntu16_04' baseImages[1].id == 'centos_7' } diff --git a/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/aws/AWSStartupTest.java b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/aws/AWSStartupTest.java new file mode 100644 index 0000000000..e7ead122a9 --- /dev/null +++ b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/aws/AWSStartupTest.java @@ -0,0 +1,45 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.aws; + +import com.netflix.spinnaker.rosco.Main; +import com.netflix.spinnaker.rosco.providers.aws.config.RoscoAWSConfiguration; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {Main.class}) +@TestPropertySource( + properties = { + "spring.application.name=rosco", + "aws.enabled=true", + "rosco.config-dir=/some/path", + "bakeAccount=xyz" + }) +public class AWSStartupTest { + + @Autowired AWSBakeHandler awsBakeHandler; + + @Autowired RoscoAWSConfiguration.AWSBakeryDefaults awsBakeryDefaults; + + @Test + public void startupTest() {} +} diff --git a/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/azure/AzureStartupTest.java b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/azure/AzureStartupTest.java new file mode 100644 index 0000000000..5f3020c746 --- /dev/null +++ b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/azure/AzureStartupTest.java @@ -0,0 +1,46 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.netflix.spinnaker.rosco.providers.azure; + +import com.netflix.spinnaker.rosco.Main; +import com.netflix.spinnaker.rosco.providers.azure.config.RoscoAzureConfiguration; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {Main.class}) +@TestPropertySource( + properties = { + "spring.application.name=rosco", + "azure.enabled=true", + "rosco.config-dir=/some/path" + }) +public class AzureStartupTest { + + @Autowired AzureBakeHandler azureBakeHandler; + + @Autowired RoscoAzureConfiguration.AzureBakeryDefaults azureBakeryDefaults; + + @Autowired RoscoAzureConfiguration.AzureConfigurationProperties azureConfigurationProperties; + + @Test + public void startupTest() {} +} diff --git a/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/docker/DockerStartupTest.java b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/docker/DockerStartupTest.java new file mode 100644 index 0000000000..806a488ddc --- /dev/null +++ b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/docker/DockerStartupTest.java @@ -0,0 +1,44 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.netflix.spinnaker.rosco.providers.docker; + +import com.netflix.spinnaker.rosco.Main; +import com.netflix.spinnaker.rosco.providers.docker.config.RoscoDockerConfiguration; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {Main.class}) +@TestPropertySource( + properties = { + "spring.application.name=rosco", + "docker.enabled=true", + "rosco.config-dir=/some/path" + }) +public class DockerStartupTest { + + @Autowired DockerBakeHandler dockerBakeHandler; + + @Autowired RoscoDockerConfiguration.DockerBakeryDefaults dockerBakeryDefaults; + + @Test + public void startupTest() {} +} diff --git a/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/google/GCEStartupTest.java b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/google/GCEStartupTest.java new file mode 100644 index 0000000000..c03195feb5 --- /dev/null +++ b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/google/GCEStartupTest.java @@ -0,0 +1,46 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.google; + +import com.netflix.spinnaker.rosco.Main; +import com.netflix.spinnaker.rosco.providers.google.config.RoscoGoogleConfiguration; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {Main.class}) +@TestPropertySource( + properties = { + "spring.application.name=rosco", + "google.enabled=true", + "rosco.config-dir=/some/path" + }) +public class GCEStartupTest { + + @Autowired GCEBakeHandler gceBakeHandler; + + @Autowired RoscoGoogleConfiguration.GCEBakeryDefaults gceBakeryDefaults; + + @Autowired RoscoGoogleConfiguration.GoogleConfigurationProperties googleConfigurationProperties; + + @Test + public void startupTest() {} +} diff --git a/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/oracle/OracleStartupTest.java b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/oracle/OracleStartupTest.java new file mode 100644 index 0000000000..0e3da8573b --- /dev/null +++ b/rosco-core/src/test/java/com/netflix/spinnaker/rosco/providers/oracle/OracleStartupTest.java @@ -0,0 +1,47 @@ +/* + * Copyright 2024 OpsMx, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.netflix.spinnaker.rosco.providers.oracle; + +import com.netflix.spinnaker.rosco.Main; +import com.netflix.spinnaker.rosco.providers.oracle.config.OracleBakeryDefaults; +import com.netflix.spinnaker.rosco.providers.oracle.config.OracleConfigurationProperties; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringExtension; + +@ExtendWith(SpringExtension.class) +@SpringBootTest(classes = {Main.class}) +@TestPropertySource( + properties = { + "spring.application.name=rosco", + "oracle.enabled=true", + "rosco.config-dir=/some/path" + }) +public class OracleStartupTest { + + @Autowired OCIBakeHandler ociBakeHandler; + + @Autowired OracleBakeryDefaults oracleBakeryDefaults; + + @Autowired OracleConfigurationProperties oracleConfigurationProperties; + + @Test + public void startupTest() {} +} diff --git a/rosco-integration/rosco-integration.gradle b/rosco-integration/rosco-integration.gradle new file mode 100644 index 0000000000..1ca43840a7 --- /dev/null +++ b/rosco-integration/rosco-integration.gradle @@ -0,0 +1,30 @@ +dependencies { + testImplementation project(":rosco-manifests") + testImplementation "com.fasterxml.jackson.core:jackson-databind" + testImplementation "com.github.tomakehurst:wiremock-jre8-standalone" + testImplementation "commons-io:commons-io" + testImplementation "io.spinnaker.kork:kork-artifacts" + testImplementation "org.assertj:assertj-core" + testImplementation "org.junit.jupiter:junit-jupiter-api" + testImplementation "org.junit.jupiter:junit-jupiter-params" + testImplementation "org.slf4j:slf4j-api" + testImplementation "org.testcontainers:testcontainers" + testImplementation "org.testcontainers:junit-jupiter" + testRuntimeOnly "ch.qos.logback:logback-classic" + testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine" +} + +test.configure { + def fullDockerImageName = System.getenv('FULL_DOCKER_IMAGE_NAME') + onlyIf("there is a docker image to test") { + fullDockerImageName != null && fullDockerImageName.trim() != '' + } +} + +test { + // So stdout and stderr from the just-built container are available in CI + testLogging.showStandardStreams = true + + // Run the tests when the docker image changes + inputs.property 'fullDockerImageName', System.getenv('FULL_DOCKER_IMAGE_NAME') +} diff --git a/rosco-integration/src/test/java/com/netflix/spinnaker/rosco/StandaloneContainerTest.java b/rosco-integration/src/test/java/com/netflix/spinnaker/rosco/StandaloneContainerTest.java new file mode 100644 index 0000000000..3f99d8797b --- /dev/null +++ b/rosco-integration/src/test/java/com/netflix/spinnaker/rosco/StandaloneContainerTest.java @@ -0,0 +1,827 @@ +/* + * Copyright 2024 Salesforce, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.netflix.spinnaker.rosco; + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; +import static com.github.tomakehurst.wiremock.client.WireMock.exactly; +import static com.github.tomakehurst.wiremock.client.WireMock.putRequestedFor; +import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo; +import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.tomakehurst.wiremock.client.WireMock; +import com.github.tomakehurst.wiremock.junit5.WireMockExtension; +import com.github.tomakehurst.wiremock.stubbing.Scenario; +import com.netflix.spinnaker.kork.artifacts.model.Artifact; +import com.netflix.spinnaker.rosco.manifests.BakeManifestRequest; +import com.netflix.spinnaker.rosco.manifests.helm.HelmBakeManifestRequest; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.URI; +import java.net.URL; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.nio.file.FileVisitOption; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.Getter; +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; +import org.apache.commons.io.IOUtils; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.output.Slf4jLogConsumer; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.testcontainers.utility.DockerImageName; + +@Testcontainers +class StandaloneContainerTest { + + private static final String REDIS_NETWORK_ALIAS = "redisHost"; + + private static final int REDIS_PORT = 6379; + + private static final Logger logger = LoggerFactory.getLogger(StandaloneContainerTest.class); + + private static final Network network = Network.newNetwork(); + + @RegisterExtension + static final WireMockExtension wmClouddriver = + WireMockExtension.newInstance().options(wireMockConfig().dynamicPort()).build(); + + static int clouddriverPort; + + private static final ObjectMapper mapper = new ObjectMapper(); + + private static GenericContainer redis = + new GenericContainer(DockerImageName.parse("library/redis:5-alpine")) 
+ .withNetwork(network) + .withNetworkAliases(REDIS_NETWORK_ALIAS) + .withExposedPorts(REDIS_PORT); + + private static GenericContainer roscoContainer; + + private static GenericContainer roscoContainerUsingValuesFileForOverrides; + + @BeforeAll + static void setupOnce() throws Exception { + clouddriverPort = wmClouddriver.getRuntimeInfo().getHttpPort(); + logger.info("wiremock clouddriver http port: {} ", clouddriverPort); + + String fullDockerImageName = System.getenv("FULL_DOCKER_IMAGE_NAME"); + + // Skip the tests if there's no docker image. This allows gradlew build to work. + assumeTrue(fullDockerImageName != null); + + // expose clouddriver to rosco + org.testcontainers.Testcontainers.exposeHostPorts(clouddriverPort); + + redis.start(); + + DockerImageName dockerImageName = DockerImageName.parse(fullDockerImageName); + + roscoContainer = + new GenericContainer(dockerImageName) + .withNetwork(network) + .withExposedPorts(8087) + .dependsOn(redis) + .waitingFor(Wait.forHealthcheck()) + .withEnv( + "SPRING_APPLICATION_JSON", + getSpringApplicationJson( + 0)); /* 0 means always use --set/--set-string for overrides */ + + Slf4jLogConsumer logConsumer = new Slf4jLogConsumer(logger); + roscoContainer.start(); + roscoContainer.followOutput(logConsumer); + + roscoContainerUsingValuesFileForOverrides = + new GenericContainer(dockerImageName) + .withNetwork(network) + .withExposedPorts(8087) + .dependsOn(redis) + .waitingFor(Wait.forHealthcheck()) + .withEnv( + "SPRING_APPLICATION_JSON", + getSpringApplicationJson(1)); /* 1 means always use --values files for overrides */ + + roscoContainerUsingValuesFileForOverrides.start(); + roscoContainerUsingValuesFileForOverrides.followOutput(logConsumer); + } + + private static String getSpringApplicationJson(int overridesFileThreshold) + throws JsonProcessingException { + String redisUrl = "redis://" + REDIS_NETWORK_ALIAS + ":" + REDIS_PORT; + logger.info("redisUrl: '{}'", redisUrl); + Map properties = + Map.of( + "redis.connection", + redisUrl, + "services.clouddriver.baseUrl", + "http://" + GenericContainer.INTERNAL_HOST_HOSTNAME + ":" + clouddriverPort, + "helm.overridesFileThreshold", + String.valueOf(overridesFileThreshold)); + return mapper.writeValueAsString(properties); + } + + @AfterAll + static void cleanupOnce() { + if (roscoContainer != null) { + roscoContainer.stop(); + } + + if (redis != null) { + redis.stop(); + } + } + + @BeforeEach + void init(TestInfo testInfo) { + System.out.println("--------------- Test " + testInfo.getDisplayName()); + } + + @Test + void testHealthCheck() throws Exception { + // hit an arbitrary endpoint + HttpRequest request = + HttpRequest.newBuilder() + .uri( + new URI( + "http://" + + roscoContainer.getHost() + + ":" + + roscoContainer.getFirstMappedPort() + + "/health")) + .GET() + .build(); + + HttpClient client = HttpClient.newHttpClient(); + + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + assertThat(response).isNotNull(); + logger.info("response: {}, {}", response.statusCode(), response.body()); + assertThat(response.statusCode()).isEqualTo(200); + } + + public static final String SIMPLE_TEMPLATE_VARIABLE_KEY = "foo"; + public static final String NESTED_TEMPLATE_VARIABLE_KEY = "foo1.test"; + public static final String INDEXED_TEMPLATE_VARIABLE_KEY = "foo2[0]"; + public static final String DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED = "foo3\\.foo4"; + public static final String ARRAY_TEMPLATE_VARIABLE = "foo5"; + + /** + * Enumerates predefined sets of Helm chart values 
used in testing to verify the precedence and + * application of various value sources during the Helm chart rendering process. Each enum value + * represents a different scenario, including default values, overrides, and external value files. + */ + @Getter + public enum HelmTemplateValues { + /** + * Represents default values defined within a Helm chart's values.yaml file. These are the + * fallback values used when no overrides are provided. + */ + DEFAULT( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + "bar_default", + NESTED_TEMPLATE_VARIABLE_KEY, + "bar1_default", + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_default", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_default", + ARRAY_TEMPLATE_VARIABLE, + "bar5_default")), + /** + * Represents user-provided overrides that can be passed to Helm via the '--values' flag or the + * '--set' command. These values are meant to override the default values specified in the + * chart's values.yaml. + */ + OVERRIDES( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + 1000000, + NESTED_TEMPLATE_VARIABLE_KEY, + 999999, + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_overrides", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_overrides", + ARRAY_TEMPLATE_VARIABLE, + "{bar5_overrides}")), + + OVERRIDES_STRING_LARGE_NUMBER( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + "1000000", + NESTED_TEMPLATE_VARIABLE_KEY, + 999999, + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_overrides", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_overrides", + ARRAY_TEMPLATE_VARIABLE, + "{bar5_overrides}")), + + OVERRIDES_NO_LARGE_NUMBERS( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + 999999, + NESTED_TEMPLATE_VARIABLE_KEY, + 999999, + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_overrides", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_overrides", + ARRAY_TEMPLATE_VARIABLE, + "{bar5_overrides}")), + OVERRIDES_NEGATIVE_NUMBERS( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + -1000000, + NESTED_TEMPLATE_VARIABLE_KEY, + -999999, + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_overrides", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_overrides", + ARRAY_TEMPLATE_VARIABLE, + "{bar5_overrides}")), + + /** Represents expected helm template output with scientific notion. */ + TEMPLATE_OUTPUT_WITH_SCIENTIFIC_NOTION( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + // In helm2, any numeric value greater than or equal to 1,000,000 will be treated as a + // float + // for both --set and --values. Consequently, the Helm template output value + // will be in scientific notation. + "1e+06", + NESTED_TEMPLATE_VARIABLE_KEY, + 999999, + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_overrides", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_overrides", + ARRAY_TEMPLATE_VARIABLE, + "{bar5_overrides}")), + + TEMPLATE_OUTPUT_WITH_NEGATIVE_SCIENTIFIC_NOTION( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + // In helm2, any numeric value greater than or equal to 1,000,000 will be treated as a + // float + // for both --set and --values. Consequently, the Helm template output value + // will be in scientific notation. + "-1e+06", + NESTED_TEMPLATE_VARIABLE_KEY, + -999999, + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_overrides", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_overrides", + ARRAY_TEMPLATE_VARIABLE, + "{bar5_overrides}")), + + /** + * Represents values from an external source, such as a separate values file not included within + * the Helm chart itself. These values are meant to simulate the scenario where values are + * provided from an external file during the Helm chart rendering process. 
+ */ + EXTERNAL( + Map.of( + SIMPLE_TEMPLATE_VARIABLE_KEY, + "bar_external", + NESTED_TEMPLATE_VARIABLE_KEY, + "bar1_external", + INDEXED_TEMPLATE_VARIABLE_KEY, + "bar2_external", + DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED, + "bar3_external", + ARRAY_TEMPLATE_VARIABLE, + "bar5_external")), + /** + * Represents an empty map of values, used to test the scenario where no overrides are provided, + * and the default values within the chart's values.yaml should prevail. + */ + EMPTY(Collections.emptyMap()); + + private final Map values; + + HelmTemplateValues(Map values) { + this.values = values; + } + } + + /** + * test data provider for helm template precedence test value of template variable foo is as below + * - default value in chart's value.yaml: bar_default - value in overrides: bar_overrides - value + * in external value.yaml: bar_external + * + * @return test data for helm template precedence test + */ + private static Stream helmOverridesPriorityTestData() { + /* + + */ + return Stream.of( + // default values.yml + overrides through --values + no external values yaml -> values of + // helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + false, + false), + Arguments.of( + HelmTemplateValues.OVERRIDES_NO_LARGE_NUMBERS, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES_NO_LARGE_NUMBERS, + false, + false), + + // default values.yml + overrides through --values + no external values yaml -> values of + // helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.OVERRIDES, + false, + false), + // default values.yml + overrides through --set-string + no external values yaml -> values + // of helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + false, + false), + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.OVERRIDES, + false, + false), + // default values.yml + empty overrides + no external values yaml -> values of helm + // variables is from default + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.DEFAULT, + false, + false), + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.DEFAULT, + false, + false), + // default values.yml + overrides through --values + external values yaml -> values of helm + // variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + true, + false), + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.OVERRIDES, + true, + false), + // default values.yml + overrides through --set-string + external values yaml -> values of + // helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + true, + false), + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.OVERRIDES, + true, + false), + // default values.yml + empty overrides + external values yaml -> 
values of helm variables + // is from external values yaml + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.EXTERNAL, + true, + false), + Arguments.of( + HelmTemplateValues.EMPTY, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.EXTERNAL, + true, + false), + + // default values.yml + overrides through --values + no external values yaml -> values of + // helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + false, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES_STRING_LARGE_NUMBER, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES_STRING_LARGE_NUMBER, + false, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES_NEGATIVE_NUMBERS, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES_NEGATIVE_NUMBERS, + false, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES_NEGATIVE_NUMBERS, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.TEMPLATE_OUTPUT_WITH_NEGATIVE_SCIENTIFIC_NOTION, + false, + true), + // default values.yml + overrides through --values + no external values yaml -> values of + // helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.TEMPLATE_OUTPUT_WITH_SCIENTIFIC_NOTION, + false, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES_STRING_LARGE_NUMBER, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.TEMPLATE_OUTPUT_WITH_SCIENTIFIC_NOTION, + false, + true), + // default values.yml + overrides through --set-string + no external values yaml -> values + // of helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + false, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.TEMPLATE_OUTPUT_WITH_SCIENTIFIC_NOTION, + false, + true), + // default values.yml + empty overrides + no external values yaml -> values of helm + // variables is from default + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.DEFAULT, + false, + true), + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.DEFAULT, + false, + true), + // default values.yml + overrides through --values + external values yaml -> values of helm + // variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + true, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES, + true, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.TEMPLATE_OUTPUT_WITH_SCIENTIFIC_NOTION, + true, + true), + // default values.yml + overrides through --set-string + external values yaml -> values of + // helm variables is from overrides + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.OVERRIDES, + true, + true), + Arguments.of( + HelmTemplateValues.OVERRIDES, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.TEMPLATE_OUTPUT_WITH_SCIENTIFIC_NOTION, + true, + true), + // default 
values.yml + empty overrides + external values yaml -> values of helm variables + // is from external values yaml + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM3, + HelmTemplateValues.EXTERNAL, + true, + true), + Arguments.of( + HelmTemplateValues.EMPTY, + false, + BakeManifestRequest.TemplateRenderer.HELM2, + HelmTemplateValues.EXTERNAL, + true, + true)); + } + + /** + * Test the priority of Helm overrides based on different input scenarios, using + * HelmTemplateValues to define both input and expectedTemplateValues configurations. This method + * evaluates how different types of Helm value configurations (default, overrides, and external) + * are applied and prioritized during the Helm chart rendering process. + * + * @param inputOverrides The HelmTemplateValues enum representing the set of values to be used as + * input for the Helm chart baking process. This includes any overrides or default values that + * should be applied to the template rendering. + * @param useValuesFileForOverrides true to test with a rosco container configured to use a values + * file for overrides. false to test with a rosco container configured to use + * --set/--set-string. + * @param helmVersion The version of Helm being tested (e.g. "TemplateRenderer.HELM2", + * TemplateRenderer.HELM3), which may affect the rendering behavior and the handling of values + * and overrides. + * @param expectedTemplateValues The HelmTemplateValues enum representing the + * expectedTemplateValues set of values after the Helm chart rendering process. This is used + * to verify that the correct values are applied based on the input configuration and Helm + * version. + * @param addExternalValuesFile A boolean flag indicating whether an external values YAML file + * should be included in the helm template command. This allows testing the effect of external + * value files on the rendering outcome. + * @throws Exception if any error occurs during file handling, processing the Helm template, or if + * assertions fail due to unexpected rendering results. + */ + @ParameterizedTest(name = "{displayName} - [{index}] {arguments}") + @MethodSource("helmOverridesPriorityTestData") + void testHelmOverridesPriority( + HelmTemplateValues inputOverrides, + boolean useValuesFileForOverrides, + BakeManifestRequest.TemplateRenderer helmVersion, + HelmTemplateValues expectedTemplateValues, + boolean addExternalValuesFile, + boolean rawOverrides) + throws Exception { + + HelmBakeManifestRequest bakeManifestRequest = new HelmBakeManifestRequest(); + bakeManifestRequest.setOutputName("test"); + bakeManifestRequest.setOutputArtifactName("test_artifact"); + + // The artifact is arbitrary. This test is about verifying behavior of + // overrides regardless of the artifact. + Artifact artifact = Artifact.builder().type("git/repo").build(); + + // Use a mutable list since we conditionally add an external values file below. + List inputArtifacts = new ArrayList<>(); + inputArtifacts.add(artifact); + bakeManifestRequest.setInputArtifacts(inputArtifacts); + + bakeManifestRequest.setOverrides(inputOverrides.values); + bakeManifestRequest.setRawOverrides(rawOverrides); + bakeManifestRequest.setTemplateRenderer(helmVersion); + + // set up the first clouddriver response for /artifacts/fetch to return a helm chart. 
+ Path tempDir = Files.createTempDirectory("tempDir"); + addTestHelmChartToPath(tempDir); + + String scenarioName = "artifacts"; + String scenarioTwo = "externalValuesFile"; + wmClouddriver.stubFor( + WireMock.put(urlPathEqualTo("/artifacts/fetch/")) + .inScenario(scenarioName) + .whenScenarioStateIs(Scenario.STARTED) + .willReturn(aResponse().withStatus(200).withBody(createGzippedTarballFromPath(tempDir))) + .willSetStateTo(scenarioTwo)); + + if (addExternalValuesFile) { + Path externalValuesPath = getFilePathFromClassPath("values_external.yaml"); + // This doesn't have to be an artifact that actually refers to this file + // on the file system...especially since it couldn't refer to a file on + // clouddriver's file system. An arbitrary artifact is sufficient. + Artifact valuesArtifact = Artifact.builder().build(); + inputArtifacts.add(valuesArtifact); + + wmClouddriver.stubFor( + WireMock.put(urlPathEqualTo("/artifacts/fetch/")) + .inScenario(scenarioName) + .whenScenarioStateIs(scenarioTwo) + .willReturn( + aResponse().withStatus(200).withBody(Files.readAllBytes(externalValuesPath)))); + } + + GenericContainer containerToUse = + useValuesFileForOverrides ? roscoContainerUsingValuesFileForOverrides : roscoContainer; + + HttpRequest request = + HttpRequest.newBuilder() + .uri( + new URI( + "http://" + + containerToUse.getHost() + + ":" + + containerToUse.getFirstMappedPort() + + "/api/v2/manifest/bake/" + + helmVersion)) + .header("Content-Type", "application/json") + .header( + "X-SPINNAKER-USER", + "test-user") // to silence a warning when X-SPINNAKER-USER is missing + .header( + "X-SPINNAKER-ACCOUNTS", + "test-account") // to silence a warning when X-SPINNAKER-ACCOUNTS is missing + .POST( + HttpRequest.BodyPublishers.ofString(mapper.writeValueAsString(bakeManifestRequest))) + .build(); + + HttpClient client = HttpClient.newHttpClient(); + + HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofString()); + assertThat(response).isNotNull(); + logger.info("response: {}, {}", response.statusCode(), response.body()); + assertThat(response.statusCode()).isEqualTo(200); + + // Verify that clouddriver was called the expected number of times + wmClouddriver.verify( + exactly(1 + (addExternalValuesFile ? 1 : 0)), + putRequestedFor(urlPathEqualTo("/artifacts/fetch/"))); + + // Verify that the response from rosco is as expected + TypeReference> artifactType = new TypeReference<>() {}; + Map map = mapper.readValue(response.body(), artifactType); + assertThat(new String(Base64.getDecoder().decode((String) map.get("reference")))) + .isEqualTo( + String.format( + readFileFromClasspath("expected_template.yaml") + "\n", + expectedTemplateValues.getValues().get(SIMPLE_TEMPLATE_VARIABLE_KEY), + expectedTemplateValues.getValues().get(NESTED_TEMPLATE_VARIABLE_KEY), + expectedTemplateValues.getValues().get(INDEXED_TEMPLATE_VARIABLE_KEY), + expectedTemplateValues.getValues().get(DOTTED_TEMPLATE_VARIABLE_KEY_NON_NESTED), + expectedTemplateValues + .getValues() + .get(ARRAY_TEMPLATE_VARIABLE) + .toString() + .startsWith("{") + && expectedTemplateValues + .getValues() + .get(ARRAY_TEMPLATE_VARIABLE) + .toString() + .endsWith("}") + ? 
"bar5_overrides" + : expectedTemplateValues.getValues().get(ARRAY_TEMPLATE_VARIABLE).toString())); + } + + /** Translate a classpath resource to a file path */ + private Path getFilePathFromClassPath(String fileName) throws Exception { + return Paths.get( + Objects.requireNonNull(getClass().getClassLoader().getResource(fileName)).toURI()); + } + + /** + * Make a gzipped tarball of all files in a path + * + * @param rootPath the root path of the tarball + * @return a byte array containing the gzipped tarball + */ + private byte[] createGzippedTarballFromPath(Path rootPath) throws IOException { + ArrayList filePathsToAdd = + Files.walk(rootPath, FileVisitOption.FOLLOW_LINKS) + .filter(path -> !path.equals(rootPath)) + .collect(Collectors.toCollection(ArrayList::new)); + // See https://commons.apache.org/proper/commons-compress/examples.html#Common_Archival_Logic + // for background + try (ByteArrayOutputStream os = new ByteArrayOutputStream(); + GzipCompressorOutputStream gzo = new GzipCompressorOutputStream(os); + TarArchiveOutputStream tarArchive = new TarArchiveOutputStream(gzo)) { + for (Path path : filePathsToAdd) { + TarArchiveEntry tarEntry = + new TarArchiveEntry(path.toFile(), rootPath.relativize(path).toString()); + tarArchive.putArchiveEntry(tarEntry); + if (path.toFile().isFile()) { + try (InputStream fileInputStream = Files.newInputStream(path)) { + IOUtils.copy(fileInputStream, tarArchive); + } + } + tarArchive.closeArchiveEntry(); + } + tarArchive.finish(); + gzo.finish(); + return os.toByteArray(); + } + } + + /** + * Adds a test Helm chart to a specified path. This method creates necessary Helm chart files such + * as Chart.yaml, values.yaml, and a sample template. The values.yaml file sets default values for + * the chart, in this case, it sets the value of 'foo' to 'bar'. The templates/foo.yaml file is a + * sample Helm template that uses the 'foo' value. + * + * @param path The root directory path where the Helm chart files will be created. + * @throws IOException If there is an issue creating the files i/o in the specified path. 
+ */ + static void addTestHelmChartToPath(Path path) throws IOException { + + addFile(path, "Chart.yaml", readFileFromClasspath("Chart.yaml")); + addFile(path, "values.yaml", readFileFromClasspath("values.yaml")); + addFile(path, "templates/foo.yaml", readFileFromClasspath("foo.yaml")); + } + + /** + * Create a new file in the temp directory + * + * @param path path of the file to create (relative to the temp directory's root) + * @param content content of the file, or null for an empty file + */ + static void addFile(Path tempDir, String path, String content) throws IOException { + Path pathToCreate = tempDir.resolve(path); + pathToCreate.toFile().getParentFile().mkdirs(); + Files.write(pathToCreate, content.getBytes()); + } + + private static String readFileFromClasspath(String fileName) throws IOException { + // Obtain the URL of the file from the classpath + URL fileUrl = Thread.currentThread().getContextClassLoader().getResource(fileName); + if (fileUrl == null) { + throw new IOException("File not found in classpath: " + fileName); + } + + // Convert URL to a Path and read the file content + try (Stream lines = Files.lines(Paths.get(fileUrl.getPath()), StandardCharsets.UTF_8)) { + return lines.collect(Collectors.joining("\n")); + } + } +} diff --git a/rosco-integration/src/test/resources/Chart.yaml b/rosco-integration/src/test/resources/Chart.yaml new file mode 100644 index 0000000000..24560c799e --- /dev/null +++ b/rosco-integration/src/test/resources/Chart.yaml @@ -0,0 +1,5 @@ +apiVersion: v1 +name: example +description: chart for testing +version: 0.1 +engine: gotpl diff --git a/rosco-integration/src/test/resources/expected_template.yaml b/rosco-integration/src/test/resources/expected_template.yaml new file mode 100644 index 0000000000..7e739e9707 --- /dev/null +++ b/rosco-integration/src/test/resources/expected_template.yaml @@ -0,0 +1,10 @@ +--- +# Source: example/templates/foo.yaml +labels: + helm.sh/chart: example-0.1 +foo: %s +foo1: + test: %s +foo2: %s +foo3.foo4: %s +foo5: %s diff --git a/rosco-integration/src/test/resources/foo.yaml b/rosco-integration/src/test/resources/foo.yaml new file mode 100644 index 0000000000..ad975cd513 --- /dev/null +++ b/rosco-integration/src/test/resources/foo.yaml @@ -0,0 +1,8 @@ +labels: + helm.sh/chart: {{ .Chart.Name }}-{{ .Chart.Version }} +foo: {{ .Values.foo }} +foo1: + test: {{ .Values.foo1.test }} +foo2: {{ (index .Values.foo2 0) }} +foo3.foo4: {{ index .Values "foo3.foo4" }} +foo5: {{ (index .Values.foo5 0) }} diff --git a/rosco-integration/src/test/resources/logback.xml b/rosco-integration/src/test/resources/logback.xml new file mode 100644 index 0000000000..e07c281b0d --- /dev/null +++ b/rosco-integration/src/test/resources/logback.xml @@ -0,0 +1,37 @@ + + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + + + + + + + diff --git a/rosco-integration/src/test/resources/values.yaml b/rosco-integration/src/test/resources/values.yaml new file mode 100644 index 0000000000..4ea5fe996d --- /dev/null +++ b/rosco-integration/src/test/resources/values.yaml @@ -0,0 +1,8 @@ +foo: "bar_default" +foo1: + test: "bar1_default" +foo2: + - "bar2_default" +foo3.foo4: "bar3_default" +foo5: + - "bar5_default" diff --git a/rosco-integration/src/test/resources/values_external.yaml b/rosco-integration/src/test/resources/values_external.yaml new file mode 100644 index 0000000000..92ac97289d --- /dev/null +++ b/rosco-integration/src/test/resources/values_external.yaml @@ -0,0 +1,8 @@ + foo: "bar_external" + foo1: + test: 
"bar1_external" + foo2: + - "bar2_external" + foo3.foo4: "bar3_external" + foo5: + - "bar5_external" diff --git a/rosco-manifests/rosco-manifests.gradle b/rosco-manifests/rosco-manifests.gradle index d135d0997b..43df7f081f 100644 --- a/rosco-manifests/rosco-manifests.gradle +++ b/rosco-manifests/rosco-manifests.gradle @@ -1,9 +1,6 @@ dependencies { implementation project(":rosco-core") - compileOnly "org.projectlombok:lombok" - annotationProcessor "org.projectlombok:lombok" - implementation "org.springframework.boot:spring-boot-starter-web" implementation "io.spinnaker.kork:kork-artifacts" implementation "io.spinnaker.kork:kork-exceptions" @@ -15,6 +12,7 @@ dependencies { implementation "com.squareup.retrofit2:retrofit" testImplementation "org.assertj:assertj-core" + testImplementation "com.squareup.okhttp3:okhttp" testImplementation "org.junit.jupiter:junit-jupiter-api" testImplementation "org.junit.jupiter:junit-jupiter-params" testImplementation "org.mockito:mockito-core" diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/BakeManifestRequest.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/BakeManifestRequest.java index 6a1eb36c8b..783569bfea 100644 --- a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/BakeManifestRequest.java +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/BakeManifestRequest.java @@ -10,7 +10,7 @@ public class BakeManifestRequest { TemplateRenderer templateRenderer; String outputName; String outputArtifactName; - Map overrides; + @Nullable Map overrides; public enum TemplateRenderer { HELM2, diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/HelmBakeTemplateUtils.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/HelmBakeTemplateUtils.java index 5e1f990754..129f9e48be 100644 --- a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/HelmBakeTemplateUtils.java +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/HelmBakeTemplateUtils.java @@ -16,6 +16,10 @@ package com.netflix.spinnaker.rosco.manifests; +import com.netflix.spinnaker.kork.annotations.VisibleForTesting; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactReferenceURI; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStore; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStoreConfigurationProperties; import com.netflix.spinnaker.kork.artifacts.model.Artifact; import com.netflix.spinnaker.kork.exceptions.SpinnakerException; import com.netflix.spinnaker.kork.retrofit.exceptions.SpinnakerHttpException; @@ -24,24 +28,29 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.regex.Pattern; import java.util.stream.Collectors; +import lombok.Getter; public abstract class HelmBakeTemplateUtils { private static final String MANIFEST_SEPARATOR = "---\n"; private static final Pattern REGEX_TESTS_MANIFESTS = Pattern.compile("# Source: .*/templates/tests/.*"); - private final ArtifactDownloader artifactDownloader; + @Getter private final ArtifactDownloader artifactDownloader; + private final ArtifactStore artifactStore; + private final ArtifactStoreConfigurationProperties.HelmConfig helmConfig; - protected HelmBakeTemplateUtils(ArtifactDownloader artifactDownloader) { + protected HelmBakeTemplateUtils( + ArtifactDownloader artifactDownloader, + Optional 
artifactStore, + ArtifactStoreConfigurationProperties.HelmConfig helmConfig) { this.artifactDownloader = artifactDownloader; - } - - public ArtifactDownloader getArtifactDownloader() { - return artifactDownloader; + this.artifactStore = artifactStore.orElse(null); + this.helmConfig = helmConfig; } public abstract String fetchFailureMessage(String description, Exception e); @@ -62,7 +71,8 @@ protected Path downloadArtifactToTmpFile(BakeManifestEnvironment env, Artifact a public abstract String getHelmExecutableForRequest(T request); - protected List getValuePaths(List artifacts, BakeManifestEnvironment env) { + @VisibleForTesting + public List getValuePaths(List artifacts, BakeManifestEnvironment env) { List valuePaths = new ArrayList<>(); try { @@ -103,4 +113,50 @@ protected Path getHelmTypePathFromArtifact( return helmTypeFilePath; } + + /** + * This is a helper method to build the appropriate overrides in the event that an + * ArtifactReferenceURI was passed in an override + */ + protected List buildOverrideList(Map overrides) { + return overrides.entrySet().stream() + .map( + entry -> + entry.getKey() + "=" + expandArtifactReferenceURIs(entry.getValue()).toString()) + .collect(Collectors.toList()); + } + + /** Accessor for whether to expand artifact reference URIs */ + protected boolean isExpandArtifactReferenceURIs() { + return (artifactStore != null && helmConfig.isExpandOverrides()); + } + + /** + * In the event that we encounter and ArtifactReferenceURI, we want to pull down that artifact + * instead of using the raw URI as a value for helm. + */ + protected Object expandArtifactReferenceURIs(Object value) { + if (!isExpandArtifactReferenceURIs() || !(value instanceof String)) { + return value; + } + + // It is important to note, since we do not have an object, but just a + // String, we can only check if the format matches an artifact reference + // URI. + // + // This means if a user is explicitly trying to pass a string with the same + // format, it will attempt to retrieve it and fail. SpEL handles this + // similar problem by returning the raw expression back, but that allows + // for intentional SpEL expressions to silently fail, which is why this is + // not done. Rather than fixing this potential issue now, we can address + // it once someone has reported it, since matching this format seems + // unlikely, but possible. + String ref = (String) value; + if (ArtifactReferenceURI.is(ref)) { + Artifact artifact = artifactStore.get(ArtifactReferenceURI.parse(ref)); + return artifact.getReference(); + } + + return ref; + } } diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/config/RoscoHelmConfigurationProperties.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/config/RoscoHelmConfigurationProperties.java index b3905e6881..9707b87ede 100644 --- a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/config/RoscoHelmConfigurationProperties.java +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/config/RoscoHelmConfigurationProperties.java @@ -24,4 +24,12 @@ public class RoscoHelmConfigurationProperties { private String v3ExecutablePath = "helm3"; private String v2ExecutablePath = "helm"; + /** + * The threshold for determining whether to include overrides directly in the Helm command or + * write them to a separate file. 
If the total length of override values is less than this + * threshold or if threshold is zero, they will be included in the Helm command using "--set" or + * "--set-string". Otherwise, they will be written to a file, and the file path will be included + * in the Helm command using "--values". + */ + private int overridesFileThreshold = 0; } diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmBakeManifestRequest.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmBakeManifestRequest.java index 9ee6ab7dd3..0e874f9014 100644 --- a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmBakeManifestRequest.java +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmBakeManifestRequest.java @@ -3,12 +3,16 @@ import com.netflix.spinnaker.kork.artifacts.model.Artifact; import com.netflix.spinnaker.rosco.manifests.BakeManifestRequest; import java.util.List; +import javax.annotation.Nullable; import lombok.Data; import lombok.EqualsAndHashCode; @Data @EqualsAndHashCode(callSuper = true) public class HelmBakeManifestRequest extends BakeManifestRequest { + @Nullable String apiVersions; + @Nullable String kubeVersion; + String namespace; /** diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmSetArgumentParser.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmSetArgumentParser.java new file mode 100644 index 0000000000..f173b04624 --- /dev/null +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmSetArgumentParser.java @@ -0,0 +1,552 @@ +/* + * Copyright 2024 Salesforce, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package com.netflix.spinnaker.rosco.manifests.helm; + +import java.io.IOException; +import java.io.PushbackReader; +import java.io.StringReader; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import org.apache.commons.lang3.StringUtils; + +/** + * Parses Helm set arguments from a string input. This class is inspired by the Helm's Go code for + * parsing --set arguments, + * + * @link ... + * specifically designed to handle complex nested structures as defined in Helm set arguments. + * It supports parsing into a map structure, handling lists, nested maps, and basic types. + *
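This relates to the overridesFileThreshold property added to RoscoHelmConfigurationProperties above. A minimal sketch of the documented rule, with hypothetical names for everything except overridesFileThreshold itself:

    // Sketch only: a threshold of zero (the default) or an override payload shorter than the
    // threshold keeps overrides inline via --set / --set-string; otherwise the overrides are
    // written to a temporary YAML file that is passed to helm with --values.
    static boolean writeOverridesToValuesFile(int overridesFileThreshold, int totalOverridesLength) {
      return overridesFileThreshold > 0 && totalOverridesLength >= overridesFileThreshold;
    }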

For example, given the --set input as a single comma-separated string: + * "name1=value1,server.port=8080,servers[0].ip=192.168.1.1,servers[0].dns.primary=8.8.8.8,hosts={host1,host2,host3}" + *

The corresponding map output would be: { "name1": "value1", "server": {"port": "8080"}, + * "servers": [{"ip": "192.168.1.1", "dns": {"primary": "8.8.8.8"}}], "hosts": ["host1", + * "host2", "host3"] } + */ +public class HelmSetArgumentParser { + /** + * The maximum index allowed for list elements in helm3's set arguments + * https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L36 + */ + private static final int MAX_INDEX = 65536; + /** + * The maximum level of nesting allowed for names within the helm3's set arguments. + * https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L40 + */ + private static final int MAX_NESTED_NAME_LEVEL = 30; + + private final PushbackReader stringReader; + /** + * Specifies whether all parsed values should be treated as strings, ignoring any potential for + * type inference based on the value content. When true, the parser does not attempt to infer + * types like integers, booleans, or nulls, treating every value as a plain string instead. + */ + private final boolean treatValuesAsString; + + /** + * Initializes a parser for Helm set arguments from a string, supporting complex structures. + * + * @param helmSetArguments A string containing the Helm set arguments to be parsed. The string + * should follow the Helm convention for specifying --set values, which includes the ability + * to define nested properties and lists. For example, + * "key1=value1,key2.subkey1=value2,key3[0]=value3,key4={1,2,3}" is a valid format. + * @param treatValuesAsString Indicates whether to treat all parsed values strictly as strings + * (true), or to attempt to infer their actual data types (false), enhancing type accuracy. + */ + public HelmSetArgumentParser(String helmSetArguments, boolean treatValuesAsString) { + stringReader = new PushbackReader(new StringReader(helmSetArguments)); + this.treatValuesAsString = treatValuesAsString; + } + + /** + * Parses the input string into a structured map of Helm set arguments. It processes the string + * until the end, handling nested structures and type conversions as necessary. + * + *

Ref: Ref: https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L158 + * + * @return A map representing the structured Helm set arguments. + * @throws IOException If parsing fails due to an unexpected read error. + */ + public Map parse() throws IOException { + Map helmArgumentMap = new HashMap<>(); + int currentChar; + while ((currentChar = stringReader.read()) != -1) { + stringReader.unread(currentChar); + // ignoring the return type as return type false is indicative of eof while processing the + // input argument + // original helm code ignores eof, and returns the populated map. + // for example key1=value1,key2= , will update the helmArgumentMap as + // {,} + key(helmArgumentMap, 0); + } + return helmArgumentMap; + } + + /** + * Processes and adds an item to a list based on its index, handling nested structures and type + * conversions. The method supports direct value assignment, nested list handling, and nested map + * handling based on the encountered stop character ('=', '[', '.'). It ensures the list is + * correctly expanded and populated. + * + *
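A minimal usage sketch of the parser described above, assuming parse() returns a Map<String, Object> keyed by the top-level names as in the class-level example (illustrative only; treatValuesAsString = true keeps every scalar a String, as --set-string would):

    Map<String, Object> parsed =
        new HelmSetArgumentParser("name1=value1,server.port=8080,hosts={host1,host2}", true).parse();
    // expected: {name1=value1, server={port=8080}, hosts=[host1, host2]}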

Ref: https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L335 + * + * @param helmValuesList The list to which the item should be added or within which an item should + * be modified. + * @param index The index at which the item should be added or modified. + * @param nestedNameLevel The current level of nesting, used to prevent excessive nesting depths. + * @return The modified list with the new item added or an existing item updated. + * @throws IllegalArgumentException If a negative index is provided or unexpected data is + * encountered. + * @throws IOException If parsing fails due to other reasons, including IOExceptions from + * underlying reads. + */ + private List listItem(List helmValuesList, int index, int nestedNameLevel) + throws IOException { + if (index < 0) { + throw new IllegalArgumentException("negative " + index + " index not allowed"); + } + Set stopCharacters = Set.of('[', '.', '='); + Result readResult = readCharactersUntil(stopCharacters); + if (!readResult.value.isEmpty()) + throw new IllegalArgumentException( + "unexpected data at end of array index " + readResult.value); + if (readResult.isEof()) { + // example argument - "noval[0]" + return helmValuesList; + } + switch (readResult.stopCharacter.get()) { + case '=': + try { + Optional> valueListResultOptional = valList(); + if (valueListResultOptional.isEmpty()) { + // example argument - "noval[0]=" + return setIndex(helmValuesList, index, ""); + } else { + return setIndex(helmValuesList, index, valueListResultOptional.get()); + } + } catch (NotAListException notAListException) { + return setIndex(helmValuesList, index, typedVal(val().value)); + } + case '[': + List currentItems = new ArrayList<>(); + int nextIndex = keyIndex(); + /* + Consider the scenario key[1][2]=value1, key[1][1]=value2 with an initially empty map. + For key[1][2]=value1: + + The parser identifies that it needs to access index 1 of helmValuesList for key1 which doesn't exist yet. Thus, it expands the list: helmValuesList = [null, []]. + Then, it needs to set value1 at index 2 of this nested list. Since the nested list (currentItems) is empty, it's expanded to accommodate index 2: currentItems = [null, null, "value1"]. + + For key[1][1]=value2 (continuing from the previous state): + + The parser again targets index 1 of helmValuesList for key, which now exists and contains [null, null, "value1"]. + To set value2 at index 1, it updates the existing nested list, resulting in currentItems = [null, "value2", "value1"]. + */ + if (helmValuesList.size() > index && helmValuesList.get(index) != null) { + currentItems = (List) helmValuesList.get(index); + } + + List nestedListItems = listItem(currentItems, nextIndex, nestedNameLevel); + return setIndex(helmValuesList, index, nestedListItems); + case '.': + Map nestedMap = new HashMap<>(); + if (helmValuesList.size() > index) { + Object currentElement = helmValuesList.get(index); + if (currentElement != null) { + // If the current element is already a map, use it + nestedMap = (Map) currentElement; + } + } + + boolean res = key(nestedMap, nestedNameLevel); + if (!res) { + // example argument - "a[0]." + return helmValuesList; + } + return setIndex(helmValuesList, index, nestedMap); + + default: + throw new IllegalArgumentException( + "parse error: unexpected token " + readResult.stopCharacter); + } + } + + /** + * Parses a key from the input and updates the provided parsedValuesMap map based on the key's + * type and value. 
This method handles different structures such as lists and nested maps, and it + * populates the map accordingly. It deals with keys ending in specific characters ('=', '[', ',', + * '.') to determine the structure type and recursively parses nested structures as needed. + * + *

Ref: https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L179 + * + * @param parsedValuesMap The map to be updated with the parsed key and its value. + * @param nestingDepth The current depth of nested keys, used to prevent excessively deep nesting. + * @return true when the argument was processed completely and no eof was encountered. For + * example, "key1=value1" would return true as it is a complete key-value pair. Returns false + * when the argument was processed completely but ended with eof. For example, "key1=" would + * return false as it signifies an incomplete value, updating parsedValuesMap with {key1, ""}. + * @throws IllegalArgumentException If a key without a value is encountered or if the nesting + * level exceeds the maximum allowed depth. + * @throws IOException If parsing the key or its value fails due to other reasons. + */ + private boolean key(Map parsedValuesMap, int nestingDepth) throws IOException { + Set stopCharacters = Set.of('=', '[', ',', '.'); + while (true) { + Result readResult = readCharactersUntil(stopCharacters); + if (readResult.isEof()) { + if (StringUtils.isEmpty(readResult.value)) { + return false; + } + throw new IllegalArgumentException("key " + readResult.value + " has no value"); + } + switch (readResult.stopCharacter.get()) { + case '[': + { + int keyIndex = keyIndex(); + List currentList; + if (parsedValuesMap.containsKey(readResult.value) + && parsedValuesMap.get(readResult.value) != null) { + // Helm does not allow keys with value types as primitives or lists to be assigned a + // map value. + // For example, using the command `helm template test --set a=1,a.b=1` results + // in an error: + // Error: failed parsing --set data: unable to parse key: interface conversion: + // interface {} is int64, not map[string]interface {}. + // The code below throws a class exception to make the behavior consistent. + + currentList = (List) parsedValuesMap.get(readResult.value); + } else { + currentList = new ArrayList<>(); + } + + List currentListTmp = listItem(currentList, keyIndex, nestingDepth); + set(parsedValuesMap, readResult.value, currentListTmp); + + return true; + } + case '=': + { + Optional> valuesList; + try { + valuesList = valList(); + if (valuesList.isEmpty()) { + set(parsedValuesMap, readResult.value, ""); + return false; + } else { + set(parsedValuesMap, readResult.value, valuesList.get()); + } + } catch (NotAListException notAListException) { + Result singleValueResult = val(); + set(parsedValuesMap, readResult.value, typedVal(singleValueResult.value)); + } + return true; + } + case ',': + { + set(parsedValuesMap, readResult.value, ""); + throw new IllegalArgumentException( + String.format("key %s has no value (cannot end with ,)", readResult.value)); + } + case '.': + { + if (nestingDepth++ > MAX_NESTED_NAME_LEVEL) { + throw new IllegalArgumentException( + "Value name nested level is greater than maximum supported nested level of " + + MAX_NESTED_NAME_LEVEL); + } + Map nestedMap = new HashMap<>(); + if (parsedValuesMap.containsKey(readResult.value) + && parsedValuesMap.get(readResult.value) != null) { + // Helm does not allow keys with value types as primitives or lists to be assigned a + // map value. + // For example, using the command `helm template test --set a=1,a.b=1` results + // in an error: + // Error: failed parsing --set data: unable to parse key: interface conversion: + // interface {} is int64, not map[string]interface {}. + // The code below throws a class exception to make the behavior consistent. 
+ + nestedMap = (Map) parsedValuesMap.get(readResult.value); + } + boolean res = key(nestedMap, nestingDepth); + + if (res && nestedMap.isEmpty()) { + // test data "name1.=name2" + throw new IllegalArgumentException("key map " + readResult.value + " has no value"); + } + if (!nestedMap.isEmpty()) { + parsedValuesMap.put(readResult.value, nestedMap); + } + return res; + } + default: + throw new IllegalArgumentException( + "parse error: unexpected token " + readResult.stopCharacter); + } + } + } + + private void set(Map targetMap, String key, Object valueToSet) { + if (key == null || key.isEmpty()) { + return; + } + targetMap.put(key, valueToSet); + } + + /** + * Inserts or updates a value at a specified index within a list. If the index exceeds the current + * list size, the list is expanded with null elements to accommodate the new index. This method + * ensures the index is within a predefined range to prevent excessive list sizes. + * + *

Ref: https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L299 + * + * @param targetList The list to be modified. + * @param index The position at which to insert or update the value. + * @param value The value to insert or update at the specified index. + * @return The modified list with the value set at the specified index. + * @throws IllegalArgumentException if the index is negative or exceeds the maximum allowed index. + */ + private List setIndex(List targetList, int index, Object value) + throws IOException { + if (index < 0) { + throw new IllegalArgumentException("negative " + index + " index not allowed"); + } + if (index > MAX_INDEX) { + throw new IllegalArgumentException( + "index of " + index + " is greater than maximum supported index of " + MAX_INDEX); + } + + // Ensuring the list is large enough + while (targetList.size() <= index) { + targetList.add(null); + } + + targetList.set(index, value); + return targetList; + } + + /** + * Parses the next segment of input as an index, expecting the segment to terminate with a ']' + * character. This method is used to parse array-like index specifications in the input string. + * + *
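As a concrete illustration of the padding behavior described above (derived from the setIndex and listItem code in this hunk, illustrative only), a sparse index back-fills the list with nulls:

    Map<String, Object> parsed = new HelmSetArgumentParser("a[3]=x", true).parse();
    // expected: {a=[null, null, null, x]}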

Ref: https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L324 + * + * @return The parsed index as an integer. + * @throws IOException If an I/O error occurs during reading. + * @throws IllegalArgumentException If the end of the input is reached unexpectedly or if the + * parsed index is not a valid integer. + */ + private int keyIndex() throws IOException { + Set stopCharacters = Set.of(']'); + Result readResult = readCharactersUntil(stopCharacters); + if (readResult.isEof()) { + throw new IllegalArgumentException( + "Error parsing index: Expected closing bracket ']', but encountered EOF"); + } + int parsedIndex; + try { + parsedIndex = Integer.parseInt(readResult.value); + } catch (NumberFormatException e) { + throw new IllegalArgumentException( + "Error parsing index: parsing '" + readResult.value + "': invalid syntax", e); + } + return parsedIndex; + } + + private Result val() throws IOException { + Set stopCharacters = Set.of(','); + return readCharactersUntil(stopCharacters); + } + + /** + * Parses a list of values enclosed in curly braces from the input stream. This method is designed + * to read and interpret a structured list format, expecting values to be separated by commas and + * the entire list to be enclosed within '{' and '}'. Each value within the list is parsed + * according to its type, and the method supports nested lists and objects as elements of the + * outer list. + * + *

Ref: https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L465 + * + * @return An Optional containing a list of parsed objects, each corresponding to a value found + * within the input list. Returns an empty Optional if the initial character indicates an EOF, + * signifying that no list is present at the expected location. + * @throws IOException If an I/O error occurs during reading. + * @throws NotAListException If the initial character read is not '{', indicating that the + * expected list structure is not present. + * @throws IllegalArgumentException If the list structure is malformed, such as lacking a closing + * '}'. + */ + private Optional> valList() throws IOException { + int currentChar = stringReader.read(); + if (currentChar == -1) { + // example argument - "name1.name2=" + return Optional.empty(); + } + + if ((char) currentChar != '{') { + stringReader.unread(currentChar); + throw new NotAListException("not a list"); + } + + List valueList = new ArrayList<>(); + Set stopCharacters = Set.of(',', '}'); + while (true) { + Result readResult = readCharactersUntil(stopCharacters); + if (readResult.isEof()) { + throw new IllegalArgumentException("list must terminate with '}'"); + } + switch (readResult.stopCharacter.get()) { + case '}': + { + int nextChar = stringReader.read(); + if (nextChar != -1 && (char) nextChar != ',') { + stringReader.unread(nextChar); + } + valueList.add(typedVal(readResult.value)); + return Optional.of(valueList); + } + + case ',': + valueList.add(typedVal(readResult.value)); + break; + default: + throw new IllegalArgumentException( + "parse error: unexpected token " + readResult.stopCharacter); + } + } + } + + /** + * Reads characters from the input stream until a stopCharacters character is encountered. This + * method parses input, accumulating characters into a string until one of the specified + * stopCharacters characters is read. It handles escape sequences by including the character + * immediately following a backslash in the output without evaluation. Ref : + * https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L503 + * + * @param stopCharacters A set of characters that should terminate the reading. + * @return A Result object containing the accumulated string up to (but not including) the + * stopCharacters character, the stopCharacters character itself, and a flag indicating if the + * end of the stream (EOF) was reached. + * @throws IOException If an I/O error occurs during reading. + */ + private Result readCharactersUntil(Set stopCharacters) throws IOException { + StringBuilder accumulatedChars = new StringBuilder(); + int r; + while ((r = stringReader.read()) != -1) { + char currentChar = (char) r; + if (stopCharacters.contains(currentChar)) { + return new Result(accumulatedChars.toString(), currentChar); + } + // backslash is the escape character to include special characters like period,comma etc. + // Treat the next character as a literal part of the value, bypassing its special meaning + else if (currentChar == '\\') { + + int nextCharCode = stringReader.read(); + if (nextCharCode == -1) { + // example argument - "key=value\\" + return new Result(accumulatedChars.toString()); + } + accumulatedChars.append((char) nextCharCode); + } else { + accumulatedChars.append(currentChar); + } + } + return new Result(accumulatedChars.toString()); + } + + /** + * Represents the outcome of reading characters from an input stream until a specified stop + * character or end of input. 
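The backslash handling described above is what keeps a key shaped like the integration test's foo3\.foo4 override a single, non-nested key. A sketch of the expected behavior (illustrative only; in Java source the backslash is written escaped):

    Map<String, Object> parsed = new HelmSetArgumentParser("foo3\\.foo4=bar", true).parse();
    // expected: {foo3.foo4=bar}  (one top-level key containing a literal dot, no nested map)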
This class captures the accumulated characters as a string. The stop + * character is optional and will be absent if the reading stopped because the end of the input + * (EOF) was reached. + */ + class Result { + + private final String value; + private final Optional stopCharacter; + + public Result(String value, Character stopCharacter) { + this.value = value; + this.stopCharacter = Optional.of(stopCharacter); + } + + public Result(String value) { + this.value = value; + this.stopCharacter = Optional.empty(); + } + + public boolean isEof() { + return stopCharacter.isEmpty(); + } + } + /** + * Converts a string value to its corresponding Java type based on its content. If the {@code + * treatValuesAsString} flag is set, all values are returned as strings. Otherwise, it tries to + * convert the string to a boolean, null, long, or falls back to returning the string itself if no + * specific type conversion is applicable. + * + *
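As an end-to-end illustration of this type inference, here is a minimal, hypothetical sketch (not part of the patch): the input string and class name are invented, the sketch is assumed to sit in the same package as the parser, and the constructor and parse() signature follow the tests added later in this change.

import java.util.Map;

class HelmSetArgumentParserSketch {
  public static void main(String[] args) throws Exception {
    // Same syntax Helm accepts for --set: nested keys, lists, escaped commas.
    String input = "image.tag=1.2.3,replicas=2,resources.limits={cpu,memory}";

    // treatValuesAsString=false mirrors --set (longs, booleans and null are inferred);
    // passing true instead mirrors --set-string (every value stays a String).
    Map<String, Object> parsed = new HelmSetArgumentParser(input, false).parse();

    // Logical structure: {image={tag=1.2.3}, replicas=2, resources={limits=[cpu, memory]}};
    // "2" becomes a Long, "1.2.3" stays a String (map iteration order is not guaranteed).
    System.out.println(parsed);
  }
}

With treatValuesAsString set to true, the same input would keep replicas as the String "2", which is what HelmTemplateUtils relies on when rawOverrides is false.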
Ref : https://github.com/helm/helm/blob/v3.14.2/pkg/strvals/parser.go#L528 + * + * @param value The string representation of the value to be converted. + * @return The converted value as an Object. This can be a String, Boolean, Long, or null, + * depending on the content of the input string and the value of {@code treatValuesAsString}. + */ + private Object typedVal(String value) { + + if (treatValuesAsString) { + return value; + } + + if (value.equalsIgnoreCase("true")) { + return Boolean.TRUE; + } + + if (value.equalsIgnoreCase("false")) { + return Boolean.FALSE; + } + + if (value.equalsIgnoreCase("null")) { + return null; + } + + if (value.equalsIgnoreCase("0")) { + return 0L; // Long, to match int64 from Go + } + + // Try parsing as a Long if the string does not start with zero + // and is not one of the special cases above. + if (!value.isEmpty() && value.charAt(0) != '0') { + try { + return Long.parseLong(value); + } catch (NumberFormatException e) { + // Not a Long, return the string itself + } + } + + return value; + } + + /** Exception indicating that the expected list structure was not found. */ + class NotAListException extends RuntimeException { + public NotAListException(String message) { + super(message); + } + } +} diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtils.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtils.java index f0d74e9435..b853cff2a0 100644 --- a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtils.java +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtils.java @@ -1,5 +1,9 @@ package com.netflix.spinnaker.rosco.manifests.helm; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStore; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStoreConfigurationProperties; import com.netflix.spinnaker.kork.artifacts.model.Artifact; import com.netflix.spinnaker.rosco.jobs.BakeRecipe; import com.netflix.spinnaker.rosco.manifests.ArtifactDownloader; @@ -8,23 +12,43 @@ import com.netflix.spinnaker.rosco.manifests.HelmBakeTemplateUtils; import com.netflix.spinnaker.rosco.manifests.config.RoscoHelmConfigurationProperties; import java.io.IOException; +import java.io.UncheckedIOException; +import java.io.Writer; +import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.UUID; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; +import org.springframework.boot.context.properties.ConfigurationPropertiesScan; import org.springframework.stereotype.Component; +import org.springframework.util.StringUtils; @Component @Slf4j +@ConfigurationPropertiesScan("com.netflix.spinnaker.kork.artifacts.artifactstore") public class HelmTemplateUtils extends HelmBakeTemplateUtils { + // Threshold for Helm 3, where absolute values >= 1000000 cause scientific notation in overrides. + private static final long HELM3_MAX_ABSOLUTE_NUMERIC_VALUE_THRESHOLD = 1_000_000L; + private static final String OVERRIDES_FILE_PREFIX = "overrides_"; + private static final String YML_FILE_EXTENSION = ".yml"; private final RoscoHelmConfigurationProperties helmConfigurationProperties; + /** + * Dedicated ObjectMapper for YAML processing. 
This custom ObjectMapper ensures specialized + * handling of YAML format, allowing distinct settings from the default JSON ObjectMapper. + */ + private final ObjectMapper yamlObjectMapper = new ObjectMapper(new YAMLFactory()); public HelmTemplateUtils( ArtifactDownloader artifactDownloader, + Optional artifactStore, + ArtifactStoreConfigurationProperties artifactStoreProperties, RoscoHelmConfigurationProperties helmConfigurationProperties) { - super(artifactDownloader); + super(artifactDownloader, artifactStore, artifactStoreProperties.getHelm()); this.helmConfigurationProperties = helmConfigurationProperties; } @@ -40,7 +64,7 @@ public BakeRecipe buildBakeRecipe(BakeManifestEnvironment env, HelmBakeManifestR templatePath = getHelmTypePathFromArtifact(env, inputArtifacts, request.getHelmChartFilePath()); log.info("path to Chart.yaml: {}", templatePath); - return buildCommand(request, getValuePaths(inputArtifacts, env), templatePath); + return buildCommand(request, getValuePaths(inputArtifacts, env), templatePath, env); } public String fetchFailureMessage(String description, Exception e) { @@ -55,7 +79,10 @@ public String getHelmExecutableForRequest(HelmBakeManifestRequest request) { } public BakeRecipe buildCommand( - HelmBakeManifestRequest request, List valuePaths, Path templatePath) { + HelmBakeManifestRequest request, + List valuePaths, + Path templatePath, + BakeManifestEnvironment env) { BakeRecipe result = new BakeRecipe(); result.setName(request.getOutputName()); @@ -88,24 +115,161 @@ public BakeRecipe buildCommand( command.add("--include-crds"); } + String apiVersions = request.getApiVersions(); + if (StringUtils.hasText(apiVersions)) { + command.add("--api-versions"); + command.add(apiVersions); + } + + String kubeVersion = request.getKubeVersion(); + if (StringUtils.hasText(kubeVersion)) { + command.add("--kube-version"); + command.add(kubeVersion); + } + Map overrides = request.getOverrides(); - if (!overrides.isEmpty()) { - List overrideList = new ArrayList<>(); - for (Map.Entry entry : overrides.entrySet()) { - overrideList.add(entry.getKey() + "=" + entry.getValue().toString()); + Path overridesFile = null; + if (overrides != null && !overrides.isEmpty()) { + String overridesString = getOverridesAsString(overrides); + if (overridesString.length() < helmConfigurationProperties.getOverridesFileThreshold() + || helmConfigurationProperties.getOverridesFileThreshold() == 0) { + + String overrideOption = request.isRawOverrides() ? "--set" : "--set-string"; + command.add(overrideOption); + command.add(overridesString); + } else if (request.isRawOverrides()) { + + // Helm3 treats large numbers as `int64` when passed via `--set` and as `float64` when + // passed via `--values`, causing different template outputs. + // Specifically, numbers >= 1,000,000 or <= -1,000,000 appear in non-scientific + // notation with `--set` and in scientific notation with `--values`. + // To ensure consistent Helm3 template behavior, the overrides YAML file + // feature should be skipped for values >= or <= this threshold. + // In contrast, Helm2 renders large numbers in scientific notation regardless of whether + // they are passed via `--set` or `--values`. 
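To make the resulting split concrete, here is a minimal, hypothetical sketch (not part of the patch): the override keys and values are invented, and the 1_000_000L literal stands in for the private HELM3_MAX_ABSOLUTE_NUMERIC_VALUE_THRESHOLD constant; only getLargeAbsoluteNumericValuedEntries, added further down in this file, is real.

import com.netflix.spinnaker.rosco.manifests.helm.HelmTemplateUtils;
import java.util.Map;

class OverrideSplitSketch {
  public static void main(String[] args) {
    // Hypothetical bake-request overrides.
    Map<String, Object> overrides =
        Map.of("replicaCount", 3, "maxSurge", "25%", "quota.cpuMillis", 2_500_000L);

    // Entries whose absolute numeric value is >= 1,000,000 stay on the command line via --set,
    // so Helm 3 renders them without scientific notation.
    Map<String, Object> large =
        HelmTemplateUtils.getLargeAbsoluteNumericValuedEntries(overrides, 1_000_000L);

    System.out.println(large); // {quota.cpuMillis=2500000}
  }
}

The remaining entries (replicaCount and maxSurge here) are the ones written to the generated overrides YAML file and passed to Helm through a trailing --values argument.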
+ Map<String, Object> overridesForYaml = new HashMap<>(request.getOverrides()); + Map<String, Object> largeOverrides = + getLargeAbsoluteNumericValuedEntries( + request.getOverrides(), HELM3_MAX_ABSOLUTE_NUMERIC_VALUE_THRESHOLD); + overridesForYaml.keySet().removeAll(largeOverrides.keySet()); + // Process overrides with large numeric values as command-line arguments. + if (!largeOverrides.isEmpty()) { + command.add("--set"); + command.add(getOverridesAsString(largeOverrides)); + } + // Convert the remaining overrides to YAML. + if (!overridesForYaml.isEmpty()) { + overridesFile = + writeOverridesToFile( + env, getOverridesAsString(overridesForYaml), request.isRawOverrides()); + } + } else { + overridesFile = writeOverridesToFile(env, overridesString, request.isRawOverrides()); } - String overrideOption = request.isRawOverrides() ? "--set" : "--set-string"; - command.add(overrideOption); - command.add(String.join(",", overrideList)); } if (!valuePaths.isEmpty()) { command.add("--values"); command.add(valuePaths.stream().map(Path::toString).collect(Collectors.joining(","))); } + // For shorter overrides, --set/--set-string are used. + // Since --set/--set-string take precedence over --values in Helm versions 2.16.1 and + // 3.4.1, and later --values arguments override earlier ones, + // it's important to add override values at the end of the command. This ensures their + // precedence, especially when multiple --values arguments are used. + if (overridesFile != null) { + command.add("--values"); + command.add(overridesFile.toString()); + } result.setCommand(command); return result; } + + /** + * Filters and returns entries from the given map where numeric absolute values meet or exceed the + * specified threshold. + * + * @param overrides A map containing key-value pairs of Helm chart overrides. + * @param numericAbsoluteValueThreshold The minimum absolute value to consider as large. + * @return A map containing only the entries with large absolute numeric values. + */ + public static Map<String, Object> getLargeAbsoluteNumericValuedEntries( + Map<String, Object> overrides, long numericAbsoluteValueThreshold) { + Map<String, Object> largeMap = new HashMap<>(); + + for (Map.Entry<String, Object> entry : overrides.entrySet()) { + Object value = entry.getValue(); + long numericValue = 0; + + if (value instanceof Number) { + numericValue = ((Number) value).longValue(); + } else if (value instanceof String) { + try { + numericValue = Long.parseLong((String) value); + } catch (NumberFormatException e) { + // Value is not a number, so skip this entry. + continue; + } + } + + if (Math.abs(numericValue) >= numericAbsoluteValueThreshold) { + largeMap.put(entry.getKey(), value); + } + } + + return largeMap; + } + /** + * Constructs a comma-separated string representation of the given overrides map in the format + * "key1=value1,key2=value2,...". + * + * @param overrides A map containing key-value pairs of Helm chart overrides. + * @return A string representation of the overrides suitable for Helm command usage. + */ + private String getOverridesAsString(Map<String, Object> overrides) { + List<String> overrideList = buildOverrideList(overrides); + return String.join(",", overrideList); + } + /** + * Writes Helm chart overrides to a YAML file. This method takes a --set-style overrides string, + * parses it back into a map, and writes that map to a YAML file with a unique filename (generated + * using a prefix, a random UUID, and a file extension), resolving the file path within the + * specified environment. + * + * @param env The environment providing context for file resolution.
+ * @param overridesString helm --set argument string + * @param rawOverrides If true, preserves the original data types in the overrides map. If false, + * converts all values to strings before writing to the YAML file. + * @return The path to the created YAML file containing the Helm chart overrides. + * @throws IllegalStateException If an error occurs during the file writing process. + */ + private Path writeOverridesToFile( + BakeManifestEnvironment env, String overridesString, boolean rawOverrides) { + String fileName = OVERRIDES_FILE_PREFIX + UUID.randomUUID().toString() + YML_FILE_EXTENSION; + Path filePath = env.resolvePath(fileName); + Map helmParsedMap = null; + HelmSetArgumentParser helmSetArgumentParser = + new HelmSetArgumentParser(overridesString, !rawOverrides); + try { + helmParsedMap = helmSetArgumentParser.parse(); + } catch (IOException e) { + throw new UncheckedIOException("error while parsing overrides string to yaml", e); + } + try (Writer writer = Files.newBufferedWriter(filePath)) { + yamlObjectMapper.writeValue(writer, helmParsedMap); + if (log.isDebugEnabled()) + log.debug( + "Created overrides file at {} with the following contents:{}{}", + filePath.toString(), + System.lineSeparator(), + new String(Files.readAllBytes(filePath))); + } catch (IOException ioException) { + throw new IllegalStateException( + String.format("failed to write override yaml file %s.", filePath.toString()) + + ioException.getMessage(), + ioException); + } + return filePath; + } } diff --git a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtils.java b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtils.java index 3abb66c953..f5590d5938 100644 --- a/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtils.java +++ b/rosco-manifests/src/main/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtils.java @@ -16,6 +16,8 @@ package com.netflix.spinnaker.rosco.manifests.helmfile; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStore; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStoreConfigurationProperties; import com.netflix.spinnaker.kork.artifacts.model.Artifact; import com.netflix.spinnaker.rosco.jobs.BakeRecipe; import com.netflix.spinnaker.rosco.manifests.ArtifactDownloader; @@ -28,6 +30,7 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; @@ -41,8 +44,10 @@ public class HelmfileTemplateUtils extends HelmBakeTemplateUtils artifactStore, + ArtifactStoreConfigurationProperties artifactStoreConfig, RoscoHelmfileConfigurationProperties helmfileConfigurationProperties) { - super(artifactDownloader); + super(artifactDownloader, artifactStore, artifactStoreConfig.getHelm()); this.helmfileConfigurationProperties = helmfileConfigurationProperties; } @@ -105,10 +110,7 @@ public BakeRecipe buildCommand( Map overrides = request.getOverrides(); if (!overrides.isEmpty()) { - List overrideList = new ArrayList<>(); - for (Map.Entry entry : overrides.entrySet()) { - overrideList.add(entry.getKey() + "=" + entry.getValue().toString()); - } + List overrideList = buildOverrideList(overrides); command.add("--set"); command.add(String.join(",", overrideList)); } diff --git 
a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/ArtifactDownloaderImplTest.java b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/ArtifactDownloaderImplTest.java index 041ad76b45..9d03b963ad 100644 --- a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/ArtifactDownloaderImplTest.java +++ b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/ArtifactDownloaderImplTest.java @@ -30,6 +30,7 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import okhttp3.Request; import okhttp3.ResponseBody; import org.junit.jupiter.api.Test; import retrofit2.Call; @@ -58,11 +59,12 @@ public void downloadsArtifactContent() throws IOException { @Test public void retries() throws IOException { ArtifactDownloaderImpl artifactDownloader = new ArtifactDownloaderImpl(clouddriverService); + Request request = new Request.Builder().url("http://some-url").build(); String testContent = "abcdefg"; try (ArtifactDownloaderImplTest.AutoDeletingFile file = new AutoDeletingFile()) { when(clouddriverService.fetchArtifact(testArtifact)).thenReturn(mockCall); when(mockCall.execute()) - .thenThrow(new SpinnakerNetworkException(new IOException("timeout"))) + .thenThrow(new SpinnakerNetworkException(new IOException("timeout"), request)) .thenReturn(successfulResponse(testContent)); artifactDownloader.downloadArtifactToFile(testArtifact, file.path); diff --git a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmSetArgumentParserTest.java b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmSetArgumentParserTest.java new file mode 100644 index 0000000000..90dfb3fcc8 --- /dev/null +++ b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmSetArgumentParserTest.java @@ -0,0 +1,256 @@ +/* + * Copyright 2024 Salesforce, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.netflix.spinnaker.rosco.manifests.helm; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +/** + * Tests for HelmSetArgumentParser, ensuring it correctly parses string inputs into structured maps. + * This class includes tests for various input scenarios, including basic key-value pairs, nested + * structures, lists, type inference, and error handling. The tests are inspired by Helm's own + * parser tests, adapted to validate the Java implementation's compatibility with expected Helm + * behavior. Reference to Helm's Go parser tests for comparison and validation: Helm Go Parser + * Tests. 
+ */ +public class HelmSetArgumentParserTest { + + private static Stream helmParseArgs() { + + return Stream.of( + Arguments.of("name1=value1", Map.of("name1", "value1"), true), + Arguments.of("name1=value1", Map.of("name1", "value1"), false), + Arguments.of("long_int_string=1234567890", Map.of("long_int_string", "1234567890"), true), + Arguments.of("long_int_string=1234567890", Map.of("long_int_string", 1234567890L), false), + Arguments.of("boolean=true", Map.of("boolean", true), false), + Arguments.of("boolean=true", Map.of("boolean", "true"), true), + Arguments.of("is_null=null", mapOfEntries(entry("is_null", null)), false), + Arguments.of("is_null=null", Map.of("is_null", "null"), true), + Arguments.of("zero=0", Map.of("zero", "0"), true), + Arguments.of("zero=0", Map.of("zero", 0L), false), + Arguments.of("leading_zeros=00009", Map.of("leading_zeros", "00009"), true), + Arguments.of("leading_zeros=00009", Map.of("leading_zeros", "00009"), false), + Arguments.of( + "name1=null,f=false,t=true", Map.of("name1", "null", "f", "false", "t", "true"), true), + Arguments.of( + "name1=null,f=false,t=true", + mapOfEntries(entry("name1", null), entry("f", false), entry("t", true)), + false), + Arguments.of( + "name1=value1,name2=value2", Map.of("name1", "value1", "name2", "value2"), true), + Arguments.of( + "name1=value1,name2=value2,", Map.of("name1", "value1", "name2", "value2"), true), + Arguments.of("name1=,name2=value2", Map.of("name1", "", "name2", "value2"), true), + Arguments.of( + "name1=one\\,two,name2=three\\,four", + Map.of("name1", "one,two", "name2", "three,four"), + true), + Arguments.of( + "name1=one\\=two,name2=three\\=four", + Map.of("name1", "one=two", "name2", "three=four"), + true), + Arguments.of("a=b\\", Map.of("a", "b"), true), + Arguments.of( + "name1=one two three,name2=three two one", + Map.of("name1", "one two three", "name2", "three two one"), + true), + Arguments.of("outer.inner=value", Map.of("outer", Map.of("inner", "value")), true), + Arguments.of( + "outer.middle.inner=value", + Map.of("outer", Map.of("middle", Map.of("inner", "value"))), + true), + Arguments.of( + "outer.inner1=value,outer.inner2=value2", + Map.of( + "outer", + Map.of( + "inner1", "value", + "inner2", "value2")), + true), + Arguments.of("name1.name2=", Map.of("name1", Map.of("name2", "")), true), + Arguments.of("name1.name2=1,name1.name2=", Map.of("name1", Map.of("name2", "")), true), + Arguments.of("name1.=", Map.of(), true), + Arguments.of("name1=", Map.of("name1", ""), true), + Arguments.of("name1={value1,value2}", Map.of("name1", List.of("value1", "value2")), true), + Arguments.of("name1={value1,value2}.", Map.of("name1", List.of("value1", "value2")), true), + Arguments.of( + "name1={value1,value2},name2={value1,value2}", + Map.of( + "name1", List.of("value1", "value2"), + "name2", List.of("value1", "value2")), + true), + Arguments.of( + "name1.name2={value1,value2}", + Map.of("name1", Map.of("name2", List.of("value1", "value2"))), + true), + Arguments.of("list[0]=foo", Map.of("list", List.of("foo")), true), + Arguments.of("list[0].foo=bar", Map.of("list", List.of(Map.of("foo", "bar"))), true), + Arguments.of( + "list[0].foo=bar,list[0].hello=world", + Map.of("list", List.of(Map.of("foo", "bar", "hello", "world"))), + true), + Arguments.of("list[0]=foo,list[1]=bar", Map.of("list", List.of("foo", "bar")), true), + Arguments.of("list[0]=foo,list[1]=bar,", Map.of("list", List.of("foo", "bar")), true), + Arguments.of("list[1]=foo,list[0]=bar,", Map.of("list", List.of("bar", "foo")), true), + 
Arguments.of(".", Map.of(), true), + Arguments.of("a[0].", Map.of("a", List.of()), true), + Arguments.of("a[0][0].", Map.of("a", List.of(List.of())), true), + Arguments.of( + "list[0]=foo,list[3]=bar", + new HashMap() { + { + put("list", Arrays.asList("foo", null, null, "bar")); + } + }, + true), + Arguments.of("noval[0]", Map.of("noval", List.of()), true), + Arguments.of("noval[0]=", Map.of("noval", List.of("")), true), + Arguments.of("foo2[0]=1,foo2[1].", Map.of("foo2", List.of("1")), true), + Arguments.of( + "noval[0][1]={1,2},noval[0][0]={2,2}", + Map.of("noval", List.of(List.of(List.of("2", "2"), List.of("1", "2")))), + true), + Arguments.of( + "noval[0][1]={1,2},noval[0][0]={2,2}", + Map.of("noval", List.of(List.of(List.of(2L, 2L), List.of(1L, 2L)))), + false), + Arguments.of( + "noval[1][2]={a,b,c}", + Map.of("noval", Arrays.asList(null, Arrays.asList(null, null, List.of("a", "b", "c")))), + true), + Arguments.of("nested[0][0]=a", Map.of("nested", List.of(List.of("a"))), true), + Arguments.of( + "nested[1][1]=a", + Map.of("nested", Arrays.asList(null, Arrays.asList(null, "a"))), + true), + Arguments.of( + "name1.name2[0].foo=bar,name1.name2[1].foo=bar", + Map.of("name1", Map.of("name2", List.of(Map.of("foo", "bar"), Map.of("foo", "bar")))), + true), + Arguments.of( + "name1.name2[1].foo=bar,name1.name2[0].foo=bar", + Map.of("name1", Map.of("name2", List.of(Map.of("foo", "bar"), Map.of("foo", "bar")))), + true), + Arguments.of( + "name1.name2[1].foo=bar", + Map.of("name1", Map.of("name2", Arrays.asList(null, Map.of("foo", "bar")))), + true), + Arguments.of("key[0]=value,key=value", Map.of("key", "value"), true), + Arguments.of("key1.key2=value,key1=value", Map.of("key1", "value"), true), + Arguments.of("key=1.234", Map.of("key", "1.234"), true), + Arguments.of("key=1.234", Map.of("key", "1.234"), false)); + } + + private static Stream helmParseArgsError() { + return Stream.of( + Arguments.of("name1=value1,,,,name2=value2,", "key has no value (cannot end with ,)"), + Arguments.of("name1,name2=", "key name1 has no value (cannot end with ,)"), + Arguments.of("name1,name2=value2", "key name1 has no value (cannot end with ,)"), + Arguments.of("name1,name2=value2\\", "key name1 has no value (cannot end with ,)"), + Arguments.of("name1,name2", "key name1 has no value (cannot end with ,)"), + Arguments.of("name1.=name2", "key map name1 has no value"), + Arguments.of("name1.=,name1=name2", "key map name1 has no value"), + Arguments.of("name1.name2", "key name2 has no value"), + Arguments.of("name1.name2,name1.name3", "key name2 has no value (cannot end with ,)"), + Arguments.of("name1={", "list must terminate with '}'"), + Arguments.of("name1.name2={", "list must terminate with '}'"), + Arguments.of("name1[0]={", "list must terminate with '}'"), + Arguments.of("name1[0].name2={", "list must terminate with '}'"), + Arguments.of("name1[0].name2[0]={", "list must terminate with '}'"), + Arguments.of("name1.,name2", "key has no value (cannot end with ,)"), + Arguments.of("a[0][0]{", "unexpected data at end of array index {"), + Arguments.of( + "list[65537]=foo", "index of 65537 is greater than maximum supported index of 65536"), + Arguments.of("list[0].foo=bar,list[-30].hello=world", "negative -30 index not allowed"), + Arguments.of("list[0]=foo,list[-20]=bar", "negative -20 index not allowed"), + Arguments.of("illegal[0]name.foo=bar", "unexpected data at end of array index name"), + Arguments.of("noval[1][2]={a,b},c}", "key c} has no value"), + Arguments.of( + "illegal[0=1", + "Error parsing 
index: Expected closing bracket ']', but encountered EOF"), + Arguments.of("illegal[ab]=1", "Error parsing index: parsing 'ab': invalid syntax"), + Arguments.of(" ", "key has no value"), + Arguments.of("a=1,a.b=1", "class java.lang.String cannot be cast to class java.util.Map"), + Arguments.of("a=1,a[0]=1", "class java.lang.String cannot be cast to class java.util.List"), + Arguments.of( + "a[0]=1,a.b=1", "class java.util.ArrayList cannot be cast to class java.util.Map"), + Arguments.of( + "a.b=1,a[0]=1", "class java.util.HashMap cannot be cast to class java.util.List"), + // Max limit is 30 for helm3 "v3.13.1". helm2 (v2.14.2) + // doesn't seem to have a limit. + Arguments.of( + createNestedKey(31, "value"), + "Value name nested level is greater than maximum supported nested level of 30")); + } + + @ParameterizedTest + @MethodSource("helmParseArgs") + public void testParseSuccess(String input, Map expected, boolean stringValue) + throws Exception { + HelmSetArgumentParser helmSetArgumentParser = new HelmSetArgumentParser(input, stringValue); + Map map = helmSetArgumentParser.parse(); + assertThat(map).isEqualTo(expected); + } + + @ParameterizedTest + @MethodSource("helmParseArgsError") + public void testParseError(String input, String errorMessage) { + HelmSetArgumentParser helmSetArgumentParser = new HelmSetArgumentParser(input, true); + assertThatThrownBy(helmSetArgumentParser::parse) + .isInstanceOf(Exception.class) + .hasMessageContaining(errorMessage); + } + + private static String createNestedKey(int level, String value) { + if (level < 0) { + throw new IllegalArgumentException("Level must be at least 1"); + } + + StringBuilder stringBuilder = new StringBuilder(); + for (int i = 0; i <= level + 1; i++) { + stringBuilder.append("name").append(i); + if (i <= level) { + stringBuilder.append("."); + } + } + stringBuilder.append("=").append(value); + return stringBuilder.toString(); + } + + @SafeVarargs + private static Map mapOfEntries(Map.Entry... 
entries) { + Map map = new HashMap<>(); + for (Map.Entry entry : entries) { + map.put(entry.getKey(), entry.getValue()); + } + return map; + } + + private static Map.Entry entry(K key, V value) { + return new HashMap.SimpleEntry<>(key, value); + } +} diff --git a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtilsTest.java b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtilsTest.java index 1a6eb41e90..64aaaa2cac 100644 --- a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtilsTest.java +++ b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helm/HelmTemplateUtilsTest.java @@ -28,8 +28,13 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStore; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStoreConfigurationProperties; import com.netflix.spinnaker.kork.artifacts.model.Artifact; import com.netflix.spinnaker.kork.exceptions.SpinnakerException; import com.netflix.spinnaker.kork.retrofit.exceptions.SpinnakerHttpException; @@ -47,6 +52,9 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -60,24 +68,38 @@ import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; +import org.junit.jupiter.params.provider.ValueSource; import org.springframework.http.HttpStatus; final class HelmTemplateUtilsTest { + public static final String OVERRIDES_FILE_PATH_PATTERN = + ".*/overrides_[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\\.yml$"; + private RoscoHelmConfigurationProperties helmConfigurationProperties; private ArtifactDownloader artifactDownloader; private HelmTemplateUtils helmTemplateUtils; private HelmBakeManifestRequest bakeManifestRequest; + /** Configuration for the default values in the event artifact store had been turned off. 
*/ + private ArtifactStoreConfigurationProperties artifactStoreConfig; + + private ArtifactStoreConfigurationProperties.HelmConfig helmConfig; + @BeforeEach - private void init(TestInfo testInfo) { + void init(TestInfo testInfo) { System.out.println("--------------- Test " + testInfo.getDisplayName()); artifactDownloader = mock(ArtifactDownloader.class); - RoscoHelmConfigurationProperties helmConfigurationProperties = - new RoscoHelmConfigurationProperties(); - helmTemplateUtils = new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + helmConfigurationProperties = new RoscoHelmConfigurationProperties(); + artifactStoreConfig = new ArtifactStoreConfigurationProperties(); + helmConfig = new ArtifactStoreConfigurationProperties.HelmConfig(); + artifactStoreConfig.setHelm(helmConfig); + helmConfig.setExpandOverrides(false); + helmTemplateUtils = + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); Artifact chartArtifact = Artifact.builder().name("test-artifact").version("3").build(); bakeManifestRequest = new HelmBakeManifestRequest(); @@ -93,6 +115,16 @@ public void nullReferenceTest() throws IOException { } } + @Test + public void realNullReferenceOfOverrides() throws IOException { + bakeManifestRequest.setOverrides(null); + + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe recipe = helmTemplateUtils.buildBakeRecipe(env, bakeManifestRequest); + } + } + + @Test public void exceptionDownloading() throws IOException { // When artifactDownloader throws an exception, make sure we wrap it and get // a chance to include our own message, so the exception that goes up the @@ -144,7 +176,8 @@ public void removeTestsDirectoryTemplatesWithTests() throws IOException { RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); String output = helmTemplateUtils.removeTestsDirectoryTemplates(inputManifests); @@ -198,7 +231,8 @@ public void removeTestsDirectoryTemplatesWithoutTests() throws IOException { RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); String output = helmTemplateUtils.removeTestsDirectoryTemplates(inputManifests); @@ -213,7 +247,8 @@ public void buildBakeRecipeSelectsHelmExecutableByVersion( RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); HelmBakeManifestRequest request = new HelmBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -246,7 +281,8 @@ public void buildBakeRecipeWithGitRepoArtifact(@TempDir Path tempDir) throws IOE RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, 
helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); HelmBakeManifestRequest request = new HelmBakeManifestRequest(); @@ -286,7 +322,8 @@ public void buildBakeRecipeWithGitRepoArtifactUsingHelmChartFilePath(@TempDir Pa RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); HelmBakeManifestRequest request = new HelmBakeManifestRequest(); @@ -340,13 +377,66 @@ public void buildBakeRecipeWithGitRepoArtifactUsingHelmChartFilePath(@TempDir Pa } } + @Test + public void buildBakeRecipeIncludingHelmVersionsOptionsWithHelm3() throws IOException { + ArtifactDownloader artifactDownloader = mock(ArtifactDownloader.class); + RoscoHelmConfigurationProperties helmConfigurationProperties = + new RoscoHelmConfigurationProperties(); + HelmTemplateUtils helmTemplateUtils = + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); + + HelmBakeManifestRequest request = new HelmBakeManifestRequest(); + Artifact artifact = Artifact.builder().build(); + request.setTemplateRenderer(BakeManifestRequest.TemplateRenderer.HELM3); + request.setApiVersions("customApiVersion"); + request.setKubeVersion("customKubernetesVersion"); + request.setInputArtifacts(Collections.singletonList(artifact)); + request.setOverrides(Collections.emptyMap()); + + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe bakeRecipe = helmTemplateUtils.buildBakeRecipe(env, request); + assertTrue(bakeRecipe.getCommand().contains("--api-versions")); + assertTrue(bakeRecipe.getCommand().indexOf("--api-versions") > 3); + assertTrue(bakeRecipe.getCommand().contains("--kube-version")); + assertTrue(bakeRecipe.getCommand().indexOf("--kube-version") > 5); + } + } + + @Test + public void buildBakeRecipeIncludingHelmVersionsOptionsWithHelm2() throws IOException { + ArtifactDownloader artifactDownloader = mock(ArtifactDownloader.class); + RoscoHelmConfigurationProperties helmConfigurationProperties = + new RoscoHelmConfigurationProperties(); + HelmTemplateUtils helmTemplateUtils = + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); + + HelmBakeManifestRequest request = new HelmBakeManifestRequest(); + Artifact artifact = Artifact.builder().build(); + request.setTemplateRenderer(BakeManifestRequest.TemplateRenderer.HELM2); + request.setApiVersions("customApiVersion"); + request.setKubeVersion("customKubernetesVersion"); + request.setInputArtifacts(Collections.singletonList(artifact)); + request.setOverrides(Collections.emptyMap()); + + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe bakeRecipe = helmTemplateUtils.buildBakeRecipe(env, request); + assertTrue(bakeRecipe.getCommand().contains("--api-versions")); + assertTrue(bakeRecipe.getCommand().indexOf("--api-versions") > 3); + assertTrue(bakeRecipe.getCommand().contains("--kube-version")); + assertTrue(bakeRecipe.getCommand().indexOf("--kube-version") > 5); + } + } + @Test public void buildBakeRecipeIncludingCRDsWithHelm3() throws IOException { ArtifactDownloader artifactDownloader = mock(ArtifactDownloader.class); RoscoHelmConfigurationProperties 
helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); HelmBakeManifestRequest request = new HelmBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -363,6 +453,119 @@ public void buildBakeRecipeIncludingCRDsWithHelm3() throws IOException { } } + @ParameterizedTest( + name = + "ensureOverrides reference: {0} overrides: {1} expected: {2} artifactStoreNull: {3}, expandOverrides: {4}, overridesInValuesFile: {5}") + @MethodSource("ensureOverrides") + public void ensureOverridesGetEvaluated( + String reference, + Map overrides, + Map expectedMap, + Boolean artifactStoreNull, + Boolean expandOverrides, + Boolean overridesInValuesFile) + throws IOException { + + // Assume that expectedMap has one element, and turn it into an expected argument for + // --set/--set-string + assertThat(expectedMap).hasSize(1); + Map.Entry entry = expectedMap.entrySet().iterator().next(); + String expected = entry.getKey() + "=" + entry.getValue(); + + ArtifactStore artifactStore = null; + if (!artifactStoreNull) { + artifactStore = mock(ArtifactStore.class); + when(artifactStore.get(any())).thenReturn(Artifact.builder().reference(reference).build()); + } + RoscoHelmConfigurationProperties helmConfigurationProperties = + new RoscoHelmConfigurationProperties(); + + if (overridesInValuesFile) { + // Set the threshold so that rosco always uses a values file for + // overrides, no matter their size. + helmConfigurationProperties.setOverridesFileThreshold(1); + } + + // do not use the artifactStoreConfig field as we are trying to test various + // values of expandOverrides + ArtifactStoreConfigurationProperties artifactStoreConfiguration = + new ArtifactStoreConfigurationProperties(); + ArtifactStoreConfigurationProperties.HelmConfig helmConfig = + new ArtifactStoreConfigurationProperties.HelmConfig(); + helmConfig.setExpandOverrides(expandOverrides); + artifactStoreConfiguration.setHelm(helmConfig); + + HelmTemplateUtils helmTemplateUtils = + new HelmTemplateUtils( + artifactDownloader, + Optional.ofNullable(artifactStore), + artifactStoreConfiguration, + helmConfigurationProperties); + HelmBakeManifestRequest request = new HelmBakeManifestRequest(); + request.setOutputName("output_name"); + request.setTemplateRenderer(BakeManifestRequest.TemplateRenderer.HELM3); + request.setOverrides(overrides); + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe recipe = + helmTemplateUtils.buildCommand(request, List.of(), Path.of("template_path"), env); + if (overridesInValuesFile) { + List helmTemplateCommand = recipe.getCommand(); + assertThat(Collections.frequency(helmTemplateCommand, "--values")).isEqualTo(1); + String lastValuesArgument = + helmTemplateCommand.get(helmTemplateCommand.lastIndexOf("--values") + 1); + assertThat(lastValuesArgument).matches(OVERRIDES_FILE_PATH_PATTERN); + List overridesYamlContents = Files.readAllLines(Path.of(lastValuesArgument)); + assertThat(helmTemplateCommand).doesNotContain("--set"); + assertThat(helmTemplateCommand).doesNotContain("--set-string"); + assertThat(helmTemplateCommand).contains("--values"); + assertThat( + new ObjectMapper(new YAMLFactory()) + .readValue( + String.join(System.lineSeparator(), overridesYamlContents), + new TypeReference>() {})) + .isEqualTo(expectedMap); + } else { + 
assertThat(recipe.getCommand().contains(expected)).isTrue(); + } + } + } + + private static Stream ensureOverrides() { + return Stream.of( + Arguments.of( + "test", Map.of("foo", "ref://bar/baz"), Map.of("foo", "test"), false, true, false), + Arguments.of( + "test", Map.of("foo", "ref://bar/baz"), Map.of("foo", "test"), false, true, true), + Arguments.of( + "test", + Map.of("foo", "ref://bar/baz"), + Map.of("foo", "ref://bar/baz"), + false, + false, + false), + Arguments.of( + "test", + Map.of("foo", "ref://bar/baz"), + Map.of("foo", "ref://bar/baz"), + false, + false, + true), + Arguments.of( + "test", + Map.of("foo", "ref://bar/baz"), + Map.of("foo", "ref://bar/baz"), + true, + false, + false), + Arguments.of( + "test", + Map.of("foo", "ref://bar/baz"), + Map.of("foo", "ref://bar/baz"), + true, + false, + true)); + } + @ParameterizedTest @MethodSource("helmRendererArgsCRDs") public void buildBakeRecipeNotIncludingCRDs( @@ -372,7 +575,8 @@ public void buildBakeRecipeNotIncludingCRDs( RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); HelmTemplateUtils helmTemplateUtils = - new HelmTemplateUtils(artifactDownloader, helmConfigurationProperties); + new HelmTemplateUtils( + artifactDownloader, Optional.empty(), artifactStoreConfig, helmConfigurationProperties); HelmBakeManifestRequest request = new HelmBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -424,6 +628,120 @@ public void httpExceptionDownloading() throws IOException { } } + @Test + public void testOverrideThresholdShortEnough() throws IOException { + bakeManifestRequest.setOverrides(ImmutableMap.of("key1", "value1", "key2", "value2")); + bakeManifestRequest.setRawOverrides(false); + helmConfigurationProperties.setOverridesFileThreshold(100); + + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe recipe = helmTemplateUtils.buildBakeRecipe(env, bakeManifestRequest); + + assertThat(recipe.getCommand()).contains("--set-string"); + assertThat(recipe.getCommand()).doesNotContain("--values"); + } + } + + @Test + public void testOverrideThresholdShortEnoughWithRawOverrides() throws IOException { + bakeManifestRequest.setOverrides(ImmutableMap.of("key1", "value1", "key2", "value2")); + bakeManifestRequest.setRawOverrides(true); + helmConfigurationProperties.setOverridesFileThreshold(100); + + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe recipe = helmTemplateUtils.buildBakeRecipe(env, bakeManifestRequest); + + assertThat(recipe.getCommand()).contains("--set"); + assertThat(recipe.getCommand()).doesNotContain("--values"); + } + } + + @Test + public void testOverrideThresholdExceedsLimitWithRawOverridesAsFalse() throws IOException { + Artifact chartArtifact = Artifact.builder().name("test-artifact").build(); + Artifact valuesArtifact = Artifact.builder().name("test-artifact_values").build(); + bakeManifestRequest = new HelmBakeManifestRequest(); + bakeManifestRequest.setInputArtifacts(ImmutableList.of(chartArtifact, valuesArtifact)); + bakeManifestRequest.setOverrides( + ImmutableMap.of("key1", "valu&e1", "key2", "value2:", "key3", 1, "key4", true)); + bakeManifestRequest.setRawOverrides(false); + helmConfigurationProperties.setOverridesFileThreshold(10); + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe recipe = helmTemplateUtils.buildBakeRecipe(env, bakeManifestRequest); + List helmTemplateCommand = recipe.getCommand(); + 
assertThat(Collections.frequency(helmTemplateCommand, "--values")).isEqualTo(2); + String lastValuesArgument = + helmTemplateCommand.get(helmTemplateCommand.lastIndexOf("--values") + 1); + assertThat(lastValuesArgument).matches(OVERRIDES_FILE_PATH_PATTERN); + List overridesYamlContents = Files.readAllLines(Path.of(lastValuesArgument)); + assertThat(helmTemplateCommand).doesNotContain("--set"); + assertThat(helmTemplateCommand).doesNotContain("--set-string"); + assertThat(helmTemplateCommand).contains("--values"); + assertThat( + new ObjectMapper(new YAMLFactory()) + .readValue( + String.join(System.lineSeparator(), overridesYamlContents), + new TypeReference>() {})) + .isEqualTo( + ImmutableMap.of("key1", "valu&e1", "key2", "value2:", "key3", "1", "key4", "true")); + + assertThat(overridesYamlContents) + .containsExactly( + "---", "key1: \"valu&e1\"", "key2: \"value2:\"", "key3: \"1\"", "key4: \"true\""); + } + } + + @ParameterizedTest( + name = "testOverrideThresholdExceedsLimitWithRawOverridesAsTrue: expandOverrides {0}") + @ValueSource(booleans = {true, false}) + public void testOverrideThresholdExceedsLimitWithRawOverridesAsTrue(boolean expandOverrides) + throws IOException { + Artifact chartArtifact = Artifact.builder().name("test-artifact").build(); + Artifact valuesArtifact = Artifact.builder().name("test-artifact_values").build(); + + Map expectedOverrides = + ImmutableMap.of("key1", "valu&e1", "key2", "value2:", "key4", true); + ArtifactStore artifactStore = null; + if (expandOverrides) { + artifactStore = mock(ArtifactStore.class); + } + helmConfig.setExpandOverrides(expandOverrides); + + helmTemplateUtils = + new HelmTemplateUtils( + artifactDownloader, + Optional.ofNullable(artifactStore), + artifactStoreConfig, + helmConfigurationProperties); + + bakeManifestRequest = new HelmBakeManifestRequest(); + bakeManifestRequest.setInputArtifacts(ImmutableList.of(chartArtifact, valuesArtifact)); + bakeManifestRequest.setOverrides( + ImmutableMap.of("key1", "valu&e1", "key2", "value2:", "key3", 100000000, "key4", true)); + bakeManifestRequest.setRawOverrides(true); + helmConfigurationProperties.setOverridesFileThreshold(10); + try (BakeManifestEnvironment env = BakeManifestEnvironment.create()) { + BakeRecipe recipe = helmTemplateUtils.buildBakeRecipe(env, bakeManifestRequest); + List helmTemplateCommand = recipe.getCommand(); + String lastValuesArgument = + helmTemplateCommand.get(helmTemplateCommand.lastIndexOf("--values") + 1); + assertThat(Collections.frequency(helmTemplateCommand, "--values")).isEqualTo(2); + assertThat(lastValuesArgument).matches(OVERRIDES_FILE_PATH_PATTERN); + List overridesYamlContents = Files.readAllLines(Path.of(lastValuesArgument)); + assertThat(helmTemplateCommand).contains("--set"); + assertThat(helmTemplateCommand).contains("key3=100000000"); + assertThat(helmTemplateCommand).doesNotContain("--set-string"); + assertThat(helmTemplateCommand).contains("--values"); + assertThat( + new ObjectMapper(new YAMLFactory()) + .readValue( + String.join(System.lineSeparator(), overridesYamlContents), + new TypeReference>() {})) + .isEqualTo(expectedOverrides); + assertThat(overridesYamlContents) + .containsExactly("---", "key1: \"valu&e1\"", "key2: \"value2:\"", "key4: true"); + } + } /** * Add a helm chart for testing * diff --git a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtilsTest.java b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtilsTest.java index 
91d76ebf8f..46bc62a122 100644 --- a/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtilsTest.java +++ b/rosco-manifests/src/test/java/com/netflix/spinnaker/rosco/manifests/helmfile/HelmfileTemplateUtilsTest.java @@ -30,6 +30,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import com.netflix.spinnaker.kork.artifacts.artifactstore.ArtifactStoreConfigurationProperties; import com.netflix.spinnaker.kork.artifacts.model.Artifact; import com.netflix.spinnaker.kork.exceptions.SpinnakerException; import com.netflix.spinnaker.kork.retrofit.exceptions.SpinnakerHttpException; @@ -49,6 +50,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; import org.apache.commons.compress.archivers.tar.TarArchiveEntry; @@ -72,15 +74,26 @@ final class HelmfileTemplateUtilsTest { private HelmfileBakeManifestRequest bakeManifestRequest; + private ArtifactStoreConfigurationProperties artifactStoreConfig; + @BeforeEach - private void init(TestInfo testInfo) { + void init(TestInfo testInfo) { System.out.println("--------------- Test " + testInfo.getDisplayName()); artifactDownloader = mock(ArtifactDownloader.class); RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); + artifactStoreConfig = new ArtifactStoreConfigurationProperties(); + ArtifactStoreConfigurationProperties.HelmConfig helmConfig = + new ArtifactStoreConfigurationProperties.HelmConfig(); + artifactStoreConfig.setHelm(helmConfig); + helmConfig.setExpandOverrides(false); helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); Artifact chartArtifact = Artifact.builder().name("test-artifact").version("3").build(); bakeManifestRequest = new HelmfileBakeManifestRequest(); @@ -177,7 +190,11 @@ public void removeTestsDirectoryTemplatesWithTests() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); String output = helmfileTemplateUtils.removeTestsDirectoryTemplates(inputManifests); @@ -231,7 +248,11 @@ public void removeTestsDirectoryTemplatesWithoutTests() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); String output = helmfileTemplateUtils.removeTestsDirectoryTemplates(inputManifests); @@ -246,7 +267,11 @@ public void buildBakeRecipeSelectsHelm3ExecutableWhenNoneSet( RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new 
HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); RoscoHelmConfigurationProperties helmConfigurationProperties = new RoscoHelmConfigurationProperties(); @@ -280,7 +305,11 @@ public void buildBakeRecipeWithGitRepoArtifact(@TempDir Path tempDir) throws IOE RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); @@ -320,7 +349,11 @@ public void buildBakeRecipeWithGitRepoArtifactUsingHelmfileFilePath(@TempDir Pat RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); @@ -459,7 +492,11 @@ public void buildBakeRecipeIncludesEnvironmentWhenSet() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -483,7 +520,11 @@ public void buildBakeRecipeDoesNotIncludeEnvironmentWhenNotSet() throws IOExcept RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -502,7 +543,11 @@ public void buildBakeRecipeIncludesNamespaceWhenSet() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -526,7 +571,11 @@ public void buildBakeRecipeDoesNotIncludeNamespaceWhenNotSet() throws IOExceptio RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + 
helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -545,7 +594,11 @@ public void buildBakeRecipeIncludingCRDsWithHelm3() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -567,7 +620,11 @@ public void buildBakeRecipeNotIncludingCRDsWithHelm3() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -586,7 +643,11 @@ public void buildBakeRecipeIncludesOverridesWhenSet() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -610,7 +671,11 @@ public void buildBakeRecipeDoesNotIncludeOverridesWhenNotSet() throws IOExceptio RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); Artifact artifact = Artifact.builder().build(); @@ -629,7 +694,11 @@ public void buildBakeRecipeIncludesValuesWhenSet() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); HelmfileBakeManifestRequest request = new HelmfileBakeManifestRequest(); @@ -656,7 +725,11 @@ public void buildBakeRecipeDoesNotIncludeValuesWhenNotSet() throws IOException { RoscoHelmfileConfigurationProperties helmfileConfigurationProperties = new RoscoHelmfileConfigurationProperties(); HelmfileTemplateUtils helmfileTemplateUtils = - new HelmfileTemplateUtils(artifactDownloader, helmfileConfigurationProperties); + new HelmfileTemplateUtils( + artifactDownloader, + Optional.empty(), + artifactStoreConfig, + helmfileConfigurationProperties); 
diff --git a/rosco-web/pkg_scripts/postInstall.sh b/rosco-web/pkg_scripts/postInstall.sh
index ff531bf55d..b8f6f6b652 100755
--- a/rosco-web/pkg_scripts/postInstall.sh
+++ b/rosco-web/pkg_scripts/postInstall.sh
@@ -16,6 +16,14 @@ if [ -z `getent passwd spinnaker` ]; then
   useradd --gid spinnaker spinnaker -m --home-dir /home/spinnaker
 fi
 
+ARCH=$(uname -m)
+if [ "$ARCH" = "x86_64" ]; then
+  ARCH="amd64"
+fi
+
+# Get the PACKER_PLUGINS environment variable or set a default value
+PACKER_PLUGINS=${PACKER_PLUGINS:-"amazon azure googlecompute"}
+
 create_temp_dir() {
   TEMPDIR=$(mktemp -d /tmp/installrosco.XXXX)
   cd $TEMPDIR
@@ -27,13 +35,19 @@ remove_temp_dir() {
 }
 
 install_packer() {
-  PACKER_VERSION="1.8.1"
-  local packer_version=$(/usr/bin/packer --version)
+  PACKER_VERSION="1.10.1"
+  local packer_version="$(/usr/bin/packer --version)"
   local packer_status=$?
-  if [ $packer_status -ne 0 ] || [ "$packer_version" != "$PACKER_VERSION" ]; then
-    wget https://releases.hashicorp.com/packer/${PACKER_VERSION}/packer_${PACKER_VERSION}_linux_amd64.zip
-    unzip -o "packer_${PACKER_VERSION}_linux_amd64.zip" -d /usr/bin
+
+  if [ $packer_status -ne 0 ] || ! echo "$packer_version" | grep -q "$PACKER_VERSION"; then
+    wget https://releases.hashicorp.com/packer/${PACKER_VERSION}/packer_${PACKER_VERSION}_linux_${ARCH}.zip
+    unzip -o "packer_${PACKER_VERSION}_linux_${ARCH}.zip" -d /usr/bin
   fi
+
+  # Install plugins as the "spinnaker" user
+  for plugin in $PACKER_PLUGINS; do
+    su - spinnaker -c "/usr/bin/packer plugins install \"github.com/hashicorp/$plugin\""
+  done
 }
 
 install_helm() {
diff --git a/rosco-web/rosco-web.gradle b/rosco-web/rosco-web.gradle
index c5517c8c3b..d14d9dde1d 100644
--- a/rosco-web/rosco-web.gradle
+++ b/rosco-web/rosco-web.gradle
@@ -11,9 +11,9 @@ dependencies {
   implementation project(":rosco-core")
   implementation project(":rosco-manifests")
   implementation "io.spinnaker.kork:kork-web"
-  implementation "io.swagger:swagger-annotations"
+  implementation "io.swagger.core.v3:swagger-annotations"
 
-  implementation "org.codehaus.groovy:groovy"
+  implementation "org.apache.groovy:groovy"
   implementation "io.spinnaker.kork:kork-artifacts"
   implementation "io.spinnaker.kork:kork-config"
   implementation "org.springframework.boot:spring-boot-starter-web"
@@ -28,6 +28,8 @@ dependencies {
   testImplementation "com.squareup.retrofit2:retrofit"
   testImplementation "com.squareup.retrofit2:converter-jackson"
   testImplementation "com.github.tomakehurst:wiremock-jre8-standalone"
+  testImplementation "org.apache.commons:commons-compress:1.21"
+  testImplementation "org.assertj:assertj-core"
 
   testCompileOnly "com.squareup.retrofit:retrofit"
 }
diff --git a/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/Main.groovy b/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/Main.groovy
index d98596f697..2cfa29a03f 100644
--- a/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/Main.groovy
+++ b/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/Main.groovy
@@ -38,8 +38,6 @@ import com.netflix.spinnaker.rosco.services.ServiceConfig
 import org.springframework.beans.factory.annotation.Qualifier
 import org.springframework.boot.autoconfigure.EnableAutoConfiguration
 import org.springframework.boot.autoconfigure.batch.BatchAutoConfiguration
-import org.springframework.boot.autoconfigure.condition.ConditionalOnBean
-import org.springframework.boot.autoconfigure.condition.ConditionalOnExpression
 import org.springframework.boot.autoconfigure.groovy.template.GroovyTemplateAutoConfiguration
 import org.springframework.boot.builder.SpringApplicationBuilder
 import org.springframework.boot.web.servlet.support.SpringBootServletInitializer
@@ -104,13 +102,11 @@ class Main extends SpringBootServletInitializer {
   }
 
   @Bean
-  @ConditionalOnExpression('${artifact-store.enabled:false}')
   EmbeddedArtifactSerializer artifactSerializer(ArtifactStore store, @Qualifier("artifactObjectMapper") ObjectMapper objectMapper) {
     return new EmbeddedArtifactSerializer(objectMapper, store);
   }
 
   @Bean
-  @ConditionalOnBean(EmbeddedArtifactSerializer.class)
   ObjectMapper objectMapper(Jackson2ObjectMapperBuilder builder, EmbeddedArtifactSerializer serializer) {
     return builder.createXmlMapper(false)
       .serializerByType(Artifact.class, serializer)
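
With @ConditionalOnExpression and @ConditionalOnBean removed, the EmbeddedArtifactSerializer and the Artifact-aware ObjectMapper appear to be registered unconditionally rather than only when artifact-store.enabled is set. A rough Java sketch of the resulting wiring is below; the real code is Groovy in Main.groovy, the trailing .build() call and the artifactObjectMapper bean are assumptions (the hunk's context ends before them), and imports for the kork/rosco artifact types are omitted.

    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;

    @Configuration
    class ArtifactSerializationSketch {
      // A plain mapper for the serializer itself, matching the @Qualifier("artifactObjectMapper")
      // seen in Main.groovy; its real definition is not part of this diff.
      @Bean
      ObjectMapper artifactObjectMapper() {
        return new ObjectMapper();
      }

      // Always registered now: no artifact-store.enabled condition.
      @Bean
      EmbeddedArtifactSerializer artifactSerializer(
          ArtifactStore store, @Qualifier("artifactObjectMapper") ObjectMapper objectMapper) {
        return new EmbeddedArtifactSerializer(objectMapper, store);
      }

      // The primary ObjectMapper always picks up the custom Artifact serializer.
      @Bean
      ObjectMapper objectMapper(
          Jackson2ObjectMapperBuilder builder, EmbeddedArtifactSerializer serializer) {
        return builder
            .createXmlMapper(false)
            .serializerByType(Artifact.class, serializer)
            .build(); // .build() is assumed, not shown in the hunk
      }
    }
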
diff --git a/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/controllers/BakeryController.groovy b/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/controllers/BakeryController.groovy
index 048a044968..201120f565 100644
--- a/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/controllers/BakeryController.groovy
+++ b/rosco-web/src/main/groovy/com/netflix/spinnaker/rosco/controllers/BakeryController.groovy
@@ -31,8 +31,8 @@ import com.netflix.spinnaker.rosco.providers.registry.CloudProviderBakeHandlerRe
 import com.netflix.spinnaker.security.AuthenticatedRequest
 import groovy.transform.InheritConstructors
 import groovy.util.logging.Slf4j
-import io.swagger.annotations.ApiOperation
-import io.swagger.annotations.ApiParam
+import io.swagger.v3.oas.annotations.Operation
+import io.swagger.v3.oas.annotations.Parameter
 import org.springframework.beans.factory.annotation.Autowired
 import org.springframework.beans.factory.annotation.Value
 import org.springframework.http.HttpStatus
@@ -222,10 +222,10 @@ class BakeryController {
     }
   }
 
-  @ApiOperation(value = "Look up bake request status")
+  @Operation(summary = "Look up bake request status")
   @RequestMapping(value = "/api/v1/{region}/status/{statusId}", method = RequestMethod.GET)
-  BakeStatus lookupStatus(@ApiParam(value = "The region of the bake request to lookup", required = true) @PathVariable("region") String region,
-                          @ApiParam(value = "The id of the bake request to lookup", required = true) @PathVariable("statusId") String statusId) {
+  BakeStatus lookupStatus(@Parameter(description = "The region of the bake request to lookup", required = true) @PathVariable("region") String region,
+                          @Parameter(description = "The id of the bake request to lookup", required = true) @PathVariable("statusId") String statusId) {
     def bakeStatus = bakeStore.retrieveBakeStatusById(statusId)
 
     if (bakeStatus) {
@@ -235,10 +235,10 @@ class BakeryController {
     throw new IllegalArgumentException("Unable to retrieve status for '$statusId'.")
   }
 
-  @ApiOperation(value = "Look up bake details")
+  @Operation(summary = "Look up bake details")
   @RequestMapping(value = "/api/v1/{region}/bake/{bakeId}", method = RequestMethod.GET)
-  Bake lookupBake(@ApiParam(value = "The region of the bake to lookup", required = true) @PathVariable("region") String region,
-                  @ApiParam(value = "The id of the bake to lookup", required = true) @PathVariable("bakeId") String bakeId) {
+  Bake lookupBake(@Parameter(description = "The region of the bake to lookup", required = true) @PathVariable("region") String region,
+                  @Parameter(description = "The id of the bake to lookup", required = true) @PathVariable("bakeId") String bakeId) {
     def bake = bakeStore.retrieveBakeDetailsById(bakeId)
 
     if (bake) {
@@ -317,10 +317,10 @@ class BakeryController {
   }
 
   // TODO(duftler): Synchronize this with existing bakery api.
-  @ApiOperation(value = "Cancel bake request")
+  @Operation(summary = "Cancel bake request")
   @RequestMapping(value = "/api/v1/{region}/cancel/{statusId}", method = RequestMethod.GET)
-  String cancelBake(@ApiParam(value = "The region of the bake request to cancel", required = true) @PathVariable("region") String region,
-                    @ApiParam(value = "The id of the bake request to cancel", required = true) @PathVariable("statusId") String statusId) {
+  String cancelBake(@Parameter(description = "The region of the bake request to cancel", required = true) @PathVariable("region") String region,
+                    @Parameter(description = "The id of the bake request to cancel", required = true) @PathVariable("statusId") String statusId) {
     if (bakeStore.cancelBakeById(statusId)) {
       jobExecutor.cancelJob(statusId)
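
The controller hunks above swap the Swagger 2 annotations (io.swagger.annotations.ApiOperation/ApiParam) for their OpenAPI 3 counterparts (io.swagger.v3.oas.annotations.Operation/Parameter), with value= becoming summary= on operations and description= on parameters. The same pattern on a plain Java Spring MVC handler looks roughly like this; the real controller is Groovy and uses @RequestMapping, so the class, mapping style and return value here are illustrative only.

    import io.swagger.v3.oas.annotations.Operation;
    import io.swagger.v3.oas.annotations.Parameter;
    import org.springframework.web.bind.annotation.GetMapping;
    import org.springframework.web.bind.annotation.PathVariable;
    import org.springframework.web.bind.annotation.RestController;

    @RestController
    class StatusDocsSketch {
      // Mirrors the annotation change applied to BakeryController.lookupStatus.
      @Operation(summary = "Look up bake request status")
      @GetMapping("/api/v1/{region}/status/{statusId}")
      String lookupStatus(
          @Parameter(description = "The region of the bake request to lookup", required = true)
              @PathVariable("region") String region,
          @Parameter(description = "The id of the bake request to lookup", required = true)
              @PathVariable("statusId") String statusId) {
        return region + ":" + statusId; // placeholder body for the sketch
      }
    }
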
diff --git a/rosco-web/src/test/groovy/com/netflix/spinnaker/rosco/controllers/BakeryControllerSpec.groovy b/rosco-web/src/test/groovy/com/netflix/spinnaker/rosco/controllers/BakeryControllerSpec.groovy
index 7374fd36d8..d4f2e83c69 100644
--- a/rosco-web/src/test/groovy/com/netflix/spinnaker/rosco/controllers/BakeryControllerSpec.groovy
+++ b/rosco-web/src/test/groovy/com/netflix/spinnaker/rosco/controllers/BakeryControllerSpec.groovy
@@ -720,7 +720,7 @@ class BakeryControllerSpec extends Specification {
       def result = bakeryController.bakeOptions()
 
     then:
-      result.size == 2
+      result.size() == 2
      result.find { it.cloudProvider == "aws" }.baseImages[0].id == "santa"
      result.find { it.cloudProvider == "gce" }.baseImages[0].id == "claus"
diff --git a/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerTest.java b/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerTest.java
index 7866eb92ed..35930bac9b 100644
--- a/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerTest.java
+++ b/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerTest.java
@@ -29,6 +29,7 @@ import com.netflix.spinnaker.kork.retrofit.exceptions.SpinnakerHttpException;
 import com.netflix.spinnaker.kork.retrofit.exceptions.SpinnakerServerException;
 import com.netflix.spinnaker.rosco.Main;
+import com.netflix.spinnaker.rosco.executor.BakePoller;
 import com.netflix.spinnaker.rosco.manifests.helm.HelmBakeManifestRequest;
 import com.netflix.spinnaker.rosco.services.ClouddriverService;
 import java.nio.charset.StandardCharsets;
@@ -54,6 +55,8 @@ class V2BakeryControllerTest {
   @Autowired private WebApplicationContext webApplicationContext;
 
+  @MockBean BakePoller bakePoller;
+
   @MockBean ClouddriverService clouddriverService;
 
   @Autowired ObjectMapper objectMapper;
@@ -61,7 +64,7 @@ class V2BakeryControllerTest {
   private HelmBakeManifestRequest bakeManifestRequest;
 
   @BeforeEach
-  private void init(TestInfo testInfo) {
+  void init(TestInfo testInfo) {
     System.out.println("--------------- Test " + testInfo.getDisplayName());
 
     webAppMockMvc = webAppContextSetup(webApplicationContext).build();
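
V2BakeryControllerTest now mocks BakePoller alongside ClouddriverService, presumably so the background poller stays inert while MockMvc drives the controller. A stripped-down Java sketch of that arrangement follows; the test class name and the trivial test body are illustrative, while Main, BakePoller and the MockMvc setup call match the imports shown in the hunks above.

    import static org.springframework.test.web.servlet.setup.MockMvcBuilders.webAppContextSetup;

    import com.netflix.spinnaker.rosco.Main;
    import com.netflix.spinnaker.rosco.executor.BakePoller;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.boot.test.context.SpringBootTest;
    import org.springframework.boot.test.mock.mockito.MockBean;
    import org.springframework.test.web.servlet.MockMvc;
    import org.springframework.web.context.WebApplicationContext;

    @SpringBootTest(classes = Main.class)
    class BakePollerMockingSketch {
      // Replaces the real BakePoller bean with a Mockito mock so no background polling runs.
      @MockBean BakePoller bakePoller;

      @Autowired WebApplicationContext webApplicationContext;

      MockMvc mockMvc;

      @BeforeEach
      void init() {
        mockMvc = webAppContextSetup(webApplicationContext).build();
      }

      @Test
      void contextLoadsWithoutPolling() {
        // With BakePoller mocked, the context starts without touching real bake backends.
      }
    }
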
diff --git a/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerWithClouddriverServiceTest.java b/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerWithClouddriverServiceTest.java
index d7113d05c9..1e9e44f197 100644
--- a/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerWithClouddriverServiceTest.java
+++ b/rosco-web/src/test/java/com/netflix/spinnaker/rosco/controllers/V2BakeryControllerWithClouddriverServiceTest.java
@@ -95,7 +95,7 @@ static void registerClouddriverBaseUrl(DynamicPropertyRegistry registry) {
   }
 
   @BeforeEach
-  private void init(TestInfo testInfo) {
+  void init(TestInfo testInfo) {
     System.out.println("--------------- Test " + testInfo.getDisplayName());
 
     webAppMockMvc =
diff --git a/settings.gradle b/settings.gradle
index 53e004d6a1..6be99e6d84 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -8,7 +8,11 @@
 
 rootProject.name = 'rosco'
 
-include 'rosco-bom', 'rosco-core', 'rosco-web', 'rosco-manifests'
+include 'rosco-bom',
+  'rosco-core',
+  'rosco-integration',
+  'rosco-manifests',
+  'rosco-web'
 
 def setBuildFile(project) {
   project.buildFileName = "${project.name}.gradle"