diff --git a/avm-transpiler/Earthfile b/avm-transpiler/Earthfile
index e4db70ea091..95199fbe3d2 100644
--- a/avm-transpiler/Earthfile
+++ b/avm-transpiler/Earthfile
@@ -12,11 +12,7 @@ source:
 build:
     FROM +source
     # build avm transpiler, and make sure the big build and deps folders don't hit cache
-    DO ../build-system/s3-cache-scripts/+WITH_CACHE \
-        --prefix="avm-transpiler" \
-        --rebuild_patterns="../noir/.rebuild_patterns_native .rebuild_patterns" \
-        --command="./scripts/bootstrap_native.sh && rm -rf target/release/{build,deps}" \
-        --build_artifacts="target"
+    RUN ./scripts/bootstrap_native.sh && rm -rf target/release/{build,deps}
     SAVE ARTIFACT target/release/avm-transpiler avm-transpiler
     SAVE ARTIFACT scripts/compile_then_transpile.sh
diff --git a/avm-transpiler/bootstrap_cache.sh b/avm-transpiler/bootstrap_cache.sh
index fdedcb627fd..4959fd6b980 100755
--- a/avm-transpiler/bootstrap_cache.sh
+++ b/avm-transpiler/bootstrap_cache.sh
@@ -2,7 +2,10 @@
 set -eu
 
 cd "$(dirname "$0")"
+source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null
 
 echo -e "\033[1mRetrieving avm-transpiler from remote cache...\033[0m"
-HASH=$(AZTEC_CACHE_REBUILD_PATTERNS="../noir/.rebuild_patterns_native .rebuild_patterns" ../build-system/s3-cache-scripts/compute-content-hash.sh)
-../build-system/s3-cache-scripts/cache-download.sh avm-transpiler-$HASH.tar.gz
+extract_repo_if_working_copy_clean avm-transpiler \
+  /usr/src/avm-transpiler/target/release/avm-transpiler ./target/release/
+
+remove_old_images avm-transpiler
diff --git a/barretenberg/cpp/Earthfile b/barretenberg/cpp/Earthfile
index 72b3fb7f218..3c2c7ad5b51 100644
--- a/barretenberg/cpp/Earthfile
+++ b/barretenberg/cpp/Earthfile
@@ -131,7 +131,7 @@ preset-sol:
 
 preset-wasm-threads:
     FROM +source
-    DO +CACHE_BUILD_BIN --prefix=preset-wasm-threads \
+    DO +CACHE_BUILD_BIN --prefix=preset-wasm-threads-v1 \
        --command="cmake --preset wasm-threads -Bbuild && cmake --build build --target barretenberg.wasm"
     SAVE ARTIFACT build/bin
diff --git a/barretenberg/cpp/bootstrap_cache.sh b/barretenberg/cpp/bootstrap_cache.sh
index 062551fa4ce..0066d75a3d9 100755
--- a/barretenberg/cpp/bootstrap_cache.sh
+++ b/barretenberg/cpp/bootstrap_cache.sh
@@ -2,30 +2,16 @@
 set -eu
 
 cd "$(dirname "$0")"
+source ../../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null
 
 echo -e "\033[1mRetrieving bb binary from remote cache...\033[0m"
+extract_repo_if_working_copy_clean barretenberg-x86_64-linux-clang \
+  /usr/src/barretenberg/cpp/build/bin ./build \
+  /usr/src/barretenberg/cpp/build-pic/lib ./build-pic
-SCRIPTS_PATH=../../build-system/s3-cache-scripts/
-HASH=$(AZTEC_CACHE_REBUILD_PATTERNS=.rebuild_patterns $SCRIPTS_PATH/compute-content-hash.sh)
-TMP=$(mktemp -d)
+echo -e "\033[1mRetrieving bb.wasm from remote cache...\033[0m"
+extract_repo_if_working_copy_clean barretenberg-wasm-linux-clang \
+  /usr/src/barretenberg/cpp/build-wasm/bin ./build-wasm \
+  /usr/src/barretenberg/cpp/build-wasm-threads/bin ./build-wasm-threads
 
-function on_exit() {
-  rm -rf "$TMP"
-}
-trap on_exit EXIT
-
-# Parallel download of all the cached builds because they're quite big
-echo "
-barretenberg-preset-wasm
-barretenberg-preset-wasm-threads
-barretenberg-preset-release
-barretenberg-preset-release-world-state
-" | xargs --max-procs 0 -I {} bash -c "$SCRIPTS_PATH/cache-download.sh {}-$HASH.tar.gz $TMP/{}"
-
-# # clobber the existing build with the cached build
-cp -r $TMP/barretenberg-preset-wasm/build build-wasm/
-cp -r $TMP/barretenberg-preset-wasm-threads/build build-wasm-threads/
-
-mkdir -p build
-cp -r $TMP/barretenberg-preset-release/build/* build/
-cp -r $TMP/barretenberg-preset-release-world-state/build/* build/
+remove_old_images barretenberg-wasm-linux-clang
diff --git a/barretenberg/ts/Earthfile b/barretenberg/ts/Earthfile
index 798bb685e37..e4df38e4c5d 100644
--- a/barretenberg/ts/Earthfile
+++ b/barretenberg/ts/Earthfile
@@ -6,11 +6,12 @@ CACHE:
     ARG build_artifacts
     ARG prefix
     # TODO(#8929): reinstate bb.js caching
-    DO ../../build-system/s3-cache-scripts/+WITH_CACHE \
-        --prefix="bb.js-$prefix" \
-        --command="$command" \
-        --rebuild_patterns="../cpp/.rebuild_patterns .rebuild_patterns" \
-        --build_artifacts="$build_artifacts"
+    RUN $command
+    # DO ../../build-system/s3-cache-scripts/+WITH_CACHE \
+    #     --prefix="bb.js-$prefix" \
+    #     --command="$command" \
+    #     --rebuild_patterns="../cpp/.rebuild_patterns .rebuild_patterns" \
+    #     --build_artifacts="$build_artifacts"
 
 deps:
     FROM ../../build-images+from-registry
@@ -33,7 +34,7 @@ deps:
 esm:
     FROM +deps
     DO +CACHE \
-        --prefix="esm" \
+        --prefix="browser" \
        --command="yarn build:esm" \
        --build_artifacts="."
     SAVE ARTIFACT /usr/src/barretenberg/ts build
diff --git a/barretenberg/ts/bootstrap_cache.sh b/barretenberg/ts/bootstrap_cache.sh
index 06967c486a3..8cfbfabaaa6 100755
--- a/barretenberg/ts/bootstrap_cache.sh
+++ b/barretenberg/ts/bootstrap_cache.sh
@@ -4,29 +4,9 @@ set -eu
 cd "$(dirname "$0")"
 source ../../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null
-CACHE_SCRIPTS=../../build-system/s3-cache-scripts
-
 echo -e "\033[1mRetrieving bb.js from remote cache...\033[0m"
-TMP=$(mktemp -d)
-
-function on_exit() {
-  rm -rf "$TMP"
-}
-trap on_exit EXIT
-
-HASH=$(AZTEC_CACHE_REBUILD_PATTERNS="../cpp/.rebuild_patterns .rebuild_patterns" $CACHE_SCRIPTS/compute-content-hash.sh)
-
-# Parallel download of all the cached builds because they're quite big
-echo "
-bb.js-esm
-bb.js-cjs
-bb.js-browser
-" | xargs --max-procs 0 -I {} bash -c "$CACHE_SCRIPTS/cache-download.sh {}-$HASH.tar.gz $TMP/{}"
-
-mkdir -p dest
-cp -r $TMP/bb.js-esm/dest/* dest/
-cp -r $TMP/bb.js-cjs/dest/* dest/
-cp -r $TMP/bb.js-browser/dest/* dest/
-
+extract_repo_if_working_copy_clean bb.js /usr/src/barretenberg/ts/dest .
 # Annoyingly we still need to install modules, so they can be found as part of module resolution when portalled.
 yarn install
+
+remove_old_images bb.js
diff --git a/bootstrap.sh b/bootstrap.sh
index b0341af730c..2378c85ec1e 100755
--- a/bootstrap.sh
+++ b/bootstrap.sh
@@ -128,7 +128,7 @@ elif [ "$CMD" = "full" ]; then
 elif [ "$CMD" = "fast" ]; then
   export USE_CACHE=1
   if ! can_use_ci_cache; then
-    echo -e "${BOLD}${YELLOW}WARNING: AWS credentials are missing. Note this is for internal aztec devs only.${RESET}"
+    echo -e "${BOLD}${YELLOW}WARNING: Either docker or aws credentials are missing. Install docker and request credentials. Note this is for internal aztec devs only.${RESET}"
     exit 1
   fi
 elif [ "$CMD" = "check" ]; then
diff --git a/build-system/s3-cache-scripts/cache-download.sh b/build-system/s3-cache-scripts/cache-download.sh
index fbe626c2b74..be8be95a63a 100755
--- a/build-system/s3-cache-scripts/cache-download.sh
+++ b/build-system/s3-cache-scripts/cache-download.sh
@@ -1,14 +1,13 @@
 #!/bin/bash
 set -eu
 
-if [ "$#" -lt 1 ]; then
+if [ "$#" -ne 1 ]; then
   echo "Usage: $0 "
   exit 1
 fi
 
 # Get the tar.gz file name from the argument
 TAR_FILE="$1"
-OUT_DIR="${2:-.}"
 
 function on_exit() {
   # Cleanup the temporary tar.gz file
@@ -21,7 +20,6 @@ trap on_exit EXIT
 aws ${S3_BUILD_CACHE_AWS_PARAMS:-} s3 cp "s3://aztec-ci-artifacts/build-cache/$TAR_FILE" "$TAR_FILE" --quiet --no-progress
 
 # Extract the cache file
-mkdir -p "$OUT_DIR"
-tar -xzf "$TAR_FILE" -C "$OUT_DIR"
+tar -xzf "$TAR_FILE"
 
-echo "Cache download and extraction of $TAR_FILE complete."
+echo "Cache download and extraction complete."
diff --git a/build-system/s3-cache-scripts/cache-upload.sh b/build-system/s3-cache-scripts/cache-upload.sh
index 3dd2397db83..d574d246e90 100755
--- a/build-system/s3-cache-scripts/cache-upload.sh
+++ b/build-system/s3-cache-scripts/cache-upload.sh
@@ -24,4 +24,4 @@ trap on_exit EXIT
 
 # Rest of args are our binary paths
 tar -czf "$TAR_FILE" $@
-aws ${S3_BUILD_CACHE_AWS_PARAMS:-} s3 cp "$TAR_FILE" "s3://aztec-ci-artifacts/build-cache/$NAME"
+aws ${S3_BUILD_CACHE_AWS_PARAMS:-} s3 cp "$TAR_FILE" "s3://aztec-ci-artifacts/build-cache/$NAME"
\ No newline at end of file
diff --git a/build-system/scripts/can_use_ci_cache b/build-system/scripts/can_use_ci_cache
index b3dd6bdd240..2831dabb2bb 100755
--- a/build-system/scripts/can_use_ci_cache
+++ b/build-system/scripts/can_use_ci_cache
@@ -1,9 +1,3 @@
 #!/usr/bin/env bash
 set -euo pipefail
-if [ ! -z ${AWS_ACCESS_KEY_ID:-} ] && [ ! -z ${AWS_SECRET_ACCESS_KEY:-} ]; then
-  exit 0
-elif [ -f ~/.aws/credentials ]; then
-  exit 0
-else
-  exit 1
-fi
+type docker &> /dev/null && docker ps 2>&1 > /dev/null && [ -f ~/.aws/credentials ] && ecr_login > /dev/null || exit 1
\ No newline at end of file
diff --git a/l1-contracts/.rebuild_patterns b/l1-contracts/.rebuild_patterns
deleted file mode 100644
index 09a492e2260..00000000000
--- a/l1-contracts/.rebuild_patterns
+++ /dev/null
@@ -1 +0,0 @@
-^l1-contracts/src/.*\.sol$
diff --git a/l1-contracts/Earthfile b/l1-contracts/Earthfile
index 011d4b98215..9ad5d1000f6 100644
--- a/l1-contracts/Earthfile
+++ b/l1-contracts/Earthfile
@@ -1,26 +1,15 @@
 VERSION 0.8
 
-src:
+build:
     FROM ../build-images+from-registry
    WORKDIR /usr/src/l1-contracts
    COPY --dir lib src terraform test *.json *.toml *.sh .
-    DO ../build-system/s3-cache-scripts/+WRITE_CONTENT_HASH --rebuild_patterns=".rebuild_patterns"
-
-lint:
-    FROM +src
+    #RUN git init && git add . && yarn lint && yarn slither && yarn slither-has-diff
+    # "slither": "forge clean && forge build --build-info --skip '*/test/**' --force && slither . --checklist --ignore-compile --show-ignored-findings --config-file ./slither.config.json | tee slither_output.md",
+    # "slither-has-diff": "./slither_has_diff.sh"
     RUN solhint --config ./.solhint.json --fix "src/**/*.sol"
     RUN forge clean && forge fmt --check
-
-build:
-    FROM +lint
-    WORKDIR /usr/src/l1-contracts
-
-    DO ../build-system/s3-cache-scripts/+WITH_CACHE \
-        --prefix="l1-contracts" \
-        --rebuild_patterns=".rebuild_patterns" \
-        --command="forge build" \
-        --build_artifacts="out"
-
+    RUN forge build
     SAVE ARTIFACT /usr/src/l1-contracts /usr/src/l1-contracts
 
 test:
diff --git a/l1-contracts/bootstrap_cache.sh b/l1-contracts/bootstrap_cache.sh
index 6f509bc0574..2cb1f5ad049 100755
--- a/l1-contracts/bootstrap_cache.sh
+++ b/l1-contracts/bootstrap_cache.sh
@@ -2,7 +2,9 @@
 set -eu
 
 cd "$(dirname "$0")"
+source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null
 
 echo -e "\033[1mRetrieving contracts from remote cache...\033[0m"
-HASH=$(AZTEC_CACHE_REBUILD_PATTERNS=.rebuild_patterns ../build-system/s3-cache-scripts/compute-content-hash.sh)
-../build-system/s3-cache-scripts/cache-download.sh l1-contracts-$HASH.tar.gz
+extract_repo_if_working_copy_clean l1-contracts /usr/src/l1-contracts/out .
+
+remove_old_images l1-contracts
diff --git a/noir-projects/Earthfile b/noir-projects/Earthfile
index 3cd70133ddb..4345a432089 100644
--- a/noir-projects/Earthfile
+++ b/noir-projects/Earthfile
@@ -1,82 +1,69 @@
 VERSION 0.8
 
-deps:
-    LOCALLY
-    LET bb_source_hash = $(cd .. && git ls-tree -r HEAD | grep 'barretenberg/cpp' | awk '{print $3}' | git hash-object --stdin)
-
+source:
     FROM ../build-images+from-registry
+
+    # Install nargo
     COPY ../noir/+nargo/nargo /usr/bin/nargo
-    COPY ../barretenberg/cpp/+preset-release/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
-    RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY mkdir -p ~/.aws && \
-        bash -c 'echo -e "[default]\naws_access_key_id=$AWS_ACCESS_KEY_ID\naws_secret_access_key=$AWS_SECRET_ACCESS_KEY" > ~/.aws/credentials'
-    ENV BB_HASH=$bb_source_hash
-    ENV NARGO=nargo
+    # Install world state napi
+    COPY ../barretenberg/cpp/+preset-release-world-state/bin/world_state_napi.node /usr/src/barretenberg/cpp/build/bin/world_state_napi.node
 
-source:
-    FROM +deps
     WORKDIR /usr/src/noir-projects
-    COPY package.json yarn.lock .
-    RUN yarn
-
-    COPY mega_honk_circuits.json .
+    # Copy source.
+    COPY package.json yarn.lock mega_honk_circuits.json .
     COPY --dir aztec-nr noir-contracts noir-protocol-circuits mock-protocol-circuits scripts .
 
 build-contracts:
     ARG RAYON_NUM_THREADS
-
+    LOCALLY
+    LET bb_source_hash = $(cd .. && git ls-tree -r HEAD | grep 'barretenberg/cpp' | awk '{print $3}' | git hash-object --stdin)
     FROM +source
-    ENV RAYON_NUM_THREADS=$RAYON_NUM_THREADS
+    RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY mkdir -p ~/.aws && \
+        bash -c 'echo -e "[default]\naws_access_key_id=$AWS_ACCESS_KEY_ID\naws_secret_access_key=$AWS_SECRET_ACCESS_KEY" > ~/.aws/credentials'
+
+    # Install bb
+    COPY ../barretenberg/cpp/+preset-release/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
 
     # Install transpiler
     COPY ../avm-transpiler/+build/avm-transpiler /usr/bin/avm-transpiler
-    ENV TRANSPILER=avm-transpiler
-
-    WORKDIR /usr/src/noir-projects/noir-contracts
-    DO ../build-system/s3-cache-scripts/+WITH_CACHE \
-        --prefix="noir-projects-noir-contracts" \
-        --rebuild_patterns="../../noir/.rebuild_patterns_native ../../avm-transpiler/.rebuild_patterns ../../barretenberg/cpp/.rebuild_patterns .rebuild_patterns" \
-        --command="./bootstrap.sh" \
-        --build_artifacts="target"
-    WORKDIR /usr/src/noir-projects
+    RUN yarn
+    ENV RAYON_NUM_THREADS=$RAYON_NUM_THREADS
+    RUN cd noir-contracts && NARGO=nargo TRANSPILER=avm-transpiler ./bootstrap.sh
     SAVE ARTIFACT noir-contracts
 
 build-protocol-circuits:
     ARG RAYON_NUM_THREADS
     LOCALLY
+    LET bb_source_hash = $(cd .. && git ls-tree -r HEAD | grep 'barretenberg/cpp' | awk '{print $3}' | git hash-object --stdin)
     FROM +source
+    RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY mkdir -p ~/.aws && \
+        bash -c 'echo -e "[default]\naws_access_key_id=$AWS_ACCESS_KEY_ID\naws_secret_access_key=$AWS_SECRET_ACCESS_KEY" > ~/.aws/credentials'
 
-    ENV RAYON_NUM_THREADS=$RAYON_NUM_THREADS
-    ENV PARALLEL_VK=false
-
-    WORKDIR /usr/src/noir-projects/noir-protocol-circuits
-
-    DO ../build-system/s3-cache-scripts/+WITH_CACHE \
-        --prefix="noir-projects-noir-protocol-circuits" \
-        --rebuild_patterns="../../noir/.rebuild_patterns_native ../../barretenberg/cpp/.rebuild_patterns .rebuild_patterns" \
-        --command="./bootstrap.sh" \
-        --build_artifacts="target Nargo.toml private_kernel_reset_dimensions.json crates/autogenerated"
+    # Install bb
+    COPY ../barretenberg/cpp/+preset-release/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
+    RUN yarn
 
-    WORKDIR /usr/src/noir-projects
+    ENV RAYON_NUM_THREADS=$RAYON_NUM_THREADS
+    RUN cd noir-protocol-circuits && BB_HASH=$bb_source_hash NARGO=nargo PARALLEL_VK=false ./bootstrap.sh
     SAVE ARTIFACT noir-protocol-circuits
 
 build-mock-protocol-circuits:
     ARG RAYON_NUM_THREADS
     LOCALLY
+    LET bb_source_hash = $(cd .. && git ls-tree -r HEAD | grep 'barretenberg/cpp' | awk '{print $3}' | git hash-object --stdin)
     FROM +source
+    RUN --secret AWS_ACCESS_KEY_ID --secret AWS_SECRET_ACCESS_KEY mkdir -p ~/.aws && \
+        bash -c 'echo -e "[default]\naws_access_key_id=$AWS_ACCESS_KEY_ID\naws_secret_access_key=$AWS_SECRET_ACCESS_KEY" > ~/.aws/credentials'
 
-    ENV RAYON_NUM_THREADS=$RAYON_NUM_THREADS
-    ENV PARALLEL_VK=false
-
-    WORKDIR /usr/src/noir-projects/mock-protocol-circuits
-    DO ../build-system/s3-cache-scripts/+WITH_CACHE \
-        --prefix="noir-projects-mock-protocol-circuits" \
-        --rebuild_patterns="../../noir/.rebuild_patterns_native ../../barretenberg/cpp/.rebuild_patterns .rebuild_patterns" \
-        --command="./bootstrap.sh" \
-        --build_artifacts="target"
+    # Install bb
+    COPY ../barretenberg/cpp/+preset-release/bin/bb /usr/src/barretenberg/cpp/build/bin/bb
+    RUN yarn
 
-    WORKDIR /usr/src/noir-projects
+    ENV RAYON_NUM_THREADS=$RAYON_NUM_THREADS
+    RUN echo "building with num threads $RAYON_NUM_THREADS"
+    RUN cd mock-protocol-circuits && BB_HASH=$bb_source_hash NARGO=nargo PARALLEL_VK=false ./bootstrap.sh
     SAVE ARTIFACT mock-protocol-circuits
 
 build:
@@ -122,7 +109,7 @@ format:
     FROM +source
 
     WORKDIR /usr/src/noir-projects/noir-protocol-circuits
-    RUN node ./scripts/generate_variants.js
+    RUN yarn && node ./scripts/generate_variants.js
     RUN nargo fmt --check
 
     WORKDIR /usr/src/noir-projects/mock-protocol-circuits
diff --git a/noir-projects/bootstrap_cache.sh b/noir-projects/bootstrap_cache.sh
index df73bf279a4..97cf5134182 100755
--- a/noir-projects/bootstrap_cache.sh
+++ b/noir-projects/bootstrap_cache.sh
@@ -2,19 +2,16 @@
 set -eu
 
 cd "$(dirname "$0")"
-
-SCRIPTS_PATH=../build-system/s3-cache-scripts/
+source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null
 
 echo -e "\033[1mRetrieving noir projects from remote cache...\033[0m"
+extract_repo_if_working_copy_clean noir-projects \
+  /usr/src/noir-projects/noir-contracts/target ./noir-contracts \
+  /usr/src/noir-projects/noir-protocol-circuits/target ./noir-protocol-circuits
 
-PROTOCOL_CIRCUITS_HASH=$(AZTEC_CACHE_REBUILD_PATTERNS="../noir/.rebuild_patterns_native ../barretenberg/cpp/.rebuild_patterns ./noir-protocol-circuits/.rebuild_patterns" $SCRIPTS_PATH/compute-content-hash.sh)
-MOCK_CIRCUITS_HASH=$(AZTEC_CACHE_REBUILD_PATTERNS="../noir/.rebuild_patterns_native ../barretenberg/cpp/.rebuild_patterns ./mock-protocol-circuits/.rebuild_patterns" $SCRIPTS_PATH/compute-content-hash.sh)
-CONTRACTS_HASH=$(AZTEC_CACHE_REBUILD_PATTERNS="../noir/.rebuild_patterns_native ../avm-transpiler/.rebuild_patterns ../barretenberg/cpp/.rebuild_patterns noir-contracts/.rebuild_patterns" $SCRIPTS_PATH/compute-content-hash.sh)
-
-echo "
-noir-protocol-circuits $PROTOCOL_CIRCUITS_HASH
-mock-protocol-circuits $MOCK_CIRCUITS_HASH
-noir-contracts $CONTRACTS_HASH
-" | xargs --max-procs 0 --max-args 2 bash -c "$SCRIPTS_PATH/cache-download.sh noir-projects-\$0-\$1.tar.gz \$0"
+remove_old_images noir-projects
 
 yarn
+
+./mock-protocol-circuits/bootstrap.sh
+(cd ./noir-protocol-circuits && yarn && node ./scripts/generate_variants.js)
\ No newline at end of file
diff --git a/noir-projects/mock-protocol-circuits/.rebuild_patterns b/noir-projects/mock-protocol-circuits/.rebuild_patterns
deleted file mode 100644
index 332509323a3..00000000000
--- a/noir-projects/mock-protocol-circuits/.rebuild_patterns
+++ /dev/null
@@ -1,4 +0,0 @@
-^noir-projects/mock-protocol-circuits/.*\.(nr|toml)$
-^noir-projects/mock-protocol-circuits/bootstrap.sh$
-^noir-projects/noir-protocol-circuits/crates/types/.*\.(nr|toml)$
-^noir-projects/scripts/generate_vk_json.js$
diff --git a/noir-projects/noir-contracts/.rebuild_patterns b/noir-projects/noir-contracts/.rebuild_patterns
deleted file mode 100644
index 7feb1d9bfbf..00000000000
--- a/noir-projects/noir-contracts/.rebuild_patterns
+++ /dev/null
@@ -1,4 +0,0 @@
-^noir-projects/noir-contracts/.*\.(nr|toml|sh|json|js)$
-^noir-projects/aztec-nr/.*\.(nr|toml)$
-^noir-projects/noir-protocol-circuits/crates/types/.*\.(nr|toml)$
-^noir-projects/scripts/generate_vk_json.js$
diff --git a/noir-projects/noir-protocol-circuits/.rebuild_patterns b/noir-projects/noir-protocol-circuits/.rebuild_patterns
deleted file mode 100644
index 110634d45d7..00000000000
--- a/noir-projects/noir-protocol-circuits/.rebuild_patterns
+++ /dev/null
@@ -1,4 +0,0 @@
-^noir-projects/noir-protocol-circuits/.*\.(nr|toml|json)$
-^noir-projects/noir-protocol-circuits/bootstrap.sh$
-^noir-projects/noir-protocol-circuits/scripts/generate_variants.js$
-^noir-projects/scripts/generate_vk_json.js$
diff --git a/noir-projects/noir-protocol-circuits/bootstrap.sh b/noir-projects/noir-protocol-circuits/bootstrap.sh
index c90b7e00328..f1e2f981940 100755
--- a/noir-projects/noir-protocol-circuits/bootstrap.sh
+++ b/noir-projects/noir-protocol-circuits/bootstrap.sh
@@ -58,4 +58,6 @@ else
   for pathname in "./target"/*.json; do
     BB_HASH=$BB_HASH node ../scripts/generate_vk_json.js "$pathname" "./target/keys"
   done
+
+
 fi
diff --git a/noir-projects/scripts/generate_vk_json.js b/noir-projects/scripts/generate_vk_json.js
index ca3bc6b3008..6f4c6a3da88 100644
--- a/noir-projects/scripts/generate_vk_json.js
+++ b/noir-projects/scripts/generate_vk_json.js
@@ -56,9 +56,6 @@ async function removeFunctionArtifact(artifactPath) {
 
 async function getBytecodeHash(artifactPath) {
   const { bytecode } = JSON.parse(await fs.readFile(artifactPath));
-  if (!bytecode) {
-    throw new Error("No bytecode found in artifact: " + artifactPath);
-  }
   return crypto.createHash("md5").update(bytecode).digest("hex");
 }
diff --git a/noir/bootstrap_cache.sh b/noir/bootstrap_cache.sh
index 8f7eee62d18..d4f2d006401 100755
--- a/noir/bootstrap_cache.sh
+++ b/noir/bootstrap_cache.sh
@@ -2,11 +2,12 @@
 set -eu
 
 cd "$(dirname "$0")"
+source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null
 
 echo -e "\033[1mRetrieving noir packages from remote cache...\033[0m"
-NATIVE_HASH=$(AZTEC_CACHE_REBUILD_PATTERNS=.rebuild_patterns_native ../build-system/s3-cache-scripts/compute-content-hash.sh)
-../build-system/s3-cache-scripts/cache-download.sh noir-nargo-$NATIVE_HASH.tar.gz
-
+extract_repo_if_working_copy_clean noir-packages /usr/src/noir/packages ./
 echo -e "\033[1mRetrieving nargo from remote cache...\033[0m"
-PACKAGES_HASH=$(AZTEC_CACHE_REBUILD_PATTERNS="../barretenberg/cpp/.rebuild_patterns .rebuild_patterns_packages" ../build-system/s3-cache-scripts/compute-content-hash.sh)
-../build-system/s3-cache-scripts/cache-download.sh noir-packages-$PACKAGES_HASH.tar.gz
+extract_repo_if_working_copy_clean noir /usr/src/noir/noir-repo/target/release ./noir-repo/target/
+
+remove_old_images noir-packages
+remove_old_images noir
diff --git a/scripts/earthly-local b/scripts/earthly-local
index bb9991fb8e4..50bf07187b6 100755
--- a/scripts/earthly-local
+++ b/scripts/earthly-local
@@ -10,7 +10,8 @@ function start_minio() {
     # Already started
     return
   fi
-  docker run -d -p 12000:9000 -p 12001:12001 -v minio-data:/data \
+  mkdir -p ~/.minio/data
+  docker run -d -p 12000:9000 -p 12001:12001 -v ~/.minio/data:/data \
     quay.io/minio/minio server /data --console-address ":12001"
   # make our cache bucket
   AWS_ACCESS_KEY_ID="minioadmin" AWS_SECRET_ACCESS_KEY="minioadmin" aws --endpoint-url http://localhost:12000 s3 mb s3://aztec-ci-artifacts 2>/dev/null || true
@@ -29,11 +30,6 @@ if ! git diff-index --quiet HEAD --; then
   S3_BUILD_CACHE_MINIO_URL=""
 elif [ ! -z "${AWS_ACCESS_KEY_ID:-}" ] ; then
   S3_BUILD_CACHE_DOWNLOAD=true
-elif [ -f ~/.aws/credentials ]; then
-  # make credentials avaialble to earthly
-  AWS_ACCESS_KEY_ID=$(aws configure get default.aws_access_key_id)
-  AWS_SECRET_ACCESS_KEY=$(aws configure get default.aws_secret_access_key)
-  S3_BUILD_CACHE_DOWNLOAD=true
 else
   S3_BUILD_CACHE_UPLOAD=false
   S3_BUILD_CACHE_DOWNLOAD=false