From 217de090b15feb38d5cccfb21867cfd94edf0061 Mon Sep 17 00:00:00 2001
From: ludamad
Date: Mon, 8 Jan 2024 13:28:19 -0500
Subject: [PATCH] chore: noir sync (#3884)

Needed a bit of a graft here:
- Looked at the current commit in noir/.gitrepo: it pointed at a 'just nargo compile' commit, so fixed it to the actual commit on the aztec-packages branch of noir
- Fixed the parent commit
- Did a git subrepo noir pull and fixed the merge conflicts per its instructions
- This PR is the result
---
 noir/.github/workflows/publish-acvm.yml | 4 +-
 noir/.github/workflows/publish-docs.yml | 13 +-
 noir/.github/workflows/release.yml | 15 -
 noir/.github/workflows/spellcheck.yml | 32 +-
 noir/.gitrepo | 4 +-
 noir/CONTRIBUTING.md | 4 +
 noir/Cargo.lock | 866 +++++++++++++----
 noir/Cargo.toml | 12 +-
 noir/README.md | 4 +-
 noir/acvm-repo/CHANGELOG.md | 8 +-
 noir/acvm-repo/acir/CHANGELOG.md | 8 +-
 noir/acvm-repo/acir/acir_docs.md | 40 +-
 noir/acvm-repo/acir/codegen/acir.cpp | 24 +-
 noir/acvm-repo/acir/src/circuit/mod.rs | 2 +-
 noir/acvm-repo/acir/src/circuit/opcodes.rs | 4 +-
 .../acir/src/native_types/expression/mod.rs | 14 +-
 .../acir/tests/test_program_serialization.rs | 2 +-
 noir/acvm-repo/acir_field/src/generic_ark.rs | 5 +-
 noir/acvm-repo/acvm/CHANGELOG.md | 8 +-
 noir/acvm-repo/acvm/Cargo.toml | 2 +-
 .../acvm/src/compiler/optimizers/mod.rs | 10 +-
 .../compiler/optimizers/redundant_range.rs | 91 +-
 .../acvm/src/compiler/transformers/csat.rs | 14 +-
 .../acvm/src/compiler/transformers/mod.rs | 11 +-
 noir/acvm-repo/acvm/src/pwg/arithmetic.rs | 30 +-
 noir/acvm-repo/acvm/src/pwg/memory_op.rs | 4 +-
 noir/acvm-repo/acvm/src/pwg/mod.rs | 10 +-
 noir/acvm-repo/acvm/tests/solver.rs | 22 +-
 noir/acvm-repo/acvm_js/Cargo.toml | 14 +-
 noir/acvm-repo/acvm_js/package.json | 15 +-
 noir/acvm-repo/acvm_js/src/execute.rs | 9 +-
 noir/acvm-repo/acvm_js/src/lib.rs | 2 +-
 noir/acvm-repo/acvm_js/src/logging.rs | 31 +-
 .../test/browser/execute_circuit.test.ts | 4 +-
 .../acvm_js/test/node/execute_circuit.test.ts | 4 +-
 .../CHANGELOG.md | 0
 .../Cargo.toml | 5 +-
 .../build.rs | 0
 .../src/acvm_backend.wasm | Bin
 .../src/fixed_base_scalar_mul.rs | 0
 .../src/lib.rs | 21 +-
 .../src/wasm/barretenberg_structures.rs | 0
 .../src/wasm/mod.rs | 0
 .../src/wasm/pedersen.rs | 0
 .../src/wasm/schnorr.rs | 0
 noir/aztec_macros/src/lib.rs | 6 +-
 noir/compiler/fm/src/file_map.rs | 2 +-
 noir/compiler/fm/src/lib.rs | 2 +-
 noir/compiler/noirc_driver/Cargo.toml | 2 +-
 noir/compiler/noirc_driver/src/lib.rs | 36 +-
 noir/compiler/noirc_errors/Cargo.toml | 2 +-
 noir/compiler/noirc_errors/src/debug_info.rs | 3 +-
 noir/compiler/noirc_evaluator/Cargo.toml | 2 +-
 .../brillig/brillig_gen/brillig_black_box.rs | 41 +-
 noir/compiler/noirc_evaluator/src/errors.rs | 4 +-
 noir/compiler/noirc_evaluator/src/ssa.rs | 17 +-
 .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 171 +++-
 .../ssa/acir_gen/acir_ir/generated_acir.rs | 4 +-
 .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 55 +-
 .../src/ssa/function_builder/mod.rs | 25 +-
 .../noirc_evaluator/src/ssa/opt/array_use.rs | 1 +
 .../src/ssa/opt/assert_constant.rs | 1 +
 .../src/ssa/opt/constant_folding.rs | 1 +
 .../src/ssa/opt/defunctionalize.rs | 1 +
 .../noirc_evaluator/src/ssa/opt/die.rs | 1 +
 .../src/ssa/opt/fill_internal_slices.rs | 1 +
 .../src/ssa/opt/flatten_cfg.rs | 106 +--
 .../noirc_evaluator/src/ssa/opt/inlining.rs | 1 +
 .../noirc_evaluator/src/ssa/opt/mem2reg.rs | 1 +
 .../src/ssa/opt/simplify_cfg.rs | 1 +
 .../noirc_evaluator/src/ssa/opt/unrolling.rs | 1 +
 .../src/ssa/ssa_gen/context.rs | 56 +-
 .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 24 +-
noir/compiler/noirc_frontend/Cargo.toml | 2 +- .../src/hir/def_collector/dc_crate.rs | 60 +- noir/compiler/noirc_frontend/src/hir/mod.rs | 35 +- .../src/hir/resolution/import.rs | 37 +- .../src/hir/resolution/resolver.rs | 8 +- .../src/hir/resolution/traits.rs | 7 + .../noirc_frontend/src/hir/type_check/expr.rs | 195 ++-- .../noirc_frontend/src/hir/type_check/mod.rs | 8 +- .../noirc_frontend/src/hir_def/expr.rs | 6 + .../src/monomorphization/ast.rs | 2 +- .../src/monomorphization/mod.rs | 120 ++- .../noirc_frontend/src/node_interner.rs | 138 ++- noir/compiler/noirc_frontend/src/tests.rs | 1 + noir/compiler/noirc_printable_type/Cargo.toml | 1 + noir/compiler/noirc_printable_type/src/lib.rs | 3 + .../source-resolver/lib-node/index.js | 32 - .../source-resolver/lib-node/index.js.map | 1 - .../source-resolver/lib-node/index_node.js | 20 - .../lib-node/index_node.js.map | 1 - noir/compiler/source-resolver/lib/index.js | 27 - .../compiler/source-resolver/lib/index.js.map | 1 - noir/compiler/wasm/Cargo.toml | 5 +- noir/compiler/wasm/package.json | 11 +- noir/compiler/wasm/src/circuit.rs | 18 - noir/compiler/wasm/src/compile.rs | 9 +- noir/compiler/wasm/src/compile_new.rs | 4 +- noir/compiler/wasm/src/lib.rs | 23 +- noir/cspell.json | 31 +- noir/deny.toml | 1 + .../docs/explainers/explainer-recursion.md | 10 +- .../docs/getting_started/create_a_project.md | 4 +- .../getting_started/installation/index.md | 4 +- .../installation/other_install_methods.md | 6 +- .../docs/getting_started/project_breakdown.md | 2 +- .../docs/getting_started/tooling/testing.md | 4 +- noir/docs/docs/how_to/how-to-recursion.md | 16 +- noir/docs/docs/index.md | 16 +- noir/docs/docs/migration_notes.md | 6 +- .../noir/concepts}/_category_.json | 4 +- .../docs/noir/{syntax => concepts}/assert.md | 0 .../noir/{syntax => concepts}/comments.md | 0 .../noir/{syntax => concepts}/control_flow.md | 0 .../noir/{syntax => concepts}/data_bus.md | 0 .../data_types/_category_.json | 0 .../{syntax => concepts}/data_types/arrays.md | 0 .../data_types/booleans.md | 0 .../{syntax => concepts}/data_types/fields.md | 0 .../concepts}/data_types/function_types.md | 2 +- .../noir/concepts}/data_types/index.md | 4 +- .../data_types/integers.md | 0 .../data_types/references.md | 0 .../data_types/slices.mdx | 0 .../data_types/strings.md | 0 .../data_types/structs.md | 0 .../{syntax => concepts}/data_types/tuples.md | 0 .../data_types/vectors.mdx | 0 .../noir/{syntax => concepts}/distinct.md | 0 .../noir/{syntax => concepts}/functions.md | 0 .../noir/{syntax => concepts}/generics.md | 0 .../docs/noir/{syntax => concepts}/lambdas.md | 0 .../noir/concepts}/mutability.md | 4 +- .../noir/syntax => docs/noir/concepts}/ops.md | 2 +- .../noir/{syntax => concepts}/shadowing.md | 0 .../noir => noir/concepts}/traits.md | 0 .../noir/concepts}/unconstrained.md | 2 +- .../modules_packages_crates/dependencies.md | 8 +- .../noir/modules_packages_crates/modules.md | 2 +- .../noir/standard_library/black_box_fns.md | 2 +- .../docs/noir/standard_library/logging.md | 2 +- .../docs/docs/noir/standard_library/traits.md | 284 ++++++ noir/docs/docusaurus.config.ts | 4 + noir/docs/src/css/custom.css | 50 +- .../explanations/standard_library/traits.md | 140 --- .../getting_started/00_nargo_installation.md | 4 +- .../getting_started/02_breakdown.md | 2 +- .../versioned_docs/version-v0.17.0/index.md | 6 +- .../language_concepts/03_ops.md | 2 +- .../language_concepts/05_unconstrained.md | 2 +- .../language_concepts/07_mutability.md | 4 +- .../language_concepts/data_types.md | 2 
+- .../version-v0.17.0/migration_notes.md | 4 +- .../modules_packages_crates/dependencies.md | 8 +- .../modules_packages_crates/modules.md | 2 +- .../version-v0.17.0/nargo/02_testing.md | 4 +- .../noir_js/reference/01_noirjs.md | 2 +- .../noir_js/reference/02_bb_backend.md | 2 +- .../standard_library/black_box_fns.md | 2 +- .../standard_library/logging.md | 2 +- .../getting_started/00_nargo_installation.md | 6 +- .../getting_started/02_breakdown.md | 2 +- .../versioned_docs/version-v0.19.0/index.md | 6 +- .../language_concepts/03_ops.md | 2 +- .../language_concepts/05_unconstrained.md | 2 +- .../language_concepts/07_mutability.md | 4 +- .../language_concepts/data_types.md | 2 +- .../version-v0.19.0/migration_notes.md | 4 +- .../modules_packages_crates/dependencies.md | 8 +- .../modules_packages_crates/modules.md | 2 +- .../version-v0.19.0/nargo/02_testing.md | 4 +- .../noir_js/reference/01_noirjs.md | 2 +- .../noir_js/reference/02_bb_backend.md | 2 +- .../noir_js/reference/noir_js/classes/Noir.md | 2 +- .../standard_library/black_box_fns.md | 2 +- .../standard_library/logging.md | 2 +- .../getting_started/00_nargo_installation.md | 6 +- .../getting_started/02_breakdown.md | 2 +- .../versioned_docs/version-v0.19.1/index.md | 6 +- .../language_concepts/03_ops.md | 2 +- .../language_concepts/05_unconstrained.md | 2 +- .../language_concepts/07_mutability.md | 4 +- .../language_concepts/data_types.md | 2 +- .../version-v0.19.1/migration_notes.md | 4 +- .../modules_packages_crates/dependencies.md | 8 +- .../modules_packages_crates/modules.md | 2 +- .../version-v0.19.1/nargo/02_testing.md | 4 +- .../noir_js/reference/noir_js/classes/Noir.md | 2 +- .../standard_library/black_box_fns.md | 2 +- .../standard_library/logging.md | 2 +- .../getting_started/00_nargo_installation.md | 6 +- .../getting_started/02_breakdown.md | 2 +- .../versioned_docs/version-v0.19.2/index.md | 6 +- .../language_concepts/03_ops.md | 2 +- .../language_concepts/05_unconstrained.md | 2 +- .../language_concepts/07_mutability.md | 4 +- .../language_concepts/data_types.md | 2 +- .../version-v0.19.2/migration_notes.md | 4 +- .../modules_packages_crates/dependencies.md | 8 +- .../modules_packages_crates/modules.md | 2 +- .../version-v0.19.2/nargo/02_testing.md | 4 +- .../noir_js/reference/noir_js/classes/Noir.md | 2 +- .../standard_library/black_box_fns.md | 2 +- .../standard_library/logging.md | 2 +- .../getting_started/00_nargo_installation.md | 6 +- .../getting_started/02_breakdown.md | 2 +- .../versioned_docs/version-v0.19.3/index.md | 6 +- .../language_concepts/03_ops.md | 2 +- .../language_concepts/05_unconstrained.md | 2 +- .../language_concepts/07_mutability.md | 4 +- .../language_concepts/data_types.md | 2 +- .../version-v0.19.3/migration_notes.md | 4 +- .../modules_packages_crates/dependencies.md | 8 +- .../modules_packages_crates/modules.md | 2 +- .../version-v0.19.3/nargo/02_testing.md | 4 +- .../noir_js/reference/noir_js/classes/Noir.md | 2 +- .../standard_library/black_box_fns.md | 2 +- .../standard_library/logging.md | 2 +- .../getting_started/00_nargo_installation.md | 6 +- .../getting_started/02_breakdown.md | 2 +- .../versioned_docs/version-v0.19.4/index.md | 6 +- .../language_concepts/03_ops.md | 2 +- .../language_concepts/05_unconstrained.md | 2 +- .../language_concepts/07_mutability.md | 4 +- .../language_concepts/data_types.md | 2 +- .../version-v0.19.4/migration_notes.md | 4 +- .../modules_packages_crates/dependencies.md | 8 +- .../modules_packages_crates/modules.md | 2 +- 
.../version-v0.19.4/nargo/02_testing.md | 4 +- .../noir_js/reference/noir_js/classes/Noir.md | 2 +- .../standard_library/black_box_fns.md | 2 +- .../standard_library/logging.md | 2 +- .../explainers/explainer-recursion.md | 2 +- .../explanations/noir/traits.md | 0 .../explanations/standard_library/traits.md | 0 .../getting_started/_category_.json | 0 .../getting_started/create_a_project.md | 0 .../installation/_category_.json | 0 .../getting_started/installation/index.md | 0 .../installation/other_install_methods.md | 6 +- .../getting_started/project_breakdown.md | 2 +- .../getting_started/tooling/_category_.json | 0 .../getting_started/tooling/index.md | 0 .../tooling/language_server.md | 0 .../getting_started/tooling/testing.md | 4 +- .../how_to/_category_.json | 0 .../how_to/how-to-recursion.md | 8 +- .../how_to/merkle-proof.mdx | 0 .../how_to/solidity_verifier.md | 0 .../{version-v.. => version-v0.22.0}/index.md | 5 +- .../migration_notes.md | 4 +- .../modules_packages_crates/_category_.json | 0 .../crates_and_packages.md | 0 .../modules_packages_crates/dependencies.md | 8 +- .../noir/modules_packages_crates/modules.md | 2 +- .../modules_packages_crates/workspaces.md | 0 .../noir/standard_library/_category_.json | 0 .../noir/standard_library/black_box_fns.md | 2 +- .../cryptographic_primitives/_category_.json | 0 .../cryptographic_primitives/ec_primitives.md | 0 .../ecdsa_sig_verification.mdx | 0 .../cryptographic_primitives/eddsa.mdx | 0 .../cryptographic_primitives/hashes.mdx | 0 .../cryptographic_primitives/index.md | 0 .../cryptographic_primitives/scalar.mdx | 0 .../cryptographic_primitives/schnorr.mdx | 0 .../noir/standard_library/logging.md | 2 +- .../noir/standard_library/merkle_trees.md | 0 .../noir/standard_library/options.md | 0 .../noir/standard_library/recursion.md | 0 .../noir/standard_library/zeroed.md | 0 .../noir/syntax/_category_.json | 0 .../noir/syntax/assert.md | 0 .../noir/syntax/comments.md | 0 .../noir/syntax/control_flow.md | 0 .../noir/syntax/data_bus.md | 0 .../noir/syntax/data_types/_category_.json | 0 .../noir/syntax/data_types/arrays.md | 0 .../noir/syntax/data_types/booleans.md | 0 .../noir/syntax/data_types/fields.md | 0 .../noir/syntax/data_types/function_types.md | 0 .../noir/syntax/data_types/index.md | 2 +- .../noir/syntax/data_types/integers.md | 0 .../noir/syntax/data_types/references.md | 0 .../noir/syntax/data_types/slices.mdx | 0 .../noir/syntax/data_types/strings.md | 0 .../noir/syntax/data_types/structs.md | 0 .../noir/syntax/data_types/tuples.md | 0 .../noir/syntax/data_types/vectors.mdx | 0 .../noir/syntax/distinct.md | 0 .../noir/syntax/functions.md | 0 .../noir/syntax/generics.md | 0 .../noir/syntax/lambdas.md | 0 .../noir/syntax/mutability.md | 4 +- .../version-v0.22.0}/noir/syntax/ops.md | 2 +- .../noir/syntax/shadowing.md | 0 .../noir/syntax/unconstrained.md | 2 +- .../NoirJS/backend_barretenberg/.nojekyll | 0 .../classes/BarretenbergBackend.md | 0 .../NoirJS/backend_barretenberg/index.md | 0 .../interfaces/Backend.md | 0 .../type-aliases/BackendOptions.md | 0 .../type-aliases/CompiledCircuit.md | 0 .../type-aliases/ProofData.md | 0 .../backend_barretenberg/typedoc-sidebar.cjs | 0 .../reference/NoirJS/noir_js/.nojekyll | 0 .../reference/NoirJS/noir_js/classes/Noir.md | 0 .../reference/NoirJS/noir_js/functions/and.md | 0 .../NoirJS/noir_js/functions/blake2s256.md | 0 .../functions/ecdsa_secp256k1_verify.md | 0 .../functions/ecdsa_secp256r1_verify.md | 0 .../NoirJS/noir_js/functions/keccak256.md | 0 .../NoirJS/noir_js/functions/sha256.md | 
0 .../reference/NoirJS/noir_js/functions/xor.md | 0 .../reference/NoirJS/noir_js/index.md | 0 .../noir_js/type-aliases/CompiledCircuit.md | 0 .../type-aliases/ForeignCallHandler.md | 0 .../noir_js/type-aliases/ForeignCallInput.md | 0 .../noir_js/type-aliases/ForeignCallOutput.md | 0 .../NoirJS/noir_js/type-aliases/InputMap.md | 0 .../NoirJS/noir_js/type-aliases/ProofData.md | 0 .../NoirJS/noir_js/type-aliases/WitnessMap.md | 0 .../NoirJS/noir_js/typedoc-sidebar.cjs | 0 .../reference/_category_.json | 0 .../reference/nargo_commands.md | 0 .../tutorials/noirjs_app.md | 0 noir/noir_stdlib/src/cmp.nr | 310 +++++++ noir/noir_stdlib/src/ec/montcurve.nr | 25 +- noir/noir_stdlib/src/ec/swcurve.nr | 40 +- noir/noir_stdlib/src/ec/tecurve.nr | 38 +- noir/noir_stdlib/src/lib.nr | 1 + noir/noir_stdlib/src/ops.nr | 121 ++- noir/noir_stdlib/src/prelude.nr | 2 + .../double_verify_proof/target/acir.gz | Bin 1186 -> 0 bytes .../double_verify_proof/target/witness.gz | Bin 8144 -> 0 bytes .../compile_failure/cyclic_dep/Nargo.toml | 7 + .../compile_failure/cyclic_dep/Prover.toml | 0 .../cyclic_dep/dep1/Nargo.toml | 7 + .../cyclic_dep/dep1/src/lib.nr | 3 + .../cyclic_dep/dep2/Nargo.toml | 7 + .../cyclic_dep/dep2/src/lib.nr | 3 + .../compile_failure/cyclic_dep/src/main.nr | 7 + .../impl_with_where_clause/src/main.nr | 18 +- .../trait_default_implementation/src/main.nr | 11 +- .../trait_override_implementation/src/main.nr | 22 +- .../compile_success_empty/traits/src/main.nr | 10 +- .../bit_shifts_runtime/src/main.nr | 11 + .../Nargo.toml | 2 +- .../brillig_array_eq/Prover.toml | 2 + .../brillig_array_eq/src/main.nr | 4 + .../brillig_ecdsa_secp256k1/Nargo.toml | 6 + .../Prover.toml | 0 .../src/main.nr | 9 +- .../brillig_ecdsa_secp256r1/Nargo.toml | 6 + .../brillig_ecdsa_secp256r1/Prover.toml | 20 + .../brillig_ecdsa_secp256r1/src/main.nr | 16 + .../brillig_set_slice_of_slice/Nargo.toml | 0 .../brillig_set_slice_of_slice/src/main.nr | 0 .../brillig_to_bits/Nargo.toml | 0 .../brillig_to_bits/src/main.nr | 0 .../global_consts/src/main.nr | 16 +- .../operator_overloading/Nargo.toml | 7 + .../operator_overloading/Prover.toml | 2 + .../operator_overloading/src/main.nr | 154 ++++ .../regression_3889/Nargo.toml | 7 + .../regression_3889/Prover.toml | 10 + .../regression_3889/src/main.nr | 23 + .../signed_comparison/Nargo.toml | 6 + .../signed_comparison/Prover.toml | 3 + .../signed_comparison/src/main.nr | 13 + .../execution_success/submodules/src/main.nr | 6 +- .../unconstrained_empty/Nargo.toml | 0 .../unconstrained_empty/src/main.nr | 0 .../should_fail_mismatch/src/main.nr | 3 +- noir/tooling/backend_interface/Cargo.toml | 2 +- .../backend_interface/src/cli/write_vk.rs | 1 + noir/tooling/backend_interface/src/lib.rs | 5 +- .../backend_interface/src/proof_system.rs | 5 +- .../backend_interface/src/smart_contract.rs | 2 +- noir/tooling/debugger/Cargo.toml | 14 +- noir/tooling/debugger/README.md | 98 +- noir/tooling/debugger/build.rs | 74 ++ noir/tooling/debugger/src/context.rs | 230 ++++- noir/tooling/debugger/src/dap.rs | 568 ++++++++++++ noir/tooling/debugger/src/lib.rs | 16 + noir/tooling/debugger/src/repl.rs | 86 +- .../debugger/src/source_code_printer.rs | 317 +++++++ noir/tooling/debugger/tests/debug.rs | 55 ++ noir/tooling/lsp/Cargo.toml | 1 + noir/tooling/lsp/src/lib.rs | 84 +- noir/tooling/lsp/src/notifications/mod.rs | 143 ++- .../lsp/src/requests/code_lens_request.rs | 236 +++++ .../lsp/src/requests/goto_definition.rs | 107 +-- noir/tooling/lsp/src/requests/mod.rs | 44 +- 
noir/tooling/lsp/src/requests/profile_run.rs | 10 +- noir/tooling/lsp/src/requests/test_run.rs | 20 +- noir/tooling/lsp/src/requests/tests.rs | 9 +- noir/tooling/lsp/src/types.rs | 17 +- noir/tooling/nargo/Cargo.toml | 10 +- noir/tooling/nargo/src/artifacts/debug.rs | 89 ++ noir/tooling/nargo/src/lib.rs | 20 +- noir/tooling/nargo/src/ops/compile.rs | 42 +- noir/tooling/nargo/src/ops/execute.rs | 7 +- noir/tooling/nargo/src/ops/foreign_calls.rs | 153 +++- noir/tooling/nargo/src/ops/test.rs | 3 +- noir/tooling/nargo_cli/Cargo.toml | 7 +- noir/tooling/nargo_cli/src/cli/check_cmd.rs | 22 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 12 +- noir/tooling/nargo_cli/src/cli/compile_cmd.rs | 55 +- noir/tooling/nargo_cli/src/cli/dap_cmd.rs | 180 ++++ noir/tooling/nargo_cli/src/cli/debug_cmd.rs | 21 +- noir/tooling/nargo_cli/src/cli/execute_cmd.rs | 41 +- noir/tooling/nargo_cli/src/cli/fmt_cmd.rs | 19 +- noir/tooling/nargo_cli/src/cli/info_cmd.rs | 12 +- noir/tooling/nargo_cli/src/cli/lsp_cmd.rs | 8 +- noir/tooling/nargo_cli/src/cli/mod.rs | 5 + noir/tooling/nargo_cli/src/cli/prove_cmd.rs | 27 +- noir/tooling/nargo_cli/src/cli/test_cmd.rs | 38 +- noir/tooling/nargo_cli/src/cli/verify_cmd.rs | 17 +- noir/tooling/nargo_cli/src/errors.rs | 3 + noir/tooling/nargo_cli/src/main.rs | 15 +- noir/tooling/nargo_toml/src/errors.rs | 3 + noir/tooling/nargo_toml/src/lib.rs | 57 +- noir/tooling/noir_codegen/package.json | 11 +- noir/tooling/noir_codegen/src/index.ts | 30 +- noir/tooling/noir_codegen/src/noir_types.ts | 70 +- .../noir_codegen/test/assert_lt/src/main.nr | 13 +- .../test/assert_lt/target/assert_lt.json | 2 +- noir/tooling/noir_codegen/test/index.test.ts | 92 +- noir/tooling/noir_js/package.json | 11 +- .../noir_js_backend_barretenberg/package.json | 11 +- noir/tooling/noir_js_types/package.json | 11 +- noir/tooling/noirc_abi_wasm/package.json | 15 +- noir/tooling/noirc_abi_wasm/src/lib.rs | 2 +- .../test/browser/structs.test.ts | 26 + .../noirc_abi_wasm/test/node/structs.test.ts | 22 + .../noirc_abi_wasm/test/shared/structs.ts | 79 ++ .../aztec-nr/safe-math/src/safe_u120.nr | 2 +- .../src/__snapshots__/index.test.ts.snap | 4 +- .../should-compile-different-namespace.nr | 1 - .../src/abis/append_only_tree_snapshot.nr | 2 +- .../src/abis/constant_rollup_data.nr | 2 +- .../rollup-lib/src/abis/global_variables.nr | 2 +- .../src/abis/public_data_tree_leaf.nr | 2 +- .../src/crates/types/src/abis/call_context.nr | 2 +- .../src/crates/types/src/abis/call_request.nr | 2 +- .../types/src/abis/function_selector.nr | 2 +- .../types/src/abis/new_contract_data.nr | 2 +- .../crates/types/src/abis/public_data_read.nr | 2 +- .../src/abis/public_data_update_request.nr | 2 +- .../src/crates/types/src/abis/side_effect.nr | 2 +- .../src/crates/types/src/address.nr | 2 +- .../src/contrakt/storage_update_request.nr | 2 +- 454 files changed, 6079 insertions(+), 1985 deletions(-) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/CHANGELOG.md (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/Cargo.toml (87%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/build.rs (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/acvm_backend.wasm (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/fixed_base_scalar_mul.rs (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/lib.rs (82%) rename 
noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/wasm/barretenberg_structures.rs (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/wasm/mod.rs (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/wasm/pedersen.rs (100%) rename noir/acvm-repo/{barretenberg_blackbox_solver => bn254_blackbox_solver}/src/wasm/schnorr.rs (100%) delete mode 100644 noir/compiler/source-resolver/lib-node/index.js delete mode 100644 noir/compiler/source-resolver/lib-node/index.js.map delete mode 100644 noir/compiler/source-resolver/lib-node/index_node.js delete mode 100644 noir/compiler/source-resolver/lib-node/index_node.js.map delete mode 100644 noir/compiler/source-resolver/lib/index.js delete mode 100644 noir/compiler/source-resolver/lib/index.js.map delete mode 100644 noir/compiler/wasm/src/circuit.rs rename noir/docs/{versioned_docs/version-v../noir/syntax => docs/noir/concepts}/_category_.json (72%) rename noir/docs/docs/noir/{syntax => concepts}/assert.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/comments.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/control_flow.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_bus.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/_category_.json (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/arrays.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/booleans.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/fields.md (100%) rename noir/docs/{versioned_docs/version-v../noir/syntax => docs/noir/concepts}/data_types/function_types.md (88%) rename noir/docs/{versioned_docs/version-v../noir/syntax => docs/noir/concepts}/data_types/index.md (95%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/integers.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/references.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/slices.mdx (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/strings.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/structs.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/tuples.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/data_types/vectors.mdx (100%) rename noir/docs/docs/noir/{syntax => concepts}/distinct.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/functions.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/generics.md (100%) rename noir/docs/docs/noir/{syntax => concepts}/lambdas.md (100%) rename noir/docs/{versioned_docs/version-v../noir/syntax => docs/noir/concepts}/mutability.md (97%) rename noir/docs/{versioned_docs/version-v../noir/syntax => docs/noir/concepts}/ops.md (97%) rename noir/docs/docs/noir/{syntax => concepts}/shadowing.md (100%) rename noir/docs/docs/{explanations/noir => noir/concepts}/traits.md (100%) rename noir/docs/{versioned_docs/version-v../noir/syntax => docs/noir/concepts}/unconstrained.md (98%) create mode 100644 noir/docs/docs/noir/standard_library/traits.md delete mode 100644 noir/docs/versioned_docs/version-v../explanations/standard_library/traits.md rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/explainers/explainer-recursion.md (98%) rename noir/docs/versioned_docs/{version-v.. 
=> version-v0.22.0}/explanations/noir/traits.md (100%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/explanations/standard_library/traits.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/create_a_project.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/installation/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/installation/index.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/installation/other_install_methods.md (94%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/project_breakdown.md (99%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/tooling/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/tooling/index.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/tooling/language_server.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/getting_started/tooling/testing.md (90%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/how_to/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/how_to/how-to-recursion.md (95%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/how_to/merkle-proof.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/how_to/solidity_verifier.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/index.md (95%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/migration_notes.md (92%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/modules_packages_crates/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/modules_packages_crates/crates_and_packages.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/modules_packages_crates/dependencies.md (97%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/modules_packages_crates/modules.md (96%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/modules_packages_crates/workspaces.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/black_box_fns.md (96%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/ec_primitives.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/eddsa.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/hashes.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/index.md (100%) rename noir/docs/versioned_docs/{version-v.. 
=> version-v0.22.0}/noir/standard_library/cryptographic_primitives/scalar.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/cryptographic_primitives/schnorr.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/logging.md (87%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/merkle_trees.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/options.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/recursion.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/standard_library/zeroed.md (100%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/noir/syntax/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/assert.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/comments.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/control_flow.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_bus.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/arrays.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/booleans.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/fields.md (100%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/noir/syntax/data_types/function_types.md (100%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/noir/syntax/data_types/index.md (98%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/integers.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/references.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/slices.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/strings.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/structs.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/tuples.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/data_types/vectors.mdx (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/distinct.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/functions.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/generics.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/lambdas.md (100%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/noir/syntax/mutability.md (97%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/noir/syntax/ops.md (97%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/noir/syntax/shadowing.md (100%) rename noir/docs/{docs => versioned_docs/version-v0.22.0}/noir/syntax/unconstrained.md (98%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/.nojekyll (100%) rename noir/docs/versioned_docs/{version-v.. 
=> version-v0.22.0}/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/index.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/interfaces/Backend.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/.nojekyll (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/classes/Noir.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/and.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/blake2s256.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/keccak256.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/sha256.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/functions/xor.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/index.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/InputMap.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/ProofData.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/type-aliases/WitnessMap.md (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/NoirJS/noir_js/typedoc-sidebar.cjs (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/_category_.json (100%) rename noir/docs/versioned_docs/{version-v.. => version-v0.22.0}/reference/nargo_commands.md (100%) rename noir/docs/versioned_docs/{version-v.. 
=> version-v0.22.0}/tutorials/noirjs_app.md (100%) create mode 100644 noir/noir_stdlib/src/cmp.nr delete mode 100644 noir/test_programs/acir_artifacts/double_verify_proof/target/acir.gz delete mode 100644 noir/test_programs/acir_artifacts/double_verify_proof/target/witness.gz create mode 100644 noir/test_programs/compile_failure/cyclic_dep/Nargo.toml create mode 100644 noir/test_programs/compile_failure/cyclic_dep/Prover.toml create mode 100644 noir/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml create mode 100644 noir/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr create mode 100644 noir/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml create mode 100644 noir/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr create mode 100644 noir/test_programs/compile_failure/cyclic_dep/src/main.nr rename noir/test_programs/execution_success/{brillig_ecdsa => brillig_array_eq}/Nargo.toml (67%) create mode 100644 noir/test_programs/execution_success/brillig_array_eq/Prover.toml create mode 100644 noir/test_programs/execution_success/brillig_array_eq/src/main.nr create mode 100644 noir/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml rename noir/test_programs/execution_success/{brillig_ecdsa => brillig_ecdsa_secp256k1}/Prover.toml (100%) rename noir/test_programs/execution_success/{brillig_ecdsa => brillig_ecdsa_secp256k1}/src/main.nr (52%) create mode 100644 noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml create mode 100644 noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml create mode 100644 noir/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr rename noir/test_programs/{compile_success_empty => execution_success}/brillig_set_slice_of_slice/Nargo.toml (100%) rename noir/test_programs/{compile_success_empty => execution_success}/brillig_set_slice_of_slice/src/main.nr (100%) rename noir/test_programs/{compile_success_empty => execution_success}/brillig_to_bits/Nargo.toml (100%) rename noir/test_programs/{compile_success_empty => execution_success}/brillig_to_bits/src/main.nr (100%) create mode 100644 noir/test_programs/execution_success/operator_overloading/Nargo.toml create mode 100644 noir/test_programs/execution_success/operator_overloading/Prover.toml create mode 100644 noir/test_programs/execution_success/operator_overloading/src/main.nr create mode 100644 noir/test_programs/execution_success/regression_3889/Nargo.toml create mode 100644 noir/test_programs/execution_success/regression_3889/Prover.toml create mode 100644 noir/test_programs/execution_success/regression_3889/src/main.nr create mode 100644 noir/test_programs/execution_success/signed_comparison/Nargo.toml create mode 100644 noir/test_programs/execution_success/signed_comparison/Prover.toml create mode 100644 noir/test_programs/execution_success/signed_comparison/src/main.nr rename noir/test_programs/{compile_success_empty => execution_success}/unconstrained_empty/Nargo.toml (100%) rename noir/test_programs/{compile_success_empty => execution_success}/unconstrained_empty/src/main.nr (100%) create mode 100644 noir/tooling/debugger/build.rs create mode 100644 noir/tooling/debugger/src/dap.rs create mode 100644 noir/tooling/debugger/src/source_code_printer.rs create mode 100644 noir/tooling/debugger/tests/debug.rs create mode 100644 noir/tooling/lsp/src/requests/code_lens_request.rs create mode 100644 noir/tooling/nargo_cli/src/cli/dap_cmd.rs create mode 100644 noir/tooling/noirc_abi_wasm/test/browser/structs.test.ts create 
mode 100644 noir/tooling/noirc_abi_wasm/test/node/structs.test.ts create mode 100644 noir/tooling/noirc_abi_wasm/test/shared/structs.ts diff --git a/noir/.github/workflows/publish-acvm.yml b/noir/.github/workflows/publish-acvm.yml index 59a104e3f75..0251aaa0377 100644 --- a/noir/.github/workflows/publish-acvm.yml +++ b/noir/.github/workflows/publish-acvm.yml @@ -45,9 +45,9 @@ jobs: env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} - - name: Publish barretenberg_blackbox_solver + - name: Publish bn254_blackbox_solver run: | - cargo publish --package barretenberg_blackbox_solver + cargo publish --package bn254_blackbox_solver env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} diff --git a/noir/.github/workflows/publish-docs.yml b/noir/.github/workflows/publish-docs.yml index 4ef7dd89777..9ea2f1e1491 100644 --- a/noir/.github/workflows/publish-docs.yml +++ b/noir/.github/workflows/publish-docs.yml @@ -1,12 +1,10 @@ name: Publish documentation on: - workflow_dispatch: - inputs: - noir-ref: - description: The noir reference to checkout - required: false - default: 'master' + push: + branches: + - master + paths: [docs/**] jobs: publish-docs: @@ -16,9 +14,6 @@ jobs: steps: - name: Checkout release branch uses: actions/checkout@v4 - with: - ref: ${{ inputs.noir-ref }} - token: ${{ secrets.NOIR_RELEASES_TOKEN }} - name: Setup Node.js uses: actions/setup-node@v2 diff --git a/noir/.github/workflows/release.yml b/noir/.github/workflows/release.yml index 3b2393eaa8f..f9f6fe2fc54 100644 --- a/noir/.github/workflows/release.yml +++ b/noir/.github/workflows/release.yml @@ -120,21 +120,6 @@ jobs: token: ${{ secrets.NOIR_REPO_TOKEN }} inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "npm-tag": "latest" }' - publish-docs: - name: Publish docs - needs: [release-please] - if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest - - steps: - - name: Dispatch to publish-docs - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: publish-docs.yml - ref: master - token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' - publish-acvm: name: Publish acvm needs: [release-please] diff --git a/noir/.github/workflows/spellcheck.yml b/noir/.github/workflows/spellcheck.yml index 89fcdd12d12..83d67325775 100644 --- a/noir/.github/workflows/spellcheck.yml +++ b/noir/.github/workflows/spellcheck.yml @@ -8,6 +8,32 @@ concurrency: cancel-in-progress: true jobs: - spellcheck: - name: Spellcheck - uses: noir-lang/.github/.github/workflows/spellcheck.yml@main + code: + name: Code + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Check spelling + uses: streetsidesoftware/cspell-action@v2 + with: + strict: false # Do not fail, if a spelling mistake is found (This can be annoying for contributors) + incremental_files_only: true # Run this action on files which have changed in PR + files: | + **/*.{md,rs} + + docs: + name: Documentation + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Check spelling + uses: streetsidesoftware/cspell-action@v2 + with: + strict: true # Documentation has higher standards for correctness. 
+ incremental_files_only: true # Run this action on files which have changed in PR + files: | + ./docs/**/*.md diff --git a/noir/.gitrepo b/noir/.gitrepo index 6e7719f291a..6d4be4e1638 100644 --- a/noir/.gitrepo +++ b/noir/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/noir-lang/noir branch = aztec-packages - commit = a0e2ac5319abca5259c5823f7b037b6eb693e5b6 - parent = 3d08ef9d828b81ddb1fc6e9aebae6896fe4ee946 + commit = 50cb28c7ae8dff415cdf86c67445e6dcd72c41a5 + parent = d2ae2cd529b0ef132c0b6c7c35938066c89d809c method = merge cmdver = 0.4.6 diff --git a/noir/CONTRIBUTING.md b/noir/CONTRIBUTING.md index 9cbbeeb677f..d2553b003f8 100644 --- a/noir/CONTRIBUTING.md +++ b/noir/CONTRIBUTING.md @@ -92,6 +92,10 @@ This strategy avoids scenarios where pull requests grow too large/out-of-scope a The easiest way to do this is to have multiple Conventional Commits while you work and then you can cherry-pick the smaller changes into separate branches for pull requesting. +### Typos and other small changes + +Significant changes, like new features or important bug fixes, typically have a more pronounced impact on the project’s overall development. For smaller fixes, such as typos, we encourage you to report them instead of opening PRs. This approach helps us manage our resources effectively and ensures that every change contributes meaningfully to the project. PRs involving such smaller fixes will likely be closed and incorporated in PRs authored by the core team. + ### Reviews For any repository in the noir-lang organization, we require code review & approval by __one__ Noir team member before the changes are merged, as enforced by GitHub branch protection. Non-breaking pull requests may be merged at any time. Breaking pull requests should only be merged when the team has general agreement of the changes and is preparing a breaking release. 
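The graft described in the commit message corresponds to the noir/.gitrepo hunk above: the recorded subrepo commit and parent are updated to the actual commit on noir's aztec-packages branch before the subrepo pull is re-run. Below is a minimal sketch of that procedure, assuming the git-subrepo tool is installed and using the hashes shown in that hunk; the conflict resolution itself just follows git-subrepo's own printed instructions and is not shown.

```sh
# Repoint the subrepo metadata at the real upstream commit and parent
# (values taken from the noir/.gitrepo hunk in this patch), then commit:
#   commit = 50cb28c7ae8dff415cdf86c67445e6dcd72c41a5
#   parent = d2ae2cd529b0ef132c0b6c7c35938066c89d809c
$EDITOR noir/.gitrepo
git commit -am "chore: fix noir/.gitrepo graft"

# Re-run the subrepo sync against the branch recorded in noir/.gitrepo
# (aztec-packages), resolve any merge conflicts, and commit the result;
# that merge is what the rest of this patch contains.
git subrepo pull noir
```

(`git subrepo pull noir` is the conventional argument order for the "git subrepo noir pull" step mentioned in the commit message.)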
diff --git a/noir/Cargo.lock b/noir/Cargo.lock index 82d4030f100..05bcfb2b582 100644 --- a/noir/Cargo.lock +++ b/noir/Cargo.lock @@ -7,7 +7,7 @@ name = "acir" version = "0.38.0" dependencies = [ "acir_field", - "base64", + "base64 0.21.2", "bincode", "brillig", "flate2", @@ -28,7 +28,7 @@ dependencies = [ "ark-bls12-381", "ark-bn254", "ark-ff", - "cfg-if", + "cfg-if 1.0.0", "hex", "num-bigint", "num-traits", @@ -43,13 +43,13 @@ dependencies = [ "acvm_blackbox_solver", "brillig_vm", "indexmap 1.9.3", - "log", "num-bigint", "num-traits", "paste", "proptest", - "rand", + "rand 0.8.5", "thiserror", + "tracing", ] [[package]] @@ -71,20 +71,20 @@ name = "acvm_js" version = "0.38.0" dependencies = [ "acvm", - "barretenberg_blackbox_solver", + "bn254_blackbox_solver", "build-data", - "cfg-if", + "cfg-if 1.0.0", "console_error_panic_hook", "const-str", "gloo-utils", "js-sys", - "log", "pkg-config", "serde", + "tracing-subscriber", + "tracing-web", "wasm-bindgen", "wasm-bindgen-futures", "wasm-bindgen-test", - "wasm-logger", ] [[package]] @@ -108,7 +108,7 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -119,8 +119,8 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "cfg-if", - "getrandom", + "cfg-if 1.0.0", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -342,7 +342,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", ] [[package]] @@ -393,7 +393,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138985dd8aefbefeaa66b01b7f5b2b6b4c333fcef1cc5f32c63a2aabe37d6de3" dependencies = [ - "futures", + "futures 0.3.28", "lsp-types 0.94.1", "pin-project-lite", "rustix", @@ -407,17 +407,6 @@ dependencies = [ "waitpid-any", ] -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -442,7 +431,6 @@ dependencies = [ "const_format", "dirs", "flate2", - "log", "reqwest", "serde", "serde_json", @@ -450,6 +438,7 @@ dependencies = [ "tempfile", "test-binary", "thiserror", + "tracing", ] [[package]] @@ -460,41 +449,25 @@ checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ "addr2line", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "miniz_oxide", "object", "rustc-demangle", ] -[[package]] -name = "barretenberg_blackbox_solver" -version = "0.38.0" -dependencies = [ - "acir", - "acvm_blackbox_solver", - "ark-ec", - "ark-ff", - "flate2", - "getrandom", - "grumpkin", - "js-sys", - "num-bigint", - "pkg-config", - "reqwest", - "rust-embed", - "tar", - "thiserror", - "wasm-bindgen-futures", - "wasmer", -] - [[package]] name = "base16ct" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base64" +version = "0.13.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.2" @@ -604,6 +577,28 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bn254_blackbox_solver" +version = "0.38.0" +dependencies = [ + "acir", + "acvm_blackbox_solver", + "ark-ec", + "ark-ff", + "flate2", + "getrandom 0.2.10", + "grumpkin", + "js-sys", + "num-bigint", + "pkg-config", + "reqwest", + "rust-embed", + "tar", + "thiserror", + "wasm-bindgen-futures", + "wasmer", +] + [[package]] name = "brillig" version = "0.38.0" @@ -757,6 +752,12 @@ version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.0" @@ -882,7 +883,7 @@ checksum = "fc4159b76af02757139baf42c0c971c6dc155330999fbfd8eddb29b97fb2db68" dependencies = [ "codespan-reporting", "lsp-types 0.88.0", - "url", + "url 2.4.0", ] [[package]] @@ -929,6 +930,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +[[package]] +name = "comma" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" + [[package]] name = "console" version = "0.15.7" @@ -947,7 +954,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "wasm-bindgen", ] @@ -989,6 +996,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -1002,7 +1015,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80128832c58ea9cbd041d2a759ec449224487b2c1e400453d99d244eead87a8e" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "libc", "scopeguard", "windows-sys 0.33.0", @@ -1014,7 +1027,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee34052ee3d93d6d8f3e6f81d85c47921f6653a19a7b70e939e3e602d893a674" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1115,7 +1128,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1156,11 +1169,11 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "14c3242926edf34aec4ac3a77108ad4854bffaa2e4ddc1824124ce59231302d5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] 
@@ -1170,7 +1183,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-epoch", "crossbeam-utils", ] @@ -1182,7 +1195,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", "memoffset 0.9.0", "scopeguard", @@ -1194,17 +1207,17 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1214,7 +1227,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -1250,6 +1263,17 @@ dependencies = [ "memchr", ] +[[package]] +name = "dap" +version = "0.4.1-alpha1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35c7fc89d334ab745ba679f94c7314c9b17ecdcd923c111df6206e9fd7729fa9" +dependencies = [ + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "darling" version = "0.20.3" @@ -1291,11 +1315,11 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.8", ] [[package]] @@ -1337,6 +1361,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + [[package]] name = "difflib" version = "0.4.0" @@ -1369,7 +1406,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "dirs-sys-next", ] @@ -1448,7 +1485,7 @@ dependencies = [ "generic-array", "group", "pkcs8", - "rand_core", + "rand_core 0.6.4", "sec1", "subtle", "zeroize", @@ -1472,7 +1509,7 @@ version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1522,19 +1559,6 @@ dependencies = [ "syn 2.0.26", ] -[[package]] -name = "env_logger" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", -] - [[package]] name = 
"equivalent" version = "1.0.1" @@ -1589,7 +1613,7 @@ version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "rustix", "windows-sys 0.48.0", ] @@ -1600,7 +1624,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1619,7 +1643,7 @@ version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "windows-sys 0.48.0", @@ -1678,7 +1702,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "percent-encoding", + "percent-encoding 2.3.0", ] [[package]] @@ -1687,6 +1711,12 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + [[package]] name = "futures" version = "0.3.28" @@ -1695,6 +1725,7 @@ checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1717,6 +1748,18 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + [[package]] name = "futures-io" version = "0.3.28" @@ -1752,6 +1795,7 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ + "futures 0.1.31", "futures-channel", "futures-core", "futures-io", @@ -1779,7 +1823,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1792,16 +1836,27 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + [[package]] name = "getrandom" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] @@ -1872,7 +1927,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1902,7 +1957,7 @@ dependencies = [ "indexmap 1.9.3", "slab", "tokio", - "tokio-util", + "tokio-util 0.7.8", "tracing", ] @@ -1960,15 +2015,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.2" @@ -2024,12 +2070,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - [[package]] name = "hyper" version = "0.14.27" @@ -2103,6 +2143,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.4.0" @@ -2137,7 +2188,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" dependencies = [ "bitmaps", - "rand_core", + "rand_core 0.6.4", "rand_xoshiro", "serde", "sized-chunks", @@ -2215,6 +2266,15 @@ dependencies = [ "str_stack", ] +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "ipnet" version = "2.8.0" @@ -2227,7 +2287,7 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "rustix", "windows-sys 0.48.0", ] @@ -2260,13 +2320,127 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonrpc" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34efde8d2422fb79ed56db1d3aea8fa5b583351d15a26770cdee2f88813dd702" +dependencies = [ + "base64 0.13.1", + "minreq", + "serde", + "serde_json", +] + +[[package]] +name = "jsonrpc-client-transports" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" +dependencies = [ + "derive_more", + "futures 0.3.28", + "jsonrpc-core", + "jsonrpc-pubsub", + "log", + "serde", + "serde_json", + "url 1.7.2", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" 
+dependencies = [ + "futures 0.3.28", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpc-core-client" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" +dependencies = [ + "futures 0.3.28", + "jsonrpc-client-transports", +] + +[[package]] +name = "jsonrpc-derive" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b939a78fa820cdfcb7ee7484466746a7377760970f6f9c6fe19f9edcc8a38d2" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "jsonrpc-http-server" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" +dependencies = [ + "futures 0.3.28", + "hyper", + "jsonrpc-core", + "jsonrpc-server-utils", + "log", + "net2", + "parking_lot 0.11.2", + "unicase", +] + +[[package]] +name = "jsonrpc-pubsub" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" +dependencies = [ + "futures 0.3.28", + "jsonrpc-core", + "lazy_static", + "log", + "parking_lot 0.11.2", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "jsonrpc-server-utils" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" +dependencies = [ + "bytes", + "futures 0.3.28", + "globset", + "jsonrpc-core", + "lazy_static", + "log", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "unicase", +] + [[package]] name = "k256" version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "ecdsa", "elliptic-curve", "sha2", @@ -2346,7 +2520,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2359,7 +2533,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2371,6 +2545,21 @@ dependencies = [ "libc", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "memchr" version = "2.5.0" @@ -2404,15 +2593,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.9.0" @@ -2437,6 +2617,17 @@ dependencies = [ "adler", ] +[[package]] +name = "minreq" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3371dfc7b772c540da1380123674a8e20583aca99907087d990ca58cf44203" +dependencies = [ + "log", + "serde", + "serde_json", +] + [[package]] name = "mio" version = "0.8.10" @@ 
-2444,7 +2635,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2462,7 +2653,11 @@ dependencies = [ "codespan-reporting", "fm", "iter-extended", - "log", + "jsonrpc", + "jsonrpc-core", + "jsonrpc-core-client", + "jsonrpc-derive", + "jsonrpc-http-server", "noirc_abi", "noirc_driver", "noirc_errors", @@ -2472,8 +2667,10 @@ dependencies = [ "rayon", "rustc_version", "serde", + "serial_test", "tempfile", "thiserror", + "tracing", ] [[package]] @@ -2485,15 +2682,15 @@ dependencies = [ "assert_fs", "async-lsp", "backend-interface", - "barretenberg_blackbox_solver", "bb_abstraction_leaks", + "bn254_blackbox_solver", "build-data", "clap", "color-eyre", "const_format", "criterion", + "dap", "dirs", - "env_logger", "fm", "hex", "iai", @@ -2521,10 +2718,9 @@ dependencies = [ "test-binary", "thiserror", "tokio", - "tokio-util", - "toml", + "tokio-util 0.7.8", + "toml 0.7.6", "tower", - "tracing", "tracing-appender", "tracing-subscriber", ] @@ -2538,7 +2734,7 @@ dependencies = [ "serde", "similar-asserts", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] @@ -2552,8 +2748,19 @@ dependencies = [ "semver", "serde", "thiserror", - "toml", - "url", + "toml 0.7.6", + "url 2.4.0", +] + +[[package]] +name = "net2" +version = "0.2.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi", ] [[package]] @@ -2573,21 +2780,34 @@ checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" dependencies = [ "bitflags 1.3.2", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "memoffset 0.6.5", ] [[package]] name = "nix" -version = "0.26.2" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4" dependencies = [ + "autocfg", "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", + "libc", + "memoffset 0.6.5", + "pin-utils", +] + +[[package]] +name = "nix" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +dependencies = [ + "bitflags 1.3.2", + "cfg-if 1.0.0", "libc", - "static_assertions", ] [[package]] @@ -2595,12 +2815,22 @@ name = "noir_debugger" version = "0.22.0" dependencies = [ "acvm", + "assert_cmd", + "build-data", "codespan-reporting", + "dap", "easy-repl", + "fm", "nargo", + "noirc_driver", "noirc_errors", "noirc_printable_type", "owo-colors", + "rexpect", + "rustc_version", + "serde_json", + "tempfile", + "test-binary", "thiserror", ] @@ -2622,6 +2852,7 @@ dependencies = [ "serde", "serde_json", "serde_with", + "thiserror", "tokio", "tower", "wasm-bindgen", @@ -2635,17 +2866,17 @@ dependencies = [ "build-data", "console_error_panic_hook", "fm", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "js-sys", - "log", "nargo", "noirc_driver", "noirc_errors", "noirc_frontend", "serde", + "tracing-subscriber", + "tracing-web", "wasm-bindgen", - "wasm-logger", ] [[package]] @@ -2662,7 +2893,7 @@ dependencies = [ "strum", "strum_macros", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] @@ -2672,7 +2903,7 @@ dependencies = [ "acvm", "build-data", 
"console_error_panic_hook", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "iter-extended", "js-sys", @@ -2693,13 +2924,13 @@ dependencies = [ "fm", "fxhash", "iter-extended", - "log", "noirc_abi", "noirc_errors", "noirc_evaluator", "noirc_frontend", "rust-embed", "serde", + "tracing", ] [[package]] @@ -2711,9 +2942,9 @@ dependencies = [ "codespan", "codespan-reporting", "fm", - "log", "serde", "serde_with", + "tracing", ] [[package]] @@ -2724,12 +2955,12 @@ dependencies = [ "fxhash", "im", "iter-extended", - "log", "noirc_errors", "noirc_frontend", "num-bigint", "serde", "thiserror", + "tracing", ] [[package]] @@ -2741,7 +2972,6 @@ dependencies = [ "chumsky", "fm", "iter-extended", - "log", "noirc_errors", "noirc_printable_type", "regex", @@ -2754,6 +2984,7 @@ dependencies = [ "strum_macros", "tempfile", "thiserror", + "tracing", ] [[package]] @@ -2762,6 +2993,7 @@ version = "0.22.0" dependencies = [ "acvm", "iter-extended", + "jsonrpc", "regex", "serde", "serde_json", @@ -2831,7 +3063,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "libc", ] @@ -2879,6 +3111,17 @@ dependencies = [ "sha2", ] +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -2886,7 +3129,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -2895,7 +3152,7 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "smallvec", @@ -2908,6 +3165,12 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "percent-encoding" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" + [[package]] name = "percent-encoding" version = "2.3.0" @@ -2932,7 +3195,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared", - "rand", + "rand 0.8.5", ] [[package]] @@ -3021,15 +3284,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" dependencies = [ "backtrace", - "cfg-if", + "cfg-if 1.0.0", "criterion", "findshlibs", "inferno", "libc", "log", - "nix 0.26.2", + "nix 0.26.4", "once_cell", - "parking_lot", + "parking_lot 0.12.1", 
"smallvec", "symbolic-demangle", "tempfile", @@ -3098,6 +3361,15 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml 0.5.11", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3148,10 +3420,10 @@ dependencies = [ "bitflags 2.3.3", "lazy_static", "num-traits", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax", + "regex-syntax 0.7.4", "rusty-fork", "tempfile", "unarray", @@ -3217,6 +3489,19 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + [[package]] name = "rand" version = "0.8.5" @@ -3224,8 +3509,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", ] [[package]] @@ -3235,7 +3530,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", ] [[package]] @@ -3244,7 +3548,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", ] [[package]] @@ -3253,7 +3566,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3262,7 +3575,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3309,7 +3622,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom", + "getrandom 0.2.10", "redox_syscall 0.2.16", "thiserror", ] @@ -3335,7 +3648,7 @@ dependencies = [ "aho-corasick", "memchr", "regex-automata 0.3.3", - "regex-syntax", + "regex-syntax 0.7.4", ] [[package]] @@ -3343,6 +3656,9 @@ 
name = "regex-automata" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] [[package]] name = "regex-automata" @@ -3352,9 +3668,15 @@ checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.7.4", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.7.4" @@ -3388,7 +3710,7 @@ version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64", + "base64 0.21.2", "bytes", "encoding_rs", "futures-core", @@ -3403,7 +3725,7 @@ dependencies = [ "log", "mime", "once_cell", - "percent-encoding", + "percent-encoding 2.3.0", "pin-project-lite", "rustls", "rustls-pemfile", @@ -3413,7 +3735,7 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "url", + "url 2.4.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3421,6 +3743,19 @@ dependencies = [ "winreg", ] +[[package]] +name = "rexpect" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ff60778f96fb5a48adbe421d21bf6578ed58c0872d712e7e08593c195adff8" +dependencies = [ + "comma", + "nix 0.25.1", + "regex", + "tempfile", + "thiserror", +] + [[package]] name = "rfc6979" version = "0.3.1" @@ -3573,7 +3908,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64", + "base64 0.21.2", ] [[package]] @@ -3611,7 +3946,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db7826789c0e25614b03e5a54a0717a86f9ff6e6e5247f92b369472869320039" dependencies = [ "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", "clipboard-win", "dirs-next", "fd-lock", @@ -3868,7 +4203,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" dependencies = [ - "base64", + "base64 0.21.2", "chrono", "hex", "indexmap 1.9.3", @@ -3891,13 +4226,38 @@ dependencies = [ "syn 2.0.26", ] +[[package]] +name = "serial_test" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +dependencies = [ + "dashmap", + "futures 0.3.28", + "lazy_static", + "log", + "parking_lot 0.12.1", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.26", +] + [[package]] name = "sha2" version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest", ] @@ -3953,7 +4313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4085,12 +4445,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "str-buf" version = "1.0.6" @@ -4208,7 +4562,7 @@ version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "fastrand", "redox_syscall 0.3.5", "rustix", @@ -4301,7 +4655,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "once_cell", ] @@ -4396,6 +4750,31 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.8" @@ -4411,6 +4790,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "toml" version = "0.7.6" @@ -4540,14 +4928,31 @@ version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ + "matchers", "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", "smallvec", "thread_local", + "tracing", "tracing-core", "tracing-log", ] +[[package]] +name = "tracing-web" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e6a141feebd51f8d91ebfd785af50fca223c570b86852166caa3b141defe7c" +dependencies = [ + "js-sys", + "tracing-core", + "tracing-subscriber", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "trie-rs" version = "0.1.1" @@ -4575,6 +4980,15 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.13" @@ -4626,6 +5040,17 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "url" +version = 
"1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" +dependencies = [ + "idna 0.1.5", + "matches", + "percent-encoding 1.0.1", +] + [[package]] name = "url" version = "2.4.0" @@ -4633,8 +5058,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", - "idna", - "percent-encoding", + "idna 0.4.0", + "percent-encoding 2.3.0", "serde", ] @@ -4700,6 +5125,12 @@ dependencies = [ "try-lock", ] +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -4712,7 +5143,7 @@ version = "0.2.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "serde", "serde_json", "wasm-bindgen-macro", @@ -4733,36 +5164,13 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-downcast" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dac026d43bcca6e7ce1c0956ba68f59edf6403e8e930a5d891be72c31a44340" -dependencies = [ - "js-sys", - "once_cell", - "wasm-bindgen", - "wasm-bindgen-downcast-macros", -] - -[[package]] -name = "wasm-bindgen-downcast-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "wasm-bindgen-futures" version = "0.4.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "wasm-bindgen", "web-sys", @@ -4830,25 +5238,14 @@ dependencies = [ "leb128", ] -[[package]] -name = "wasm-logger" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074649a66bb306c8f2068c9016395fa65d8e08d2affcbf95acf3c24c3ab19718" -dependencies = [ - "log", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasmer" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cb1ae2956aac1fbbcf334c543c1143cdf7d5b0a5fb6c3d23a17bf37dd1f47b" +checksum = "ce45cc009177ca345a6d041f9062305ad467d15e7d41494f5b81ab46d62d7a58" dependencies = [ "bytes", - "cfg-if", + "cfg-if 1.0.0", "derivative", "indexmap 1.9.3", "js-sys", @@ -4860,7 +5257,6 @@ dependencies = [ "target-lexicon", "thiserror", "wasm-bindgen", - "wasm-bindgen-downcast", "wasmer-compiler", "wasmer-compiler-cranelift", "wasmer-derive", @@ -4874,13 +5270,13 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12fd9aeef339095798d1e04957d5657d97490b1112f145cbf08b98f6393b4a0a" +checksum = "e044f6140c844602b920deb4526aea3cc9c0d7cf23f00730bb9b2034669f522a" dependencies = [ "backtrace", "bytes", - "cfg-if", + "cfg-if 1.0.0", "enum-iterator", "enumset", "lazy_static", @@ -4901,9 +5297,9 @@ dependencies = [ [[package]] name = 
"wasmer-compiler-cranelift" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "344f5f1186c122756232fe7f156cc8d2e7bf333d5a658e81e25efa3415c26d07" +checksum = "32ce02358eb44a149d791c1d6648fb7f8b2f99cd55e3c4eef0474653ec8cc889" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -4920,9 +5316,9 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ac8c1f2dc0ed3c7412a5546e468365184a461f8ce7dfe2a707b621724339f91" +checksum = "c782d80401edb08e1eba206733f7859db6c997fc5a7f5fb44edc3ecd801468f6" dependencies = [ "proc-macro-error", "proc-macro2", @@ -4932,9 +5328,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a57ecbf218c0a9348d4dfbdac0f9d42d9201ae276dffb13e61ea4ff939ecce7" +checksum = "fd09e80d4d74bb9fd0ce6c3c106b1ceba1a050f9948db9d9b78ae53c172d6157" dependencies = [ "bytecheck", "enum-iterator", @@ -4948,13 +5344,13 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60c3513477bc0097250f6e34a640e2a903bb0ee57e6bb518c427f72c06ac7728" +checksum = "bdcd8a4fd36414a7b6a003dbfbd32393bce3e155d715dd877c05c1b7a41d224d" dependencies = [ "backtrace", "cc", - "cfg-if", + "cfg-if 1.0.0", "corosensei", "crossbeam-queue", "dashmap", @@ -4965,7 +5361,7 @@ dependencies = [ "lazy_static", "libc", "mach", - "memoffset 0.8.0", + "memoffset 0.9.0", "more-asserts", "region", "scopeguard", @@ -4987,7 +5383,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" dependencies = [ "indexmap 1.9.3", - "url", + "url 2.4.0", ] [[package]] @@ -5257,7 +5653,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "windows-sys 0.48.0", ] diff --git a/noir/Cargo.toml b/noir/Cargo.toml index aaf060552e7..5469a63f3dd 100644 --- a/noir/Cargo.toml +++ b/noir/Cargo.toml @@ -31,7 +31,7 @@ members = [ "acvm-repo/brillig", "acvm-repo/brillig_vm", "acvm-repo/blackbox_solver", - "acvm-repo/barretenberg_blackbox_solver", + "acvm-repo/bn254_blackbox_solver", ] default-members = ["tooling/nargo_cli"] resolver = "2" @@ -42,7 +42,7 @@ version = "0.22.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" -rust-version = "1.66" +rust-version = "1.71.1" license = "MIT OR Apache-2.0" repository = "https://github.com/noir-lang/noir/" @@ -56,7 +56,7 @@ stdlib = { version = "0.37.1", package = "acvm_stdlib", path = "acvm-repo/stdlib brillig = { version = "0.38.0", path = "acvm-repo/brillig", default-features = false } brillig_vm = { version = "0.38.0", path = "acvm-repo/brillig_vm", default-features = false } acvm_blackbox_solver = { version = "0.38.0", path = "acvm-repo/blackbox_solver", default-features = false } -barretenberg_blackbox_solver = { version = "0.38.0", path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } +bn254_blackbox_solver = { version = "0.38.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } # Noir compiler workspace dependencies arena = { path = "compiler/utils/arena" } @@ -93,6 
+93,8 @@ gloo-utils = { version = "0.1", features = ["serde"] } js-sys = "0.3.62" getrandom = "0.2" +# Debugger +dap = "0.4.1-alpha1" cfg-if = "1.0.0" clap = { version = "4.3.19", features = ["derive", "env"] } @@ -119,10 +121,12 @@ const_format = "0.2.30" num-bigint = "0.4" num-traits = "0.2" similar-asserts = "1.5.0" -log = "0.4.17" tempfile = "3.6.0" +jsonrpc = { version = "0.16.0", features = ["minreq_http"] } tracing = "0.1.40" +tracing-web = "0.1.3" +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } [profile.dev] # This is required to be able to run `cargo test` in acvm_js due to the `locals exceeds maximum` error. diff --git a/noir/README.md b/noir/README.md index 2fc47f16fef..038ce5691cd 100644 --- a/noir/README.md +++ b/noir/README.md @@ -6,7 +6,7 @@ Noir is a Domain Specific Language for SNARK proving systems. It has been design ## Quick Start -Read the installation section [here](https://noir-lang.org/getting_started/nargo_installation). +Read the installation section [here](https://noir-lang.org/docs/dev/getting_started/installation/). Once you have read through the documentation, you can visit [Awesome Noir](https://github.com/noir-lang/awesome-noir) to run some of the examples that others have created. @@ -58,7 +58,7 @@ This crate's minimum supported rustc version is 1.71.1. ## Working on this project -This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/getting_started/nargo_installation/#option-3-compile-from-source) to setup your environment for working on the project. +This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/docs/dev/getting_started/installation/other_install_methods#option-3-compile-from-source) to setup your environment for working on the project. 
### Building against a different local/remote version of Barretenberg diff --git a/noir/acvm-repo/CHANGELOG.md b/noir/acvm-repo/CHANGELOG.md index 0bd38fd3307..4dc900d87a8 100644 --- a/noir/acvm-repo/CHANGELOG.md +++ b/noir/acvm-repo/CHANGELOG.md @@ -977,7 +977,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * require `Backend` to implement `Default` trait ([#223](https://github.com/noir-lang/acvm/issues/223)) * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -1000,7 +1000,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) ([64bb346](https://github.com/noir-lang/acvm/commit/64bb346524428a0ce196826ea1e5ccde08ad6201)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/root-v0.8.1...root-v0.9.0) (2023-04-07) @@ -1114,7 +1114,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * **acir:** make PublicInputs use a BTreeSet rather than Vec ([#99](https://github.com/noir-lang/acvm/issues/99)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) * **acir:** Add keccak256 Opcode ([#91](https://github.com/noir-lang/acvm/issues/91)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -1138,7 +1138,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) ## [0.4.1] - 2023-02-08 diff --git a/noir/acvm-repo/acir/CHANGELOG.md b/noir/acvm-repo/acir/CHANGELOG.md index e31ee66379a..6551fc85262 100644 --- a/noir/acvm-repo/acir/CHANGELOG.md +++ b/noir/acvm-repo/acir/CHANGELOG.md @@ -404,7 +404,7 @@ * replace `MerkleMembership` opcode with `ComputeMerkleRoot` ([#233](https://github.com/noir-lang/acvm/issues/233)) * return 
`PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -415,7 +415,7 @@ ### Miscellaneous Chores -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/acir-v0.8.1...acir-v0.9.0) (2023-04-07) @@ -507,7 +507,7 @@ * **acir:** make PublicInputs use a BTreeSet rather than Vec ([#99](https://github.com/noir-lang/acvm/issues/99)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) * **acir:** Add keccak256 Opcode ([#91](https://github.com/noir-lang/acvm/issues/91)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -518,4 +518,4 @@ ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) diff --git a/noir/acvm-repo/acir/acir_docs.md b/noir/acvm-repo/acir/acir_docs.md index eb532c9ae0c..a124bfbc37f 100644 --- a/noir/acvm-repo/acir/acir_docs.md +++ b/noir/acvm-repo/acir/acir_docs.md @@ -6,14 +6,14 @@ This document describes the purpose of ACIR, what it is and how ACIR programs ca ## Introduction The purpose of ACIR is to make the link between a generic proving system, such as Aztec's Barretenberg, and a frontend, such as Noir, which describes user-specific computations. -More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. Noir is able to generate a proof of execution of a Noir program, using an external proving system. However, proving systems uses specific low-level constrain-based languages. Similarly, frontends have their own internal representation in order to represent user programs. +More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. Noir is able to generate a proof of execution of a Noir program, using an external proving system. However, proving systems use specific low-level constrain-based languages. 
Similarly, frontends have their own internal representation in order to represent user programs. The goal of ACIR is to provide a generic open-source intermediate representation close to proving system 'languages', but agnostic to a specific proving system, that can be used both by proving system as well as a target for frontends. So, at the end of the day, an ACIR program is just another representation of a program, dedicated to proving systems. ## Abstract Circuit Intermediate Representation ACIR stands for abstract circuit intermediate representation: - **abstract circuit**: circuits are a simple computation model where basic computation units, named gates, are connected with wires. Data flows through the wires while gates compute output wires based on their input. More formally, they are directed acyclic graphs (DAG) where the vertices are the gates and the edges are the wires. Due to the immutability nature of the wires (their value does not change during an execution), they are well suited for describing computations for ZKPs. Furthermore, we do not lose any expressiveness when using a circuit as it is well known that any bounded computation can be translated into an arithmetic circuit (i.e a circuit with only addition and multiplication gates). -The term abstract here simply mean that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not exactly use the circuit model, but another model even better suited to ZKPs, the constraint model (see below). +The term abstract here simply means that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not exactly use the circuit model, but another model even better suited to ZKPs, the constraint model (see below). - **intermediate representation**: The ACIR representation is intermediate because it lies between a frontend and its proving system. ACIR bytecode makes the link between noir compiler output and the proving system backend input. ## The constraint model @@ -32,18 +32,18 @@ For instance, if input_wire_1 and input_wire_2 values are supplied as 3 and 8, t In summary, the workflow is the following: 1. user program -> (compilation) ACIR, a list of opcodes which constrain (partial) witnesses 2. user inputs + ACIR -> (execution/solving) assign values to all the (partial) witnesses -3. witness assignement + ACIR -> (proving system) proof +3. witness assignment + ACIR -> (proving system) proof Although the ordering of opcode does not matter in theory, since a system of equations is not dependent on its ordering, in practice it matters a lot for the solving (i.e the performance of the execution). ACIR opcodes **must be ordered** so that each opcode can be resolved one after the other. -The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system need the values of all the partial witnesses and all the constraints in order to generate a proof. +The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system needs the values of all the partial witnesses and all the constraints in order to generate a proof. *Remark*: The value of a partial witness is unique and fixed throughout a program execution, although in some rare cases, multiple values are possible for a same execution and witness (when there are several valid solutions to the constraints). 
Having multiple possible values for a witness may indicate that the circuit is not safe. -*Remark*: Why do we use the term partial witnesses? It is because the proving system may create other constraints and witnesses (especially with BlackBoxFuncCall, see below). A proof refers to a full witness assignements and their constraints. ACIR opcodes and their partial witnesses are still an intermediate representation before getting the full list of constraints and witnesses. For the sake of simplicity, we will refer to witness instead of partial witness from now on. +*Remark*: Why do we use the term partial witnesses? It is because the proving system may create other constraints and witnesses (especially with BlackBoxFuncCall, see below). A proof refers to a full witness assignments and their constraints. ACIR opcodes and their partial witnesses are still an intermediate representation before getting the full list of constraints and witnesses. For the sake of simplicity, we will refer to witness instead of partial witness from now on. ## ACIR Reference @@ -51,18 +51,18 @@ We assume here that the proving system is Barretenberg. Some parameters may slig Some opcodes have inputs and outputs, which means that the output is constrained to be the result of the opcode computation from the inputs. The solver expects that all inputs are known when solving such opcodes. -Some opcodes are not constrained, which mean they will not be used by the proving system and are only used by the solver. +Some opcodes are not constrained, which means they will not be used by the proving system and are only used by the solver. Finally, some opcodes will have a predicate, whose value is 0 or 1. Its purpose is to nullify the opcode when the value is 0, so that it has no effect. Note that removing the opcode is not a solution because this modifies the circuit (the circuit being mainly the list of the opcodes). -*Remark*: Opcodes operate on witnesses, but we will see that some opcode work on Arithmetic expressions of witnesses. We call an arithmetic expression a linear combination of witnesses and/or products of two witnesses (and also a constant term). A single witness is a (simple) arithmetic expression, and conversly, an arithmetic expression can be turned into a single witness using an arithmetic opcode (see below). So basically, using witnesses or arithmetic expressions is equivalent, but the latter can avoid the creation of witness in some cases. +*Remark*: Opcodes operate on witnesses, but we will see that some opcode work on expressions of witnesses. We call an expression a linear combination of witnesses and/or products of two witnesses (and also a constant term). A single witness is a (simple) expression, and conversely, an expression can be turned into a single witness using an assert-zero opcode (see below). So basically, using witnesses or expressions is equivalent, but the latter can avoid the creation of witness in some cases. -### Arithmetic opcode -An arithmetic opcode adds the constraint that P(w) = 0, where w=(w_1,..w_n) is a tuple of n witnesses, and P is a multi-variate polynomial of total degree at most 2. +### AssertZero opcode +An AssertZero opcode adds the constraint that P(w) = 0, where w=(w_1,..w_n) is a tuple of n witnesses, and P is a multi-variate polynomial of total degree at most 2. The coefficients ${q_M}_{i,j}, q_i,q_c$ of the polynomial are known values which define the opcode. 
-A general expression of arithmetic opcode is the following: $\sum_{i,j} {q_M}_{i,j}w_iw_j + \sum_i q_iw_i +q_c = 0$ +A general expression of assert-zero opcode is the following: $\sum_{i,j} {q_M}_{i,j}w_iw_j + \sum_i q_iw_i +q_c = 0$ -An arithmetic opcode can be used to: +An assert-zero opcode can be used to: - **express a constraint** on witnesses; for instance to express that a witness $w$ is a boolean, you can add the opcode: $w*w-w=0$ - or, to **compute the value** of an arithmetic operation of some inputs. For instance, to multiply two witnesses $x$ and $y$, you would use the opcode $z-x*y=0$, which would constraint $z$ to be $x*y$. @@ -70,8 +70,8 @@ An arithmetic opcode can be used to: The solver expects that at most one witness is not known when executing the opcode. ### BlackBoxFuncCall opcode -These opcodes represent a specific computation. Even if any computation can be done using only arithmetic opcodes, it is not always efficient. Some proving systems, and in particular the proving system from Aztec, can implement several computations more efficiently using for instance look-up tables. The BlackBoxFuncCall opcode is used to ask the proving system to handle the computation by itself. -All black box functions takes as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output. +These opcodes represent a specific computation. Even if any computation can be done using only assert-zero opcodes, it is not always efficient. Some proving systems, and in particular the proving system from Aztec, can implement several computations more efficiently using for instance look-up tables. The BlackBoxFuncCall opcode is used to ask the proving system to handle the computation by itself. +All black box functions take as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output. Some more advanced computations assume that the proving system has an 'embedded curve'. It is a curve that cycle with the main curve of the proving system, i.e the scalar field of the embedded curve is the base field of the main one, and vice-versa. The curves used by the proving system are dependent on the proving system (and/or its configuration). Aztec's Barretenberg uses BN254 as the main curve and Grumpkin as the embedded curve. The black box functions supported by ACIR are: @@ -91,11 +91,11 @@ input: (witness, bit_size) **SHA256**: computes sha256 of the inputs - inputs are a byte array, i.e a vector of (FieldElement, 8) -- output is a byte array of len 32, i.e a vector of 32 (FieldElement, 8), constrainted to be the sha256 of the inputs. +- output is a byte array of len 32, i.e a vector of 32 (FieldElement, 8), constrained to be the sha256 of the inputs. **Blake2s**: computes the Blake2s hash of the inputs, as specified in https://tools.ietf.org/html/rfc7693 - inputs are a byte array, i.e a vector of (FieldElement, 8) -- output is a byte array of length 32, i.e a vector of 32 (FieldElement, 8), constrainted to be the blake2s of the inputs. +- output is a byte array of length 32, i.e a vector of 32 (FieldElement, 8), constrained to be the blake2s of the inputs. **SchnorrVerify**: Verify a Schnorr signature over the embedded curve @@ -114,7 +114,7 @@ The proving system decides how the message is to be hashed. 
Barretenberg uses Bl - output: 2 witnesses representing the x,y coordinates of the resulting Grumpkin point - domain separator: a constant public value (a field element) that you can use so that the commitment also depends on the domain separator. Noir uses 0 as domain separator. -The backend should handle proper conversion between the inputs being ACIR field elements and the scalar field of the embedded curve. In the case of Aztec's Barretenberg, the latter is bigger than the ACIR field so it is straightforward. The Peredersen generators are managed by the proving system. +The backend should handle proper conversion between the inputs being ACIR field elements and the scalar field of the embedded curve. In the case of Aztec's Barretenberg, the latter is bigger than the ACIR field so it is straightforward. The Pedersen generators are managed by the proving system. **PedersenHash**: Computes a Pedersen commitments of the inputs and their number, using generators of the embedded curve @@ -163,7 +163,7 @@ $a=low+high*2^{128},$ with $low, high < 2^{128}$ - verification_key: Vector of (FieldElement, 254) representing the verification key of the circuit being verified - public_inputs: Vector of (FieldElement, 254) representing the public inputs corresponding to the proof being verified - key_hash: one (FieldElement, 254). It should be the hash of the verification key. Barretenberg expects the Pedersen hash of the verification key -- input_aggregation_object: an optional vector of (FieldElement, 254). It is a blob of data specific to the proving sytem. +- input_aggregation_object: an optional vector of (FieldElement, 254). It is a blob of data specific to the proving system. - output_aggregation_object: Some witnesses returned by the function, representing some data internal to the proving system. This black box function does not fully verify a proof, what it does is verifying that the key_hash is indeed a hash of verification_key, allowing the user to use the verification key as private inputs and only have the key_hash as public input, which is more performant. @@ -179,8 +179,8 @@ This opcode is used as a hint for the solver when executing (solving) the circui - bytecode: assembly code representing the computation to perform within this opcode. The noir assembly specification is not part of this document. - predicate: an arithmetic expression that disable the opcode when it is null. -Let's see an example with euclidian division. -The normal way to compute a/b, where a and b are 8-bits integers, is to implement Euclid algorithm which computes in a loop (or recursively) modulos of the kind 'a mod b'. Doing this computation requires a lot of steps to be properly implemented in ACIR, especially the loop with a condition. 
However, euclidian division can be easily constrained with one arithmetic opcode: a = bq+r, assuming q is 8 bits and r bincodeSerialize() const; - static Arithmetic bincodeDeserialize(std::vector); + static AssertZero bincodeDeserialize(std::vector); }; struct BlackBoxFuncCall { @@ -818,7 +818,7 @@ namespace Circuit { static MemoryInit bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const Opcode&, const Opcode&); std::vector bincodeSerialize() const; @@ -4268,20 +4268,20 @@ Circuit::Opcode serde::Deserializable::deserialize(Deserializer namespace Circuit { - inline bool operator==(const Opcode::Arithmetic &lhs, const Opcode::Arithmetic &rhs) { + inline bool operator==(const Opcode::AssertZero &lhs, const Opcode::AssertZero &rhs) { if (!(lhs.value == rhs.value)) { return false; } return true; } - inline std::vector Opcode::Arithmetic::bincodeSerialize() const { + inline std::vector Opcode::AssertZero::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline Opcode::Arithmetic Opcode::Arithmetic::bincodeDeserialize(std::vector input) { + inline Opcode::AssertZero Opcode::AssertZero::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -4292,14 +4292,14 @@ namespace Circuit { template <> template -void serde::Serializable::serialize(const Circuit::Opcode::Arithmetic &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Circuit::Opcode::AssertZero &obj, Serializer &serializer) { serde::Serializable::serialize(obj.value, serializer); } template <> template -Circuit::Opcode::Arithmetic serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::Opcode::Arithmetic obj; +Circuit::Opcode::AssertZero serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::Opcode::AssertZero obj; obj.value = serde::Deserializable::deserialize(deserializer); return obj; } diff --git a/noir/acvm-repo/acir/src/circuit/mod.rs b/noir/acvm-repo/acir/src/circuit/mod.rs index e42bebc52ac..48c6b14ce69 100644 --- a/noir/acvm-repo/acir/src/circuit/mod.rs +++ b/noir/acvm-repo/acir/src/circuit/mod.rs @@ -335,7 +335,7 @@ mod tests { let circuit = Circuit { current_witness_index: 0, opcodes: vec![ - Opcode::Arithmetic(crate::native_types::Expression { + Opcode::AssertZero(crate::native_types::Expression { mul_terms: vec![], linear_combinations: vec![], q_c: FieldElement::from(8u128), diff --git a/noir/acvm-repo/acir/src/circuit/opcodes.rs b/noir/acvm-repo/acir/src/circuit/opcodes.rs index 0e15fe3757c..ac5ea0b8a69 100644 --- a/noir/acvm-repo/acir/src/circuit/opcodes.rs +++ b/noir/acvm-repo/acir/src/circuit/opcodes.rs @@ -13,7 +13,7 @@ pub use memory_operation::{BlockId, MemOp}; #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { - Arithmetic(Expression), + AssertZero(Expression), /// Calls to "gadgets" which rely on backends implementing support for specialized constraints. /// /// Often used for exposing more efficient implementations of SNARK-unfriendly computations. 
@@ -36,7 +36,7 @@ pub enum Opcode { impl std::fmt::Display for Opcode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Opcode::Arithmetic(expr) => { + Opcode::AssertZero(expr) => { write!(f, "EXPR [ ")?; for i in &expr.mul_terms { write!(f, "({}, _{}, _{}) ", i.0, i.1.witness_index(), i.2.witness_index())?; diff --git a/noir/acvm-repo/acir/src/native_types/expression/mod.rs b/noir/acvm-repo/acir/src/native_types/expression/mod.rs index fe729720663..402aa3eb3a6 100644 --- a/noir/acvm-repo/acir/src/native_types/expression/mod.rs +++ b/noir/acvm-repo/acir/src/native_types/expression/mod.rs @@ -8,7 +8,7 @@ mod ordering; // In the addition polynomial // We can have arbitrary fan-in/out, so we need more than wL,wR and wO -// When looking at the arithmetic opcode for the quotient polynomial in standard plonk +// When looking at the assert-zero opcode for the quotient polynomial in standard plonk // You can think of it as fan-in 2 and fan out-1 , or you can think of it as fan-in 1 and fan-out 2 // // In the multiplication polynomial @@ -16,7 +16,7 @@ mod ordering; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct Expression { // To avoid having to create intermediate variables pre-optimization - // We collect all of the multiplication terms in the arithmetic opcode + // We collect all of the multiplication terms in the assert-zero opcode // A multiplication term if of the form q_M * wL * wR // Hence this vector represents the following sum: q_M1 * wL1 * wR1 + q_M2 * wL2 * wR2 + .. + pub mul_terms: Vec<(FieldElement, Witness, Witness)>, @@ -42,7 +42,7 @@ impl std::fmt::Display for Expression { if let Some(witness) = self.to_witness() { write!(f, "x{}", witness.witness_index()) } else { - write!(f, "%{:?}%", crate::circuit::opcodes::Opcode::Arithmetic(self.clone())) + write!(f, "%{:?}%", crate::circuit::opcodes::Opcode::AssertZero(self.clone())) } } } @@ -178,7 +178,13 @@ impl Expression { self.linear_combinations.sort_by(|a, b| a.1.cmp(&b.1)); } - /// Checks if this polynomial can fit into one arithmetic identity + /// Checks if this expression can fit into one arithmetic identity + /// TODO: This needs to be reworded, arithmetic identity only makes sense in the context + /// TODO of PLONK, whereas we want expressions to be generic. + /// TODO: We just need to reword it to say exactly what its doing and + /// TODO then reference the fact that this is what plonk will accept. + /// TODO alternatively, we can define arithmetic identity in the context of expressions + /// TODO and then reference that. 
pub fn fits_in_one_identity(&self, width: usize) -> bool { // A Polynomial with more than one mul term cannot fit into one opcode if self.mul_terms.len() > 1 { diff --git a/noir/acvm-repo/acir/tests/test_program_serialization.rs b/noir/acvm-repo/acir/tests/test_program_serialization.rs index ff69ba34437..1f25b665573 100644 --- a/noir/acvm-repo/acir/tests/test_program_serialization.rs +++ b/noir/acvm-repo/acir/tests/test_program_serialization.rs @@ -24,7 +24,7 @@ use brillig::{HeapArray, RegisterIndex, RegisterOrMemory}; #[test] fn addition_circuit() { - let addition = Opcode::Arithmetic(Expression { + let addition = Opcode::AssertZero(Expression { mul_terms: Vec::new(), linear_combinations: vec![ (FieldElement::one(), Witness(1)), diff --git a/noir/acvm-repo/acir_field/src/generic_ark.rs b/noir/acvm-repo/acir_field/src/generic_ark.rs index 5c70d3cda37..542e291982b 100644 --- a/noir/acvm-repo/acir_field/src/generic_ark.rs +++ b/noir/acvm-repo/acir_field/src/generic_ark.rs @@ -2,6 +2,7 @@ use ark_ff::PrimeField; use ark_ff::Zero; use num_bigint::BigUint; use serde::{Deserialize, Serialize}; +use std::borrow::Cow; // XXX: Switch out for a trait and proper implementations // This implementation is in-efficient, can definitely remove hex usage and Iterator instances for trivial functionality @@ -125,8 +126,8 @@ impl<'de, T: ark_ff::PrimeField> Deserialize<'de> for FieldElement { where D: serde::Deserializer<'de>, { - let s = <&str>::deserialize(deserializer)?; - match Self::from_hex(s) { + let s: Cow<'de, str> = Deserialize::deserialize(deserializer)?; + match Self::from_hex(&s) { Some(value) => Ok(value), None => Err(serde::de::Error::custom(format!("Invalid hex for FieldElement: {s}",))), } diff --git a/noir/acvm-repo/acvm/CHANGELOG.md b/noir/acvm-repo/acvm/CHANGELOG.md index 29a4aa93adc..2a340f649f5 100644 --- a/noir/acvm-repo/acvm/CHANGELOG.md +++ b/noir/acvm-repo/acvm/CHANGELOG.md @@ -537,7 +537,7 @@ * require `Backend` to implement `Default` trait ([#223](https://github.com/noir-lang/acvm/issues/223)) * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -557,7 +557,7 @@ ### Miscellaneous Chores * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) ([64bb346](https://github.com/noir-lang/acvm/commit/64bb346524428a0ce196826ea1e5ccde08ad6201)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/acvm-v0.8.1...acvm-v0.9.0) (2023-04-07) @@ -641,7 +641,7 @@ * update `ProofSystemCompiler` to not take ownership of keys ([#111](https://github.com/noir-lang/acvm/issues/111)) * update `ProofSystemCompiler` methods to take `&Circuit` ([#108](https://github.com/noir-lang/acvm/issues/108)) * refactor ToRadix to ToRadixLe and 
ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -659,4 +659,4 @@ ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) diff --git a/noir/acvm-repo/acvm/Cargo.toml b/noir/acvm-repo/acvm/Cargo.toml index 5fdf44cbd5e..f5819d6fa34 100644 --- a/noir/acvm-repo/acvm/Cargo.toml +++ b/noir/acvm-repo/acvm/Cargo.toml @@ -16,7 +16,7 @@ repository.workspace = true num-bigint.workspace = true num-traits.workspace = true thiserror.workspace = true -log.workspace = true +tracing.workspace = true acir.workspace = true brillig_vm.workspace = true diff --git a/noir/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/noir/acvm-repo/acvm/src/compiler/optimizers/mod.rs index 85a97c2c7dc..923756580b3 100644 --- a/noir/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/noir/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -6,6 +6,7 @@ mod unused_memory; pub(crate) use general::GeneralOptimizer; pub(crate) use redundant_range::RangeOptimizer; +use tracing::info; use self::unused_memory::UnusedMemoryOptimizer; @@ -23,16 +24,17 @@ pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. +#[tracing::instrument(level = "trace", name = "optimize_acir" skip(acir))] pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, Vec) { - log::trace!("Start circuit optimization"); + info!("Number of opcodes before: {}", acir.opcodes.len()); // General optimizer pass let opcodes: Vec = acir .opcodes .into_iter() .map(|opcode| { - if let Opcode::Arithmetic(arith_expr) = opcode { - Opcode::Arithmetic(GeneralOptimizer::optimize(arith_expr)) + if let Opcode::AssertZero(arith_expr) = opcode { + Opcode::AssertZero(GeneralOptimizer::optimize(arith_expr)) } else { opcode } @@ -54,7 +56,7 @@ pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, Vec) { let (acir, acir_opcode_positions) = range_optimizer.replace_redundant_ranges(acir_opcode_positions); - log::trace!("Finish circuit optimization"); + info!("Number of opcodes after: {}", acir.opcodes.len()); (acir, acir_opcode_positions) } diff --git a/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 766d3674113..5d19f9629ba 100644 --- a/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/noir/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -48,25 +48,51 @@ impl RangeOptimizer { /// only store the fact that we have constrained it to /// be 16 bits. 
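The map bookkeeping in the `collect_ranges` hunk that follows reduces to keeping, for each witness, the smallest number of bits it has been constrained to. A standalone, std-only sketch of that update pattern (witness indices are modelled as plain `u32` here purely for illustration):

```rust
use std::collections::BTreeMap;

/// Remember only the tightest (smallest) range constraint seen for each witness.
fn record_range(ranges: &mut BTreeMap<u32, u32>, witness: u32, num_bits: u32) {
    ranges
        .entry(witness)
        .and_modify(|bits| *bits = (*bits).min(num_bits))
        .or_insert(num_bits);
}

fn main() {
    let mut ranges = BTreeMap::new();
    record_range(&mut ranges, 1, 32); // RANGE(w1, 32)
    record_range(&mut ranges, 1, 16); // RANGE(w1, 16) is tighter and wins
    assert_eq!(ranges[&1], 16);       // the 32-bit constraint is now redundant
}
```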
fn collect_ranges(circuit: &Circuit) -> BTreeMap { - let mut witness_to_bit_sizes = BTreeMap::new(); + let mut witness_to_bit_sizes: BTreeMap = BTreeMap::new(); for opcode in &circuit.opcodes { - // Extract the witness index and number of bits, - // if it is a range constraint - let (witness, num_bits) = match extract_range_opcode(opcode) { - Some(func_inputs) => func_inputs, - None => continue, + let Some((witness, num_bits)) = (match opcode { + Opcode::AssertZero(expr) => { + // If the opcode is constraining a witness to be equal to a value then it can be considered + // as a range opcode for the number of bits required to hold that value. + if expr.is_degree_one_univariate() { + let (k, witness) = expr.linear_combinations[0]; + let constant = expr.q_c; + let witness_value = -constant / k; + + if witness_value.is_zero() { + Some((witness, 0)) + } else { + // We subtract off 1 bit from the implied witness value to give the weakest range constraint + // which would be stricter than the constraint imposed by this opcode. + let implied_range_constraint_bits = witness_value.num_bits() - 1; + Some((witness, implied_range_constraint_bits)) + } + } else { + None + } + } + + + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { + input: FunctionInput { witness, num_bits }, + }) => { + Some((*witness, *num_bits)) + } + + _ => None, + }) else { + continue; }; // Check if the witness has already been recorded and if the witness // size is more than the current one, we replace it - let should_replace = match witness_to_bit_sizes.get(&witness).copied() { - Some(old_range_bits) => old_range_bits > num_bits, - None => true, - }; - if should_replace { - witness_to_bit_sizes.insert(witness, num_bits); - } + witness_to_bit_sizes + .entry(witness) + .and_modify(|old_range_bits| { + *old_range_bits = std::cmp::min(*old_range_bits, num_bits); + }) + .or_insert(num_bits); } witness_to_bit_sizes } @@ -116,23 +142,17 @@ impl RangeOptimizer { /// Extract the range opcode from the `Opcode` enum /// Returns None, if `Opcode` is not the range opcode. fn extract_range_opcode(opcode: &Opcode) -> Option<(Witness, u32)> { - // Range constraints are blackbox function calls - // so we first extract the function call - let func_call = match opcode { - acir::circuit::Opcode::BlackBoxFuncCall(func_call) => func_call, - _ => return None, - }; - - // Skip if it is not a range constraint - match func_call { - BlackBoxFuncCall::RANGE { input } => Some((input.witness, input.num_bits)), + match opcode { + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input }) => { + Some((input.witness, input.num_bits)) + } _ => None, } } fn optimized_range_opcode(witness: Witness, num_bits: u32) -> Opcode { if num_bits == 1 { - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(FieldElement::one(), witness, witness)], linear_combinations: vec![(-FieldElement::one(), witness)], q_c: FieldElement::zero(), @@ -234,16 +254,29 @@ mod tests { #[test] fn non_range_opcodes() { // The optimizer should not remove or change non-range opcodes - // The four Arithmetic opcodes should remain unchanged. + // The four AssertZero opcodes should remain unchanged. 
let mut circuit = test_circuit(vec![(Witness(1), 16), (Witness(1), 16)]); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); let acir_opcode_positions = circuit.opcodes.iter().enumerate().map(|(i, _)| i).collect(); let optimizer = RangeOptimizer::new(circuit); let (optimized_circuit, _) = optimizer.replace_redundant_ranges(acir_opcode_positions); assert_eq!(optimized_circuit.opcodes.len(), 5); } + + #[test] + fn constant_implied_ranges() { + // The optimizer should use knowledge about constant witness assignments to remove range opcodes. + let mut circuit = test_circuit(vec![(Witness(1), 16)]); + + circuit.opcodes.push(Opcode::AssertZero(Witness(1).into())); + let acir_opcode_positions = circuit.opcodes.iter().enumerate().map(|(i, _)| i).collect(); + let optimizer = RangeOptimizer::new(circuit); + let (optimized_circuit, _) = optimizer.replace_redundant_ranges(acir_opcode_positions); + assert_eq!(optimized_circuit.opcodes.len(), 1); + assert_eq!(optimized_circuit.opcodes[0], Opcode::AssertZero(Witness(1).into())); + } } diff --git a/noir/acvm-repo/acvm/src/compiler/transformers/csat.rs b/noir/acvm-repo/acvm/src/compiler/transformers/csat.rs index 0d1ab87aae5..9e2e3091c74 100644 --- a/noir/acvm-repo/acvm/src/compiler/transformers/csat.rs +++ b/noir/acvm-repo/acvm/src/compiler/transformers/csat.rs @@ -62,7 +62,7 @@ impl CSatTransformer { } // Still missing dead witness optimization. - // To do this, we will need the whole set of arithmetic opcodes + // To do this, we will need the whole set of assert-zero opcodes // I think it can also be done before the local optimization seen here, as dead variables will come from the user pub(crate) fn transform( &mut self, @@ -84,7 +84,7 @@ impl CSatTransformer { opcode } - // This optimization will search for combinations of terms which can be represented in a single arithmetic opcode + // This optimization will search for combinations of terms which can be represented in a single assert-zero opcode // Case 1 : qM * wL * wR + qL * wL + qR * wR + qO * wO + qC // This polynomial does not require any further optimizations, it can be safely represented in one opcode // ie a polynomial with 1 mul(bi-variate) term and 3 (univariate) terms where 2 of those terms match the bivariate term @@ -93,13 +93,13 @@ impl CSatTransformer { // // // Case 2: qM * wL * wR + qL * wL + qR * wR + qO * wO + qC + qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 - // This polynomial cannot be represented using one arithmetic opcode. + // This polynomial cannot be represented using one assert-zero opcode. // // This algorithm will first extract the first full opcode(if possible): // t = qM * wL * wR + qL * wL + qR * wR + qO * wO + qC // // The polynomial now looks like so t + qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 - // This polynomial cannot be represented using one arithmetic opcode. + // This polynomial cannot be represented using one assert-zero opcode. 
// // This algorithm will then extract the second full opcode(if possible): // t2 = qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 @@ -121,7 +121,7 @@ impl CSatTransformer { // If the opcode only has one mul term, then this algorithm cannot optimize it any further // Either it can be represented in a single arithmetic equation or it's fan-in is too large and we need intermediate variables for those // large-fan-in optimization is not this algorithms purpose. - // If the opcode has 0 mul terms, then it is an add opcode and similarly it can either fit into a single arithmetic opcode or it has a large fan-in + // If the opcode has 0 mul terms, then it is an add opcode and similarly it can either fit into a single assert-zero opcode or it has a large fan-in if opcode.mul_terms.len() <= 1 { return opcode; } @@ -194,7 +194,7 @@ impl CSatTransformer { } } - // Now we have used up 2 spaces in our arithmetic opcode. The width now dictates, how many more we can add + // Now we have used up 2 spaces in our assert-zero opcode. The width now dictates, how many more we can add let mut remaining_space = self.width - 2 - 1; // We minus 1 because we need an extra space to contain the intermediate variable // Keep adding terms until we have no more left, or we reach the width let mut remaining_linear_terms = @@ -325,7 +325,7 @@ impl CSatTransformer { // Then use intermediate variables again to squash the fan-in, so that it can fit into the appropriate width // First check if this polynomial actually needs a partial opcode optimization - // There is the chance that it fits perfectly within the arithmetic opcode + // There is the chance that it fits perfectly within the assert-zero opcode if opcode.fits_in_one_identity(self.width) { return opcode; } diff --git a/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs b/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs index 003124f8b63..7f4e6540e1a 100644 --- a/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/noir/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -37,13 +37,12 @@ pub fn transform( /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. /// /// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. 
+#[tracing::instrument(level = "trace", name = "transform_acir", skip(acir, acir_opcode_positions))] pub(super) fn transform_internal( acir: Circuit, expression_width: ExpressionWidth, acir_opcode_positions: Vec, ) -> (Circuit, Vec) { - log::trace!("Start circuit transformation"); - let mut transformer = match &expression_width { crate::ExpressionWidth::Unbounded => { let transformer = R1CSTransformer::new(acir); @@ -63,7 +62,7 @@ pub(super) fn transform_internal( // TODO or at the very least, we could put all of it inside of CSatOptimizer pass let mut new_acir_opcode_positions: Vec = Vec::with_capacity(acir_opcode_positions.len()); - // Optimize the arithmetic gates by reducing them into the correct width and + // Optimize the assert-zero gates by reducing them into the correct width and // creating intermediate variables when necessary let mut transformed_opcodes = Vec::new(); @@ -73,7 +72,7 @@ pub(super) fn transform_internal( let mut intermediate_variables: IndexMap = IndexMap::new(); for (index, opcode) in acir.opcodes.into_iter().enumerate() { match opcode { - Opcode::Arithmetic(arith_expr) => { + Opcode::AssertZero(arith_expr) => { let len = intermediate_variables.len(); let arith_expr = transformer.transform( @@ -96,7 +95,7 @@ pub(super) fn transform_internal( new_opcodes.push(arith_expr); for opcode in new_opcodes { new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(Opcode::Arithmetic(opcode)); + transformed_opcodes.push(Opcode::AssertZero(opcode)); } } Opcode::BlackBoxFuncCall(ref func) => { @@ -207,7 +206,5 @@ pub(super) fn transform_internal( ..acir }; - log::trace!("Finish circuit transformation"); - (acir, new_acir_opcode_positions) } diff --git a/noir/acvm-repo/acvm/src/pwg/arithmetic.rs b/noir/acvm-repo/acvm/src/pwg/arithmetic.rs index 93a39fb249c..81462ea495e 100644 --- a/noir/acvm-repo/acvm/src/pwg/arithmetic.rs +++ b/noir/acvm-repo/acvm/src/pwg/arithmetic.rs @@ -5,9 +5,9 @@ use acir::{ use super::{insert_value, ErrorLocation, OpcodeNotSolvable, OpcodeResolutionError}; -/// An Arithmetic solver will take a Circuit's arithmetic opcodes with witness assignments +/// An Expression solver will take a Circuit's assert-zero opcodes with witness assignments /// and create the other witness variables -pub(super) struct ArithmeticSolver; +pub(super) struct ExpressionSolver; #[allow(clippy::enum_variant_names)] pub(super) enum OpcodeStatus { @@ -22,17 +22,17 @@ pub(crate) enum MulTerm { Solved(FieldElement), } -impl ArithmeticSolver { +impl ExpressionSolver { /// Derives the rest of the witness based on the initial low level variables pub(super) fn solve( initial_witness: &mut WitnessMap, opcode: &Expression, ) -> Result<(), OpcodeResolutionError> { - let opcode = &ArithmeticSolver::evaluate(opcode, initial_witness); + let opcode = &ExpressionSolver::evaluate(opcode, initial_witness); // Evaluate multiplication term - let mul_result = ArithmeticSolver::solve_mul_term(opcode, initial_witness); + let mul_result = ExpressionSolver::solve_mul_term(opcode, initial_witness); // Evaluate the fan-in terms - let opcode_status = ArithmeticSolver::solve_fan_in_term(opcode, initial_witness); + let opcode_status = ExpressionSolver::solve_fan_in_term(opcode, initial_witness); match (mul_result, opcode_status) { (MulTerm::TooManyUnknowns, _) | (_, OpcodeStatus::OpcodeUnsolvable) => { @@ -126,7 +126,7 @@ impl ArithmeticSolver { } } - /// Returns the evaluation of the multiplication term in the arithmetic opcode + /// Returns the evaluation of the 
multiplication term in the expression /// If the witness values are not known, then the function returns a None /// XXX: Do we need to account for the case where 5xy + 6x = 0 ? We do not know y, but it can be solved given x . But I believe x can be solved with another opcode /// XXX: What about making a mul opcode = a constant 5xy + 7 = 0 ? This is the same as the above. @@ -135,11 +135,11 @@ impl ArithmeticSolver { // We are assuming it has been optimized. match arith_opcode.mul_terms.len() { 0 => MulTerm::Solved(FieldElement::zero()), - 1 => ArithmeticSolver::solve_mul_term_helper( + 1 => ExpressionSolver::solve_mul_term_helper( &arith_opcode.mul_terms[0], witness_assignments, ), - _ => panic!("Mul term in the arithmetic opcode must contain either zero or one term"), + _ => panic!("Mul term in the assert-zero opcode must contain either zero or one term"), } } @@ -186,7 +186,7 @@ impl ArithmeticSolver { let mut result = FieldElement::zero(); for term in arith_opcode.linear_combinations.iter() { - let value = ArithmeticSolver::solve_fan_in_term_helper(term, witness_assignments); + let value = ExpressionSolver::solve_fan_in_term_helper(term, witness_assignments); match value { Some(a) => result += a, None => { @@ -212,7 +212,7 @@ impl ArithmeticSolver { pub(super) fn evaluate(expr: &Expression, initial_witness: &WitnessMap) -> Expression { let mut result = Expression::default(); for &(c, w1, w2) in &expr.mul_terms { - let mul_result = ArithmeticSolver::solve_mul_term_helper(&(c, w1, w2), initial_witness); + let mul_result = ExpressionSolver::solve_mul_term_helper(&(c, w1, w2), initial_witness); match mul_result { MulTerm::OneUnknown(v, w) => { if !v.is_zero() { @@ -228,7 +228,7 @@ impl ArithmeticSolver { } } for &(c, w) in &expr.linear_combinations { - if let Some(f) = ArithmeticSolver::solve_fan_in_term_helper(&(c, w), initial_witness) { + if let Some(f) = ExpressionSolver::solve_fan_in_term_helper(&(c, w), initial_witness) { result.q_c += f; } else if !c.is_zero() { result.linear_combinations.push((c, w)); @@ -240,7 +240,7 @@ impl ArithmeticSolver { } #[test] -fn arithmetic_smoke_test() { +fn expression_solver_smoke_test() { let a = Witness(0); let b = Witness(1); let c = Witness(2); @@ -274,8 +274,8 @@ fn arithmetic_smoke_test() { values.insert(c, FieldElement::from(1_i128)); values.insert(d, FieldElement::from(1_i128)); - assert_eq!(ArithmeticSolver::solve(&mut values, &opcode_a), Ok(())); - assert_eq!(ArithmeticSolver::solve(&mut values, &opcode_b), Ok(())); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_a), Ok(())); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_b), Ok(())); assert_eq!(values.get(&a).unwrap(), &FieldElement::from(4_i128)); } diff --git a/noir/acvm-repo/acvm/src/pwg/memory_op.rs b/noir/acvm-repo/acvm/src/pwg/memory_op.rs index 42951dfa3c1..c1da2cd95cf 100644 --- a/noir/acvm-repo/acvm/src/pwg/memory_op.rs +++ b/noir/acvm-repo/acvm/src/pwg/memory_op.rs @@ -6,7 +6,7 @@ use acir::{ FieldElement, }; -use super::{arithmetic::ArithmeticSolver, get_value, insert_value, witness_to_value}; +use super::{arithmetic::ExpressionSolver, get_value, insert_value, witness_to_value}; use super::{ErrorLocation, OpcodeResolutionError}; type MemoryIndex = u32; @@ -75,7 +75,7 @@ impl MemoryOpSolver { // // In read operations, this corresponds to the witness index at which the value from memory will be written. // In write operations, this corresponds to the expression which will be written to memory. 
- let value = ArithmeticSolver::evaluate(&op.value, initial_witness); + let value = ExpressionSolver::evaluate(&op.value, initial_witness); // `operation == 0` implies a read operation. (`operation == 1` implies write operation). let is_read_operation = operation.is_zero(); diff --git a/noir/acvm-repo/acvm/src/pwg/mod.rs b/noir/acvm-repo/acvm/src/pwg/mod.rs index 859ad010dcd..41b96572658 100644 --- a/noir/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/acvm-repo/acvm/src/pwg/mod.rs @@ -10,7 +10,7 @@ use acir::{ }; use acvm_blackbox_solver::BlackBoxResolutionError; -use self::{arithmetic::ArithmeticSolver, directives::solve_directives, memory_op::MemoryOpSolver}; +use self::{arithmetic::ExpressionSolver, directives::solve_directives, memory_op::MemoryOpSolver}; use crate::BlackBoxFunctionSolver; use thiserror::Error; @@ -69,8 +69,8 @@ pub enum StepResult<'a, B: BlackBoxFunctionSolver> { // The most common being that one of its input has not been // assigned a value. // -// TODO: ExpressionHasTooManyUnknowns is specific for arithmetic expressions -// TODO: we could have a error enum for arithmetic failure cases in that module +// TODO: ExpressionHasTooManyUnknowns is specific for expression solver +// TODO: we could have a error enum for expression solver failure cases in that module // TODO that can be converted into an OpcodeNotSolvable or OpcodeResolutionError enum #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum OpcodeNotSolvable { @@ -253,7 +253,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { let opcode = &self.opcodes[self.instruction_pointer]; let resolution = match opcode { - Opcode::Arithmetic(expr) => ArithmeticSolver::solve(&mut self.witness_map, expr), + Opcode::AssertZero(expr) => ExpressionSolver::solve(&mut self.witness_map, expr), Opcode::BlackBoxFuncCall(bb_func) => { blackbox::solve(self.backend, &mut self.witness_map, bb_func) } @@ -397,7 +397,7 @@ pub fn get_value( expr: &Expression, initial_witness: &WitnessMap, ) -> Result { - let expr = ArithmeticSolver::evaluate(expr, initial_witness); + let expr = ExpressionSolver::evaluate(expr, initial_witness); match expr.to_const() { Some(value) => Ok(value), None => Err(OpcodeResolutionError::OpcodeNotSolvable( diff --git a/noir/acvm-repo/acvm/tests/solver.rs b/noir/acvm-repo/acvm/tests/solver.rs index d578555c5dc..b4011a994a5 100644 --- a/noir/acvm-repo/acvm/tests/solver.rs +++ b/noir/acvm-repo/acvm/tests/solver.rs @@ -111,18 +111,18 @@ fn inversion_brillig_oracle_equivalence() { let opcodes = vec![ Opcode::Brillig(brillig_data), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, }), // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(fe_1, w_z, w_z_inverse)], linear_combinations: vec![], q_c: -fe_1, }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], q_c: fe_0, @@ -238,18 +238,18 @@ fn double_inversion_brillig_oracle() { let opcodes = vec![ Opcode::Brillig(brillig_data), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, }), // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(fe_1, 
w_z, w_z_inverse)], linear_combinations: vec![], q_c: -fe_1, }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], q_c: fe_0, @@ -377,9 +377,9 @@ fn oracle_dependent_execution() { }; let opcodes = vec![ - Opcode::Arithmetic(equality_check), + Opcode::AssertZero(equality_check), Opcode::Brillig(brillig_data), - Opcode::Arithmetic(inverse_equality_check), + Opcode::AssertZero(inverse_equality_check), ]; let witness_assignments = @@ -516,7 +516,7 @@ fn unsatisfied_opcode_resolved() { values.insert(c, FieldElement::from(1_i128)); values.insert(d, FieldElement::from(2_i128)); - let opcodes = vec![Opcode::Arithmetic(opcode_a)]; + let opcodes = vec![Opcode::AssertZero(opcode_a)]; let mut acvm = ACVM::new(&StubbedBackend, &opcodes, values); let solver_status = acvm.solve(); assert_eq!( @@ -595,7 +595,7 @@ fn unsatisfied_opcode_resolved_brillig() { values.insert(w_y, FieldElement::from(1_i128)); values.insert(w_result, FieldElement::from(0_i128)); - let opcodes = vec![brillig_opcode, Opcode::Arithmetic(opcode_a)]; + let opcodes = vec![brillig_opcode, Opcode::AssertZero(opcode_a)]; let mut acvm = ACVM::new(&StubbedBackend, &opcodes, values); let solver_status = acvm.solve(); @@ -630,7 +630,7 @@ fn memory_operations() { predicate: None, }; - let expression = Opcode::Arithmetic(Expression { + let expression = Opcode::AssertZero(Expression { mul_terms: Vec::new(), linear_combinations: vec![ (FieldElement::one(), Witness(7)), diff --git a/noir/acvm-repo/acvm_js/Cargo.toml b/noir/acvm-repo/acvm_js/Cargo.toml index 22bd6e5aa7e..e8d46b9717e 100644 --- a/noir/acvm-repo/acvm_js/Cargo.toml +++ b/noir/acvm-repo/acvm_js/Cargo.toml @@ -19,17 +19,17 @@ crate-type = ["cdylib"] cfg-if = "1.0.0" [target.'cfg(target_arch = "wasm32")'.dependencies] -acvm = { path = "../acvm", default-features = false } -barretenberg_blackbox_solver = { path = "../barretenberg_blackbox_solver", default-features = false } +acvm.workspace = true +bn254_blackbox_solver = { workspace = true, optional = true } wasm-bindgen.workspace = true wasm-bindgen-futures.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true -js-sys.workspace = true -log.workspace = true +js-sys.workspace = true +tracing-subscriber.workspace = true +tracing-web.workspace = true serde = { version = "1.0.136", features = ["derive"] } -wasm-logger = "0.2.0" const-str = "0.5.5" [build-dependencies] @@ -41,5 +41,5 @@ wasm-bindgen-test.workspace = true [features] default = ["bn254"] -bn254 = ["acvm/bn254", "barretenberg_blackbox_solver/bn254"] -bls12_381 = ["acvm/bls12_381", "barretenberg_blackbox_solver/bls12_381"] +bn254 = ["acvm/bn254", "dep:bn254_blackbox_solver"] +bls12_381 = ["acvm/bls12_381"] diff --git a/noir/acvm-repo/acvm_js/package.json b/noir/acvm-repo/acvm_js/package.json index 9f265dd676a..2d878e961da 100644 --- a/noir/acvm-repo/acvm_js/package.json +++ b/noir/acvm-repo/acvm_js/package.json @@ -1,16 +1,21 @@ { "name": "@noir-lang/acvm_js", "version": "0.38.0", - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/acvm.git" - }, "publishConfig": { "access": "public" }, - "collaborators": [ + "contributors": [ "The Noir Team " ], + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "acvm_repo/acvm_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "license": "MIT", "main": 
"./nodejs/acvm_js.js", "types": "./web/acvm_js.d.ts", diff --git a/noir/acvm-repo/acvm_js/src/execute.rs b/noir/acvm-repo/acvm_js/src/execute.rs index 81e2a11ed5a..439a929cc98 100644 --- a/noir/acvm-repo/acvm_js/src/execute.rs +++ b/noir/acvm-repo/acvm_js/src/execute.rs @@ -2,8 +2,7 @@ use acvm::{ acir::circuit::Circuit, pwg::{ACVMStatus, ErrorLocation, OpcodeResolutionError, ACVM}, }; -#[allow(deprecated)] -use barretenberg_blackbox_solver::BarretenbergSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use js_sys::Error; use wasm_bindgen::prelude::wasm_bindgen; @@ -14,13 +13,11 @@ use crate::{ }; #[wasm_bindgen] -#[allow(deprecated)] -pub struct WasmBlackBoxFunctionSolver(BarretenbergSolver); +pub struct WasmBlackBoxFunctionSolver(Bn254BlackBoxSolver); impl WasmBlackBoxFunctionSolver { async fn initialize() -> WasmBlackBoxFunctionSolver { - #[allow(deprecated)] - WasmBlackBoxFunctionSolver(BarretenbergSolver::initialize().await) + WasmBlackBoxFunctionSolver(Bn254BlackBoxSolver::initialize().await) } } diff --git a/noir/acvm-repo/acvm_js/src/lib.rs b/noir/acvm-repo/acvm_js/src/lib.rs index ba2a37bf984..88afd1767c9 100644 --- a/noir/acvm-repo/acvm_js/src/lib.rs +++ b/noir/acvm-repo/acvm_js/src/lib.rs @@ -24,7 +24,7 @@ cfg_if::cfg_if! { pub use compression::{compress_witness, decompress_witness}; pub use execute::{execute_circuit, execute_circuit_with_black_box_solver, create_black_box_solver}; pub use js_witness_map::JsWitnessMap; - pub use logging::{init_log_level, LogLevel}; + pub use logging::init_log_level; pub use public_witness::{get_public_parameters_witness, get_public_witness, get_return_witness}; pub use js_execution_error::JsExecutionError; } diff --git a/noir/acvm-repo/acvm_js/src/logging.rs b/noir/acvm-repo/acvm_js/src/logging.rs index d939c5f8367..f5d71fae067 100644 --- a/noir/acvm-repo/acvm_js/src/logging.rs +++ b/noir/acvm-repo/acvm_js/src/logging.rs @@ -1,31 +1,26 @@ -use js_sys::JsString; -use log::Level; -use std::str::FromStr; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; +use tracing_web::MakeWebConsoleWriter; use wasm_bindgen::prelude::*; -#[wasm_bindgen(typescript_custom_section)] -const LOG_LEVEL: &'static str = r#" -export type LogLevel = "OFF" | "ERROR" | "WARN" | "INFO" | "DEBUG" | "TRACE"; -"#; - -#[wasm_bindgen] -extern "C" { - #[wasm_bindgen(extends = JsString, typescript_type = "LogLevel")] - pub type LogLevel; -} - /// Sets the package's logging level. /// /// @param {LogLevel} level - The maximum level of logging to be emitted. 
#[wasm_bindgen(js_name = initLogLevel, skip_jsdoc)] -pub fn init_log_level(level: LogLevel) { +pub fn init_log_level(filter: String) { // Set the static variable from Rust use std::sync::Once; - let log_level = level.as_string().unwrap(); - let log_level = Level::from_str(&log_level).unwrap_or(Level::Error); + let filter: EnvFilter = + filter.parse().expect("Could not parse log filter while initializing logger"); + static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { - wasm_logger::init(wasm_logger::Config::new(log_level)); + let fmt_layer = tracing_subscriber::fmt::layer() + .with_ansi(false) + .without_time() + .with_writer(MakeWebConsoleWriter::new()); + + tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); }); } diff --git a/noir/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/noir/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index 925c1a07eb8..259c51ed1c6 100644 --- a/noir/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ b/noir/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -53,7 +53,7 @@ it('successfully processes simple brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); @@ -79,7 +79,7 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); diff --git a/noir/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/noir/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index b28b9e72591..adee3c15312 100644 --- a/noir/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/noir/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -46,7 +46,7 @@ it('successfully processes simple brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); @@ -72,7 +72,7 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. 
expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/CHANGELOG.md b/noir/acvm-repo/bn254_blackbox_solver/CHANGELOG.md similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/CHANGELOG.md rename to noir/acvm-repo/bn254_blackbox_solver/CHANGELOG.md diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/Cargo.toml b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml similarity index 87% rename from noir/acvm-repo/barretenberg_blackbox_solver/Cargo.toml rename to noir/acvm-repo/bn254_blackbox_solver/Cargo.toml index 01f5e6ca950..b98bb370f74 100644 --- a/noir/acvm-repo/barretenberg_blackbox_solver/Cargo.toml +++ b/noir/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "barretenberg_blackbox_solver" -description = "A wrapper around a barretenberg WASM binary to execute black box functions for which there is no rust implementation" +name = "bn254_blackbox_solver" +description = "Solvers for black box functions which are specific for the bn254 curve" # x-release-please-start-version version = "0.38.0" # x-release-please-end @@ -54,4 +54,3 @@ reqwest = { version = "0.11.20", default-features = false, features = [ [features] default = ["bn254"] bn254 = ["acir/bn254"] -bls12_381 = ["acir/bls12_381"] diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/build.rs b/noir/acvm-repo/bn254_blackbox_solver/build.rs similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/build.rs rename to noir/acvm-repo/bn254_blackbox_solver/build.rs diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/acvm_backend.wasm b/noir/acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/acvm_backend.wasm rename to noir/acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/fixed_base_scalar_mul.rs b/noir/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/fixed_base_scalar_mul.rs rename to noir/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/lib.rs b/noir/acvm-repo/bn254_blackbox_solver/src/lib.rs similarity index 82% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/lib.rs rename to noir/acvm-repo/bn254_blackbox_solver/src/lib.rs index 5d2ab834536..e315c4650be 100644 --- a/noir/acvm-repo/barretenberg_blackbox_solver/src/lib.rs +++ b/noir/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -13,36 +13,32 @@ use wasm::Barretenberg; use self::wasm::{Pedersen, SchnorrSig}; -#[deprecated = "The `BarretenbergSolver` is a temporary solution and will be removed in future."] -pub struct BarretenbergSolver { +pub struct Bn254BlackBoxSolver { blackbox_vendor: Barretenberg, } -#[allow(deprecated)] -impl BarretenbergSolver { +impl Bn254BlackBoxSolver { #[cfg(target_arch = "wasm32")] - pub async fn initialize() -> BarretenbergSolver { + pub async fn initialize() -> Bn254BlackBoxSolver { let blackbox_vendor = Barretenberg::initialize().await; - BarretenbergSolver { blackbox_vendor } + Bn254BlackBoxSolver { blackbox_vendor } } #[cfg(not(target_arch = "wasm32"))] - pub fn new() -> BarretenbergSolver { + pub fn new() -> Bn254BlackBoxSolver { let blackbox_vendor = Barretenberg::new(); - BarretenbergSolver { blackbox_vendor } + Bn254BlackBoxSolver { blackbox_vendor } } } #[cfg(not(target_arch = 
"wasm32"))] -#[allow(deprecated)] -impl Default for BarretenbergSolver { +impl Default for Bn254BlackBoxSolver { fn default() -> Self { Self::new() } } -#[allow(deprecated)] -impl BlackBoxFunctionSolver for BarretenbergSolver { +impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { fn schnorr_verify( &self, public_key_x: &FieldElement, @@ -57,7 +53,6 @@ impl BlackBoxFunctionSolver for BarretenbergSolver { let sig_s: [u8; 32] = signature[0..32].try_into().unwrap(); let sig_e: [u8; 32] = signature[32..64].try_into().unwrap(); - #[allow(deprecated)] self.blackbox_vendor.verify_signature(pub_key, sig_s, sig_e, message).map_err(|err| { BlackBoxResolutionError::Failed(BlackBoxFunc::SchnorrVerify, err.to_string()) }) diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/barretenberg_structures.rs b/noir/acvm-repo/bn254_blackbox_solver/src/wasm/barretenberg_structures.rs similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/barretenberg_structures.rs rename to noir/acvm-repo/bn254_blackbox_solver/src/wasm/barretenberg_structures.rs diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/mod.rs b/noir/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/mod.rs rename to noir/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/pedersen.rs b/noir/acvm-repo/bn254_blackbox_solver/src/wasm/pedersen.rs similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/pedersen.rs rename to noir/acvm-repo/bn254_blackbox_solver/src/wasm/pedersen.rs diff --git a/noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/schnorr.rs b/noir/acvm-repo/bn254_blackbox_solver/src/wasm/schnorr.rs similarity index 100% rename from noir/acvm-repo/barretenberg_blackbox_solver/src/wasm/schnorr.rs rename to noir/acvm-repo/bn254_blackbox_solver/src/wasm/schnorr.rs diff --git a/noir/aztec_macros/src/lib.rs b/noir/aztec_macros/src/lib.rs index 2ed2f66789e..3bba3c7adfc 100644 --- a/noir/aztec_macros/src/lib.rs +++ b/noir/aztec_macros/src/lib.rs @@ -500,7 +500,8 @@ fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { let struct_type = make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![])); // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3590): Make this point to aztec-nr once the issue is fixed. 
- let selector_path = chained_path!("protocol_types", "abis", "function_selector", "FunctionSelector"); + let selector_path = + chained_path!("protocol_types", "abis", "function_selector", "FunctionSelector"); let mut from_signature_path = selector_path.clone(); from_signature_path.segments.push(ident("from_signature")); @@ -510,7 +511,8 @@ fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { )))]); // Define `FunctionSelector` return type - let return_type = FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![]))); + let return_type = + FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![]))); let mut selector_fn_def = FunctionDefinition::normal( &ident("selector"), diff --git a/noir/compiler/fm/src/file_map.rs b/noir/compiler/fm/src/file_map.rs index 0cbdc535e40..c4d7002a082 100644 --- a/noir/compiler/fm/src/file_map.rs +++ b/noir/compiler/fm/src/file_map.rs @@ -30,7 +30,7 @@ impl From<&PathBuf> for PathString { PathString::from(pb.to_owned()) } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct FileMap { files: SimpleFiles, name_to_id: HashMap, diff --git a/noir/compiler/fm/src/lib.rs b/noir/compiler/fm/src/lib.rs index 4870a6c283b..55fb762479f 100644 --- a/noir/compiler/fm/src/lib.rs +++ b/noir/compiler/fm/src/lib.rs @@ -16,7 +16,7 @@ use std::{ }; pub const FILE_EXTENSION: &str = "nr"; - +#[derive(Clone)] pub struct FileManager { root: PathBuf, file_map: FileMap, diff --git a/noir/compiler/noirc_driver/Cargo.toml b/noir/compiler/noirc_driver/Cargo.toml index 7f431db4398..32c418b0adb 100644 --- a/noir/compiler/noirc_driver/Cargo.toml +++ b/noir/compiler/noirc_driver/Cargo.toml @@ -22,6 +22,6 @@ fm.workspace = true serde.workspace = true fxhash.workspace = true rust-embed = "6.6.0" -log.workspace = true +tracing.workspace = true aztec_macros = { path = "../../aztec_macros" } diff --git a/noir/compiler/noirc_driver/src/lib.rs b/noir/compiler/noirc_driver/src/lib.rs index 24b159568f2..7136580b770 100644 --- a/noir/compiler/noirc_driver/src/lib.rs +++ b/noir/compiler/noirc_driver/src/lib.rs @@ -4,7 +4,7 @@ #![warn(clippy::semicolon_if_nothing_returned)] use clap::Args; -use fm::FileId; +use fm::{FileId, FileManager}; use iter_extended::vecmap; use noirc_abi::{AbiParameter, AbiType, ContractEvent}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; @@ -18,6 +18,7 @@ use noirc_frontend::monomorphization::monomorphize; use noirc_frontend::node_interner::FuncId; use serde::{Deserialize, Serialize}; use std::path::Path; +use tracing::info; mod abi_gen; mod contract; @@ -81,17 +82,36 @@ pub type ErrorsAndWarnings = Vec; /// Helper type for connecting a compilation artifact to the errors or warnings which were produced during compilation. 
pub type CompilationResult = Result<(T, Warnings), ErrorsAndWarnings>; -/// Adds the file from the file system at `Path` to the crate graph as a root file -pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { +/// Helper method to return a file manager instance with the stdlib already added +/// +/// TODO: This should become the canonical way to create a file manager and +/// TODO if we use a File manager trait, we can move file manager into this crate +/// TODO as a module +pub fn file_manager_with_stdlib(root: &Path) -> FileManager { + let mut file_manager = FileManager::new(root); + + add_stdlib_source_to_file_manager(&mut file_manager); + + file_manager +} + +/// Adds the source code for the stdlib into the file manager +fn add_stdlib_source_to_file_manager(file_manager: &mut FileManager) { // Add the stdlib contents to the file manager, since every package automatically has a dependency // on the stdlib. For other dependencies, we read the package.Dependencies file to add their file // contents to the file manager. However since the dependency on the stdlib is implicit, we need // to manually add it here. let stdlib_paths_with_source = stdlib::stdlib_paths_with_source(); for (path, source) in stdlib_paths_with_source { - context.file_manager.add_file_with_source_canonical_path(Path::new(&path), source); + file_manager.add_file_with_source_canonical_path(Path::new(&path), source); } +} +/// Adds the file from the file system at `Path` to the crate graph as a root file +/// +/// Note: This methods adds the stdlib as a dependency to the crate. +/// This assumes that the stdlib has already been added to the file manager. +pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { let path_to_std_lib_file = Path::new(STD_CRATE_NAME).join("lib.nr"); let std_file_id = context .file_manager @@ -141,14 +161,13 @@ pub fn add_dep( /// /// This returns a (possibly empty) vector of any warnings found on success. /// On error, this returns a non-empty vector of warnings and error messages, with at least one error. +#[tracing::instrument(level = "trace", skip(context))] pub fn check_crate( context: &mut Context, crate_id: CrateId, deny_warnings: bool, disable_macros: bool, ) -> CompilationResult<()> { - log::trace!("Start checking crate"); - let macros: Vec<&dyn MacroProcessor> = if disable_macros { vec![] } else { @@ -162,8 +181,6 @@ pub fn check_crate( diagnostic.in_file(file_id) })); - log::trace!("Finish checking crate"); - if has_errors(&errors, deny_warnings) { Err(errors) } else { @@ -358,6 +375,7 @@ fn compile_contract_inner( /// Compile the current crate using `main_function` as the entrypoint. /// /// This function assumes [`check_crate`] is called beforehand. 
+#[tracing::instrument(level = "trace", skip_all, fields(function_name = context.function_name(&main_function)))] pub fn compile_no_check( context: &Context, options: &CompileOptions, @@ -376,7 +394,7 @@ pub fn compile_no_check( force_compile || options.print_acir || options.show_brillig || options.show_ssa; if !force_compile && hashes_match { - log::info!("Program matches existing artifact, returning early"); + info!("Program matches existing artifact, returning early"); return Ok(cached_program.expect("cache must exist for hashes to match")); } let visibility = program.return_visibility; diff --git a/noir/compiler/noirc_errors/Cargo.toml b/noir/compiler/noirc_errors/Cargo.toml index 812a507550c..02e97b2c670 100644 --- a/noir/compiler/noirc_errors/Cargo.toml +++ b/noir/compiler/noirc_errors/Cargo.toml @@ -15,4 +15,4 @@ fm.workspace = true chumsky.workspace = true serde.workspace = true serde_with = "3.2.0" -log.workspace = true +tracing.workspace = true \ No newline at end of file diff --git a/noir/compiler/noirc_errors/src/debug_info.rs b/noir/compiler/noirc_errors/src/debug_info.rs index 3ae5c193e39..ee40ced19bf 100644 --- a/noir/compiler/noirc_errors/src/debug_info.rs +++ b/noir/compiler/noirc_errors/src/debug_info.rs @@ -38,8 +38,8 @@ impl DebugInfo { /// The [`OpcodeLocation`]s are generated with the ACIR, but passing the ACIR through a transformation step /// renders the old `OpcodeLocation`s invalid. The AcirTransformationMap is able to map the old `OpcodeLocation` to the new ones. /// Note: One old `OpcodeLocation` might have transformed into more than one new `OpcodeLocation`. + #[tracing::instrument(level = "trace", skip(self, update_map))] pub fn update_acir(&mut self, update_map: AcirTransformationMap) { - log::trace!("Start debug info update"); let old_locations = mem::take(&mut self.locations); for (old_opcode_location, source_locations) in old_locations { @@ -47,7 +47,6 @@ impl DebugInfo { self.locations.insert(new_opcode_location, source_locations.clone()); }); } - log::trace!("Finish debug info update"); } pub fn opcode_location(&self, loc: &OpcodeLocation) -> Option> { diff --git a/noir/compiler/noirc_evaluator/Cargo.toml b/noir/compiler/noirc_evaluator/Cargo.toml index 6e7152c6d71..a8f0e8d83a9 100644 --- a/noir/compiler/noirc_evaluator/Cargo.toml +++ b/noir/compiler/noirc_evaluator/Cargo.toml @@ -17,4 +17,4 @@ thiserror.workspace = true num-bigint = "0.4" im = { version = "15.1", features = ["serde"] } serde.workspace = true -log.workspace = true +tracing.workspace = true \ No newline at end of file diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index d7f83c32cdf..a6d3220fa85 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -79,6 +79,28 @@ pub(crate) fn convert_black_box_call( ) } } + BlackBoxFunc::EcdsaSecp256r1 => { + if let ( + [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::Simple(result_register)], + ) = (function_arguments, function_results) + { + let message_hash_vector = + convert_array_or_vector(brillig_context, message, bb_func); + brillig_context.black_box_op_instruction(BlackBoxOp::EcdsaSecp256r1 { + hashed_msg: message_hash_vector.to_heap_vector(), + public_key_x: public_key_x.to_heap_array(), + 
public_key_y: public_key_y.to_heap_array(), + signature: signature.to_heap_array(), + result: *result_register, + }); + } else { + unreachable!( + "ICE: EcdsaSecp256r1 expects four array arguments and one register result" + ) + } + } + BlackBoxFunc::PedersenCommitment => { if let ( [message, BrilligVariable::Simple(domain_separator)], @@ -147,7 +169,24 @@ pub(crate) fn convert_black_box_call( ) } } - _ => unimplemented!("ICE: Black box function {:?} is not implemented", bb_func), + BlackBoxFunc::AND => { + unreachable!("ICE: `BlackBoxFunc::AND` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::XOR => { + unreachable!("ICE: `BlackBoxFunc::XOR` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::RANGE => unreachable!( + "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" + ), + BlackBoxFunc::RecursiveAggregation => unimplemented!( + "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" + ), + BlackBoxFunc::Blake3 => { + unimplemented!("ICE: `BlackBoxFunc::Blake3` is not implemented by the Brillig VM") + } + BlackBoxFunc::Keccakf1600 => { + unimplemented!("ICE: `BlackBoxFunc::Keccakf1600` is not implemented by the Brillig VM") + } } } diff --git a/noir/compiler/noirc_evaluator/src/errors.rs b/noir/compiler/noirc_evaluator/src/errors.rs index 42818e8b19d..33ecc794f76 100644 --- a/noir/compiler/noirc_evaluator/src/errors.rs +++ b/noir/compiler/noirc_evaluator/src/errors.rs @@ -106,7 +106,7 @@ pub enum InternalError { #[error("ICE: Undeclared AcirVar")] UndeclaredAcirVar { call_stack: CallStack }, #[error("ICE: Expected {expected:?}, found {found:?}")] - UnExpected { expected: String, found: String, call_stack: CallStack }, + Unexpected { expected: String, found: String, call_stack: CallStack }, } impl RuntimeError { @@ -119,7 +119,7 @@ impl RuntimeError { | InternalError::MissingArg { call_stack, .. } | InternalError::NotAConstant { call_stack, .. } | InternalError::UndeclaredAcirVar { call_stack } - | InternalError::UnExpected { call_stack, .. }, + | InternalError::Unexpected { call_stack, .. }, ) | RuntimeError::FailedConstraint { call_stack, .. } | RuntimeError::IndexOutOfBounds { call_stack, .. } diff --git a/noir/compiler/noirc_evaluator/src/ssa.rs b/noir/compiler/noirc_evaluator/src/ssa.rs index 6a02a5f6edc..deffe84baea 100644 --- a/noir/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/compiler/noirc_evaluator/src/ssa.rs @@ -23,6 +23,7 @@ use noirc_errors::debug_info::DebugInfo; use noirc_frontend::{ hir_def::function::FunctionSignature, monomorphization::ast::Program, Visibility, }; +use tracing::{span, Level}; use self::{acir_gen::GeneratedAcir, ssa_gen::Ssa}; @@ -42,7 +43,8 @@ pub(crate) fn optimize_into_acir( ) -> Result { let abi_distinctness = program.return_distinctness; - log::trace!("Start SSA generation"); + let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); + let ssa_gen_span_guard = ssa_gen_span.enter(); let ssa_builder = SsaBuilder::new(program, print_ssa_passes)? 
.run_pass(Ssa::defunctionalize, "After Defunctionalization:") .run_pass(Ssa::inline_functions, "After Inlining:") @@ -70,28 +72,23 @@ pub(crate) fn optimize_into_acir( let ssa = ssa_builder .run_pass(Ssa::fill_internal_slices, "After Fill Internal Slice Dummy Data:") .finish(); - log::trace!("Finish SSA generation"); + drop(ssa_gen_span_guard); let last_array_uses = ssa.find_last_array_uses(); - log::trace!("Start ACIR generation"); - let acir = ssa.into_acir(brillig, abi_distinctness, &last_array_uses); - log::trace!("Finish ACIR generation"); - - acir + ssa.into_acir(brillig, abi_distinctness, &last_array_uses) } /// Compiles the [`Program`] into [`ACIR`][acvm::acir::circuit::Circuit]. /// /// The output ACIR is is backend-agnostic and so must go through a transformation pass before usage in proof generation. #[allow(clippy::type_complexity)] +#[tracing::instrument(level = "trace", skip_all)] pub fn create_circuit( program: Program, enable_ssa_logging: bool, enable_brillig_logging: bool, ) -> Result<(Circuit, DebugInfo, Vec, Vec, Vec), RuntimeError> { - log::trace!("Start circuit generation"); - let func_sig = program.main_function_signature.clone(); let mut generated_acir = optimize_into_acir(program, enable_ssa_logging, enable_brillig_logging)?; @@ -133,8 +130,6 @@ pub fn create_circuit( let (optimized_circuit, transformation_map) = acvm::compiler::optimize(circuit); debug_info.update_acir(transformation_map); - log::trace!("Finish circuit generation"); - Ok((optimized_circuit, debug_info, input_witnesses, return_witnesses, warnings)) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index e039a7793c0..ddafc0bb570 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -307,6 +307,7 @@ impl AcirContext { inverse_code, vec![AcirValue::Var(var, AcirType::field())], vec![AcirType::field()], + true, )?; let inverted_var = Self::expect_one_var(results); @@ -368,9 +369,34 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { - let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; - Ok(outputs[0]) + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x ^ x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } else if lhs_expr.is_zero() { + // 0 ^ x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x ^ 0 == x + return Ok(lhs); + } + + let bit_size = typ.bit_size(); + if bit_size == 1 { + // Operands are booleans. + // + // a ^ b == a + b - 2*a*b + let sum = self.add_var(lhs, rhs)?; + let prod = self.mul_var(lhs, rhs)?; + self.add_mul_var(sum, -FieldElement::from(2_i128), prod) + } else { + let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; + let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; + Ok(outputs[0]) + } } /// Returns an `AcirVar` that is the AND result of `lhs` & `rhs`. 
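The `xor_var` rewrite above falls back to the algebraic identity `a ^ b == a + b - 2*a*b` when both operands are single bits, and the `or_var` hunk that follows builds OR from NOT and AND via De Morgan's law. A small, self-contained check of those single-bit identities over {0, 1} (purely illustrative, not part of the diff):

```rust
fn main() {
    let not = |x: i64| 1 - x;
    for a in 0i64..=1 {
        for b in 0i64..=1 {
            let xor = a + b - 2 * a * b;   // a ^ b for single-bit operands
            let and = a * b;               // a & b for single-bit operands
            let or = not(not(a) * not(b)); // De Morgan: a | b == !(!a & !b)
            assert_eq!(xor, a ^ b);
            assert_eq!(and, a & b);
            assert_eq!(or, a | b);
        }
    }
}
```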
@@ -380,6 +406,18 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x & x == x + return Ok(lhs); + } else if lhs_expr.is_zero() || rhs_expr.is_zero() { + // x & 0 == 0 and 0 & x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans. @@ -398,6 +436,16 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + if lhs_expr.is_zero() { + // 0 | x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x | 0 == x + return Ok(lhs); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans @@ -407,15 +455,11 @@ impl AcirContext { self.sub_var(sum, mul) } else { // Implement OR in terms of AND - // max - ((max - a) AND (max -b)) - // Subtracting from max flips the bits, so this is effectively: - // (NOT a) NAND (NOT b) - let max = self.add_constant((1_u128 << bit_size) - 1); - let a = self.sub_var(max, lhs)?; - let b = self.sub_var(max, rhs)?; - let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; - self.sub_var(max, outputs[0]) + // (NOT a) NAND (NOT b) => a OR b + let a = self.not_var(lhs, typ.clone())?; + let b = self.not_var(rhs, typ.clone())?; + let a_and_b = self.and_var(a, b, typ.clone())?; + self.not_var(a_and_b, typ) } } @@ -487,8 +531,19 @@ impl AcirContext { let lhs_data = self.vars[&lhs].clone(); let rhs_data = self.vars[&rhs].clone(); let result = match (lhs_data, rhs_data) { + // (x * 1) == (1 * x) == x + (AcirVarData::Const(constant), _) if constant.is_one() => rhs, + (_, AcirVarData::Const(constant)) if constant.is_one() => lhs, + + // (x * 0) == (0 * x) == 0 + (AcirVarData::Const(constant), _) | (_, AcirVarData::Const(constant)) + if constant.is_zero() => + { + self.add_constant(FieldElement::zero()) + } + (AcirVarData::Const(lhs_constant), AcirVarData::Const(rhs_constant)) => { - self.add_data(AcirVarData::Const(lhs_constant * rhs_constant)) + self.add_constant(lhs_constant * rhs_constant) } (AcirVarData::Witness(witness), AcirVarData::Const(constant)) | (AcirVarData::Const(constant), AcirVarData::Witness(witness)) => { @@ -654,6 +709,7 @@ impl AcirContext { AcirValue::Var(rhs, AcirType::unsigned(bit_size)), ], vec![AcirType::unsigned(max_q_bits), AcirType::unsigned(max_rhs_bits)], + true, )? 
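The rewritten `or_var` builds OR from AND and NOT, i.e. `a | b == NOT(NOT a AND NOT b)`, where NOT is `max - x` within the operand's bit size. A small standalone sanity check of that De Morgan rewrite in plain Rust, exhaustive over a 4-bit domain (the width is arbitrary):

    fn main() {
        let bit_size = 4u32;
        let max: u8 = (1u8 << bit_size) - 1; // all ones in the 4-bit domain
        let not = |x: u8| max - x; // subtracting from max flips the low `bit_size` bits
        for a in 0..=max {
            for b in 0..=max {
                let or_via_nand = not(not(a) & not(b));
                assert_eq!(or_via_nand, a | b);
            }
        }
        println!("a | b == NOT(NOT a AND NOT b) holds for all 4-bit pairs");
    }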
.try_into() .expect("quotient only returns two values"); @@ -852,9 +908,7 @@ impl AcirContext { // Unsigned to signed: derive q and r from q1,r1 and the signs of lhs and rhs // Quotient sign is lhs sign * rhs sign, whose resulting sign bit is the XOR of the sign bits - let sign_sum = self.add_var(lhs_leading, rhs_leading)?; - let sign_prod = self.mul_var(lhs_leading, rhs_leading)?; - let q_sign = self.add_mul_var(sign_sum, -FieldElement::from(2_i128), sign_prod)?; + let q_sign = self.xor_var(lhs_leading, rhs_leading, AcirType::unsigned(1))?; let quotient = self.two_complement(q1, q_sign, bit_size)?; let remainder = self.two_complement(r1, lhs_leading, bit_size)?; @@ -936,6 +990,58 @@ impl AcirContext { Ok(remainder) } + /// Returns an 'AcirVar' containing the boolean value lhs diff<2^n, because the 2-complement representation keeps the ordering (e.g in 8 bits -1 is 255 > -2 = 254) + /// If not, lhs positive => diff > 2^n + /// and lhs negative => diff <= 2^n => diff < 2^n (because signs are not the same, so lhs != rhs and so diff != 2^n) + pub(crate) fn less_than_signed( + &mut self, + lhs: AcirVar, + rhs: AcirVar, + bit_count: u32, + ) -> Result { + let pow_last = self.add_constant(FieldElement::from(1_u128 << (bit_count - 1))); + let pow = self.add_constant(FieldElement::from(1_u128 << (bit_count))); + + // We check whether the inputs have same sign or not by computing the XOR of their bit sign + + // Predicate is always active as `pow_last` is known to be non-zero. + let one = self.add_constant(1_u128); + let lhs_sign = self.div_var( + lhs, + pow_last, + AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), + one, + )?; + let rhs_sign = self.div_var( + rhs, + pow_last, + AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), + one, + )?; + let same_sign = self.xor_var( + lhs_sign, + rhs_sign, + AcirType::NumericType(NumericType::Signed { bit_size: 1 }), + )?; + + // We compute the input difference + let no_underflow = self.add_var(lhs, pow)?; + let diff = self.sub_var(no_underflow, rhs)?; + + // We check the 'bit sign' of the difference + let diff_sign = self.less_than_var(diff, pow, bit_count + 1)?; + + // Then the result is simply diff_sign XOR same_sign (can be checked with a truth table) + self.xor_var( + diff_sign, + same_sign, + AcirType::NumericType(NumericType::Signed { bit_size: 1 }), + ) + } + /// Returns an `AcirVar` which will be `1` if lhs >= rhs /// and `0` otherwise. pub(crate) fn more_than_eq_var( @@ -943,7 +1049,6 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, max_bits: u32, - predicate: AcirVar, ) -> Result { // Returns a `Witness` that is constrained to be: // - `1` if lhs >= rhs @@ -968,6 +1073,7 @@ impl AcirContext { // // TODO: perhaps this should be a user error, instead of an assert assert!(max_bits + 1 < FieldElement::max_num_bits()); + let two_max_bits = self .add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(max_bits as i128))); let diff = self.sub_var(lhs, rhs)?; @@ -997,13 +1103,11 @@ impl AcirContext { // let k = b - a // - 2^{max_bits} - k == q * 2^{max_bits} + r // - This is only the case when q == 0 and r == 2^{max_bits} - k - // - let (q, _) = self.euclidean_division_var( - comparison_evaluation, - two_max_bits, - max_bits + 1, - predicate, - )?; + + // Predicate is always active as we know `two_max_bits` is always non-zero. 
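The new `less_than_signed` compares two's-complement values by forming `diff = lhs + 2^n - rhs`, reading its "sign bit" (is `diff < 2^n`?), and XORing that with the XOR of the operands' sign bits. A standalone sketch of the same strategy in plain Rust, checked exhaustively against the native `<` for `i8`; the function name and integer widths here are purely illustrative:

    fn less_than_signed_sketch(lhs: i8, rhs: i8) -> bool {
        let n = 8u32;
        let l = lhs as u8 as u16; // two's-complement encoding, zero-extended
        let r = rhs as u8 as u16;
        let pow_last = 1u16 << (n - 1); // 2^(n-1), selects the sign bit
        let pow = 1u16 << n; // 2^n

        let lhs_sign = l / pow_last; // 1 iff lhs is negative
        let rhs_sign = r / pow_last;
        let signs_differ = lhs_sign ^ rhs_sign;

        let diff = l + pow - r; // cannot underflow
        let diff_sign = u16::from(diff < pow); // 1 iff l < r as unsigned encodings

        (diff_sign ^ signs_differ) == 1
    }

    fn main() {
        for lhs in i8::MIN..=i8::MAX {
            for rhs in i8::MIN..=i8::MAX {
                assert_eq!(less_than_signed_sketch(lhs, rhs), lhs < rhs);
            }
        }
        println!("signed less-than sketch agrees with `<` for every i8 pair");
    }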
+ let one = self.add_constant(1_u128); + let (q, _) = + self.euclidean_division_var(comparison_evaluation, two_max_bits, max_bits + 1, one)?; Ok(q) } @@ -1014,11 +1118,10 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, bit_size: u32, - predicate: AcirVar, ) -> Result { // Flip the result of calling more than equal method to // compute less than. - let comparison = self.more_than_eq_var(lhs, rhs, bit_size, predicate)?; + let comparison = self.more_than_eq_var(lhs, rhs, bit_size)?; let one = self.add_constant(FieldElement::one()); self.sub_var(one, comparison) // comparison_negated @@ -1209,6 +1312,7 @@ impl AcirContext { generated_brillig: GeneratedBrillig, inputs: Vec, outputs: Vec, + attempt_execution: bool, ) -> Result, InternalError> { let b_inputs = try_vecmap(inputs, |i| match i { AcirValue::Var(var, _) => Ok(BrilligInputs::Single(self.var_to_expression(var)?)), @@ -1228,10 +1332,15 @@ impl AcirContext { // Optimistically try executing the brillig now, if we can complete execution they just return the results. // This is a temporary measure pending SSA optimizations being applied to Brillig which would remove constant-input opcodes (See #2066) - if let Some(brillig_outputs) = - self.execute_brillig(&generated_brillig.byte_code, &b_inputs, &outputs) - { - return Ok(brillig_outputs); + // + // We do _not_ want to do this in the situation where the `main` function is unconstrained, as if execution succeeds + // the entire program will be replaced with witness constraints to its outputs. + if attempt_execution { + if let Some(brillig_outputs) = + self.execute_brillig(&generated_brillig.byte_code, &b_inputs, &outputs) + { + return Ok(brillig_outputs); + } } // Otherwise we must generate ACIR for it and execute at runtime. @@ -1414,7 +1523,7 @@ impl AcirContext { bit_size: u32, predicate: AcirVar, ) -> Result<(), RuntimeError> { - let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size, predicate)?; + let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size)?; self.maybe_eq_predicate(lhs_less_than_rhs, predicate) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index c9f7ee51e97..bea0a5e3158 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -372,7 +372,7 @@ impl GeneratedAcir { /// If `expr` is not zero, then the constraint system will /// fail upon verification. 
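`more_than_eq_var` reduces `lhs >= rhs` to a single euclidean division: with `max_bits`-bit operands, the quotient of `2^max_bits + lhs - rhs` by `2^max_bits` is `1` exactly when `lhs >= rhs`. A standalone check of that quotient trick in plain Rust over all 8-bit pairs (the width is illustrative):

    fn main() {
        let max_bits = 8u32;
        let two_max_bits = 1u32 << max_bits; // 2^max_bits
        for lhs in 0..two_max_bits {
            for rhs in 0..two_max_bits {
                let comparison_evaluation = two_max_bits + lhs - rhs; // never underflows
                let q = comparison_evaluation / two_max_bits;
                assert_eq!(q == 1, lhs >= rhs);
            }
        }
        println!("quotient-based >= check matches the native comparison for all 8-bit pairs");
    }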
pub(crate) fn assert_is_zero(&mut self, expr: Expression) { - self.push_opcode(AcirOpcode::Arithmetic(expr)); + self.push_opcode(AcirOpcode::AssertZero(expr)); } /// Returns a `Witness` that is constrained to be: @@ -552,7 +552,7 @@ impl GeneratedAcir { // Constrain the network output to out_expr for (b, o) in b.iter().zip(out_expr) { - self.push_opcode(AcirOpcode::Arithmetic(b - o)); + self.push_opcode(AcirOpcode::AssertZero(b - o)); } Ok(()) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index d73bb514e02..edf0461430f 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -156,6 +156,7 @@ impl AcirValue { } impl Ssa { + #[tracing::instrument(level = "trace", skip_all)] pub(crate) fn into_acir( self, brillig: Brillig, @@ -265,11 +266,14 @@ impl Context { let code = self.gen_brillig_for(main_func, &brillig)?; + // We specifically do not attempt execution of the brillig code being generated as this can result in it being + // replaced with constraints on witnesses to the program outputs. let output_values = self.acir_context.brillig( self.current_side_effects_enabled_var, code, inputs, outputs, + false, )?; let output_vars: Vec<_> = output_values .iter() @@ -301,7 +305,7 @@ impl Context { let len = if matches!(typ, Type::Array(_, _)) { typ.flattened_size() } else { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Block params should be an array".to_owned(), found: format!("Instead got {:?}", typ), call_stack: self.acir_context.get_call_stack(), @@ -488,7 +492,7 @@ impl Context { let outputs: Vec = vecmap(result_ids, |result_id| dfg.type_of_value(*result_id).into()); - let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs)?; + let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs, true)?; // Compiler sanity check assert_eq!(result_ids.len(), output_values.len(), "ICE: The number of Brillig output values should match the result ids in SSA"); @@ -639,7 +643,7 @@ impl Context { Instruction::ArrayGet { array, index } => (array, index, None), Instruction::ArraySet { array, index, value, .. } => (array, index, Some(value)), _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArrayGet or ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -696,7 +700,7 @@ impl Context { match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { - return Err(RuntimeError::InternalError(InternalError::UnExpected { + return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), found: format!("{acir_var:?}"), call_stack: self.acir_context.get_call_stack(), @@ -787,7 +791,7 @@ impl Context { let slice_sizes = if store_type.contains_slice_element() { self.compute_slice_sizes(store, None, dfg); self.slice_sizes.get(&store).cloned().ok_or_else(|| { - InternalError::UnExpected { + InternalError::Unexpected { expected: "Store value should have slice sizes computed".to_owned(), found: "Missing key in slice sizes map".to_owned(), call_stack: self.acir_context.get_call_stack(), @@ -1012,7 +1016,7 @@ impl Context { let array = match dfg[instruction] { Instruction::ArraySet { array, .. 
} => array, _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -1234,7 +1238,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "AcirValue::DynamicArray or AcirValue::Array" .to_owned(), found: format!("{:?}", array_acir_value), @@ -1245,7 +1249,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or instruction".to_owned(), found: format!("{:?}", &dfg[array_id]), call_stack: self.acir_context.get_call_stack(), @@ -1255,7 +1259,7 @@ impl Context { }; } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or slice".to_owned(), found: array_typ.to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1512,12 +1516,12 @@ impl Context { ) -> Result { match self.convert_value(value_id, dfg) { AcirValue::Var(acir_var, _) => Ok(acir_var), - AcirValue::Array(array) => Err(InternalError::UnExpected { + AcirValue::Array(array) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: format!("{array:?}"), call_stack: self.acir_context.get_call_stack(), }), - AcirValue::DynamicArray(_) => Err(InternalError::UnExpected { + AcirValue::DynamicArray(_) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: "an array".to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1569,12 +1573,12 @@ impl Context { // Note: that this produces unnecessary constraints when // this Eq instruction is being used for a constrain statement BinaryOp::Eq => self.acir_context.eq_var(lhs, rhs), - BinaryOp::Lt => self.acir_context.less_than_var( - lhs, - rhs, - bit_count, - self.current_side_effects_enabled_var, - ), + BinaryOp::Lt => match binary_type { + AcirType::NumericType(NumericType::Signed { .. }) => { + self.acir_context.less_than_signed(lhs, rhs, bit_count) + } + _ => self.acir_context.less_than_var(lhs, rhs, bit_count), + }, BinaryOp::Xor => self.acir_context.xor_var(lhs, rhs, binary_type), BinaryOp::And => self.acir_context.and_var(lhs, rhs, binary_type), BinaryOp::Or => self.acir_context.or_var(lhs, rhs, binary_type), @@ -2135,19 +2139,11 @@ impl Context { let current_index = self.acir_context.add_constant(i); // Check that we are above the lower bound of the insertion index - let greater_eq_than_idx = self.acir_context.more_than_eq_var( - current_index, - flat_user_index, - 64, - self.current_side_effects_enabled_var, - )?; + let greater_eq_than_idx = + self.acir_context.more_than_eq_var(current_index, flat_user_index, 64)?; // Check that we are below the upper bound of the insertion index - let less_than_idx = self.acir_context.less_than_var( - current_index, - max_flat_user_index, - 64, - self.current_side_effects_enabled_var, - )?; + let less_than_idx = + self.acir_context.less_than_var(current_index, max_flat_user_index, 64)?; // Read from the original slice the value we want to insert into our new slice. // We need to make sure that we read the previous element when our current index is greater than insertion index. 
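`insert_wrapping_shift_left` and `insert_shift_right` above encode shifts arithmetically: `lhs << rhs` as `lhs * 2^rhs` (then truncated to the bit size) and `lhs >> rhs` as the integer division `lhs / 2^rhs`. A standalone check of those identities in plain Rust for unsigned 8-bit values (the width and mask are illustrative):

    fn main() {
        let bit_size = 8u32;
        let mask = (1u32 << bit_size) - 1; // keep only the low 8 bits, like the truncate
        for lhs in 0u32..=mask {
            for rhs in 0u32..bit_size {
                let pow = 1u32 << rhs; // 2^rhs
                assert_eq!((lhs * pow) & mask, (lhs << rhs) & mask);
                assert_eq!(lhs / pow, lhs >> rhs);
            }
        }
        println!("shift-as-multiply/divide identities hold for all 8-bit values");
    }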
@@ -2322,7 +2318,6 @@ impl Context { current_index, flat_user_index, 64, - self.current_side_effects_enabled_var, )?; let shifted_value_pred = diff --git a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 56a22fd4107..b972afa2990 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -266,15 +266,6 @@ impl FunctionBuilder { self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)).results() } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs - pub(crate) fn insert_shift_left(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { - let base = self.field_constant(FieldElement::from(2_u128)); - let pow = self.pow(base, rhs); - let typ = self.current_function.dfg.type_of_value(lhs); - let pow = self.insert_cast(pow, typ); - self.insert_binary(lhs, BinaryOp::Mul, pow) - } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs /// and truncate the result to bit_size pub(crate) fn insert_wrapping_shift_left( @@ -308,8 +299,9 @@ impl FunctionBuilder { let one = self.numeric_constant(FieldElement::one(), Type::unsigned(1)); let predicate = self.insert_binary(overflow, BinaryOp::Eq, one); let predicate = self.insert_cast(predicate, typ.clone()); - - let pow = self.pow(base, rhs); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); let pow = self.insert_cast(pow, typ); (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) }; @@ -323,9 +315,16 @@ impl FunctionBuilder { } /// Insert ssa instructions which computes lhs >> rhs by doing lhs/2^rhs - pub(crate) fn insert_shift_right(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + pub(crate) fn insert_shift_right( + &mut self, + lhs: ValueId, + rhs: ValueId, + bit_size: u32, + ) -> ValueId { let base = self.field_constant(FieldElement::from(2_u128)); - let pow = self.pow(base, rhs); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); self.insert_binary(lhs, BinaryOp::Div, pow) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/array_use.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/array_use.rs index cfa97cee551..0bb8b0112b6 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/array_use.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/array_use.rs @@ -14,6 +14,7 @@ impl Ssa { /// Map arrays with the last instruction that uses it /// For this we simply process all the instructions in execution order /// and update the map whenever there is a match + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn find_last_array_uses(&self) -> HashMap { let mut array_use = HashMap::default(); for func in self.functions.values() { diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs index cd3a509a62e..a3608f89612 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs @@ -21,6 +21,7 @@ impl Ssa { /// seen by loop unrolling. 
Furthermore, this pass cannot be a part of loop unrolling /// since we must go through every instruction to find all references to `assert_constant` /// while loop unrolling only touches blocks with loops in them. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn evaluate_assert_constant(mut self) -> Result { for function in self.functions.values_mut() { for block in function.reachable_blocks() { diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index f48e6f2a129..e944d7d99d8 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -40,6 +40,7 @@ impl Ssa { /// Performs constant folding on each instruction. /// /// See [`constant_folding`][self] module for more information. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn fold_constants(mut self) -> Ssa { for function in self.functions.values_mut() { constant_fold(function); diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index 62b335be1e2..b7f154397a6 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -52,6 +52,7 @@ struct DefunctionalizationContext { } impl Ssa { + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn defunctionalize(mut self) -> Ssa { // Find all functions used as value that share the same signature let variants = find_variants(&self); diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/die.rs index 492e96dc08c..029adb10054 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -17,6 +17,7 @@ use crate::ssa::{ impl Ssa { /// Performs Dead Instruction Elimination (DIE) to remove any instructions with /// unused results. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn dead_instruction_elimination(mut self) -> Ssa { for function in self.functions.values_mut() { dead_instruction_elimination(function); diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs index f5e9598114c..5ee8e42fe3a 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs @@ -60,6 +60,7 @@ use acvm::FieldElement; use fxhash::FxHashMap as HashMap; impl Ssa { + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn fill_internal_slices(mut self) -> Ssa { for function in self.functions.values_mut() { // This pass is only necessary for generating ACIR and thus we should not diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index f0ad610948c..fdd7c66684c 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -163,6 +163,7 @@ impl Ssa { /// This pass will modify any instructions with side effects in particular, often multiplying /// them by jump conditions to maintain correctness even when all branches of a jmpif are inlined. /// For more information, see the module-level comment at the top of this file. 
+ #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn flatten_cfg(mut self) -> Ssa { flatten_function_cfg(self.main_mut()); self @@ -640,8 +641,25 @@ impl<'f> Context<'f> { match instruction { Instruction::Constrain(lhs, rhs, message) => { // Replace constraint `lhs == rhs` with `condition * lhs == condition * rhs`. - let lhs = self.handle_constrain_arg_side_effects(lhs, condition, &call_stack); - let rhs = self.handle_constrain_arg_side_effects(rhs, condition, &call_stack); + + // Condition needs to be cast to argument type in order to multiply them together. + let argument_type = self.inserter.function.dfg.type_of_value(lhs); + // Sanity check that we're not constraining non-primitive types + assert!(matches!(argument_type, Type::Numeric(_))); + + let casted_condition = self.insert_instruction( + Instruction::Cast(condition, argument_type), + call_stack.clone(), + ); + + let lhs = self.insert_instruction( + Instruction::binary(BinaryOp::Mul, lhs, casted_condition), + call_stack.clone(), + ); + let rhs = self.insert_instruction( + Instruction::binary(BinaryOp::Mul, rhs, casted_condition), + call_stack, + ); Instruction::Constrain(lhs, rhs, message) } @@ -672,90 +690,6 @@ impl<'f> Context<'f> { } } - /// Given the arguments of a constrain instruction, multiplying them by the branch's condition - /// requires special handling in the case of complex types. - fn handle_constrain_arg_side_effects( - &mut self, - argument: ValueId, - condition: ValueId, - call_stack: &CallStack, - ) -> ValueId { - let argument_type = self.inserter.function.dfg.type_of_value(argument); - - match &argument_type { - Type::Numeric(_) => { - // Condition needs to be cast to argument type in order to multiply them together. - let casted_condition = self.insert_instruction( - Instruction::Cast(condition, argument_type), - call_stack.clone(), - ); - - self.insert_instruction( - Instruction::binary(BinaryOp::Mul, argument, casted_condition), - call_stack.clone(), - ) - } - Type::Array(_, _) => { - self.handle_array_constrain_arg(argument_type, argument, condition, call_stack) - } - Type::Slice(_) => { - panic!("Cannot use slices directly in a constrain statement") - } - Type::Reference(_) => { - panic!("Cannot use references directly in a constrain statement") - } - Type::Function => { - panic!("Cannot use functions directly in a constrain statement") - } - } - } - - fn handle_array_constrain_arg( - &mut self, - typ: Type, - argument: ValueId, - condition: ValueId, - call_stack: &CallStack, - ) -> ValueId { - let mut new_array = im::Vector::new(); - - let (element_types, len) = match &typ { - Type::Array(elements, len) => (elements, *len), - _ => panic!("Expected array type"), - }; - - for i in 0..len { - for (element_index, element_type) in element_types.iter().enumerate() { - let index = ((i * element_types.len() + element_index) as u128).into(); - let index = self.inserter.function.dfg.make_constant(index, Type::field()); - - let typevars = Some(vec![element_type.clone()]); - - let mut get_element = |array, typevars| { - let get = Instruction::ArrayGet { array, index }; - self.inserter - .function - .dfg - .insert_instruction_and_results( - get, - self.inserter.function.entry_block(), - typevars, - CallStack::new(), - ) - .first() - }; - - let element = get_element(argument, typevars); - - new_array.push_back( - self.handle_constrain_arg_side_effects(element, condition, call_stack), - ); - } - } - - self.inserter.function.dfg.make_array(new_array, typ) - } - fn undo_stores_in_then_branch(&mut self, 
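The flattened `Constrain` handling multiplies both sides by the (0 or 1) branch condition, so a constraint coming from an untaken branch is trivially satisfied while an active branch still enforces equality. A toy standalone illustration of that behaviour in plain Rust, with ordinary integers standing in for field elements:

    fn conditional_constraint_holds(condition: u64, lhs: u64, rhs: u64) -> bool {
        condition * lhs == condition * rhs
    }

    fn main() {
        assert!(conditional_constraint_holds(1, 5, 5)); // active branch, equal values
        assert!(!conditional_constraint_holds(1, 5, 6)); // active branch still catches a mismatch
        assert!(conditional_constraint_holds(0, 5, 6)); // disabled branch is trivially satisfied
        println!("condition * lhs == condition * rhs behaves as intended");
    }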
then_branch: &Branch) { for (address, store) in &then_branch.store_values { let address = *address; diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index b4f12b2f897..776f22b2877 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -36,6 +36,7 @@ impl Ssa { /// changes. This is because if the function's id later becomes known by a later /// pass, we would need to re-run all of inlining anyway to inline it, so we might /// as well save the work for later instead of performing it twice. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn inline_functions(mut self) -> Ssa { self.functions = btree_map(get_entry_point_functions(&self), |entry_point| { let new_function = InlineContext::new(&self, entry_point).inline_all(&self); diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index ce205c8d883..0a49ca4ecca 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -86,6 +86,7 @@ use self::block::{Block, Expression}; impl Ssa { /// Attempts to remove any load instructions that recover values that are already available in /// scope, and attempts to remove stores that are subsequently redundant. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn mem2reg(mut self) -> Ssa { for function in self.functions.values_mut() { let mut context = PerFunctionContext::new(function); diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index d491afc3d26..a31def8fd98 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -29,6 +29,7 @@ impl Ssa { /// only 1 successor then (2) also will be applied. /// /// Currently, 1 and 4 are unimplemented. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn simplify_cfg(mut self) -> Self { for function in self.functions.values_mut() { simplify_function(function); diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 50c2f5b1524..2fe9ea13cec 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -36,6 +36,7 @@ use fxhash::FxHashMap as HashMap; impl Ssa { /// Unroll all loops in each SSA function. /// If any loop cannot be unrolled, it is left as-is or in a partially unrolled state. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn unroll_loops(mut self) -> Result { for function in self.functions.values_mut() { // Loop unrolling in brillig can lead to a code explosion currently. 
This can diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 501a03bcb5c..5724bf56e8e 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -326,8 +326,8 @@ impl<'a> FunctionContext<'a> { self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); self.builder.insert_cast(result, result_type) } - BinaryOpKind::ShiftLeft => { - unreachable!("shift is not supported for signed integer") + BinaryOpKind::ShiftLeft | BinaryOpKind::ShiftRight => { + self.check_shift_overflow(result, rhs, bit_size, location, true) } _ => unreachable!("operator {} should not overflow", operator), } @@ -343,8 +343,10 @@ impl<'a> FunctionContext<'a> { if operator == BinaryOpKind::Multiply && bit_size == 1 { result - } else if operator == BinaryOpKind::ShiftLeft { - self.check_left_shift_overflow(result, rhs, bit_size, location) + } else if operator == BinaryOpKind::ShiftLeft + || operator == BinaryOpKind::ShiftRight + { + self.check_shift_overflow(result, rhs, bit_size, location, false) } else { let message = format!("attempt to {} with overflow", op_name); let range_constraint = Instruction::RangeCheck { @@ -360,26 +362,44 @@ impl<'a> FunctionContext<'a> { } } - /// Overflow checks for shift-left - /// We use Rust behavior for shift left: + /// Overflow checks for bit-shift + /// We use Rust behavior for bit-shift: /// If rhs is more or equal than the bit size, then we overflow - /// If not, we do not overflow and shift left with 0 when bits are falling out of the bit size - fn check_left_shift_overflow( + /// If not, we do not overflow and shift with 0 when bits are falling out of the bit size + fn check_shift_overflow( &mut self, result: ValueId, rhs: ValueId, bit_size: u32, location: Location, + is_signed: bool, ) -> ValueId { + let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); + let rhs = + if is_signed { self.builder.insert_cast(rhs, Type::unsigned(bit_size)) } else { rhs }; + // Bit-shift with a negative number is an overflow + if is_signed { + // We compute the sign of rhs. 
+ let half_width = self.builder.numeric_constant( + FieldElement::from(2_i128.pow(bit_size - 1)), + Type::unsigned(bit_size), + ); + let sign = self.builder.insert_binary(rhs, BinaryOp::Lt, half_width); + self.builder.set_location(location).insert_constrain( + sign, + one, + Some("attempt to bit-shift with overflow".to_string()), + ); + } + let max = self .builder .numeric_constant(FieldElement::from(bit_size as i128), Type::unsigned(bit_size)); let overflow = self.builder.insert_binary(rhs, BinaryOp::Lt, max); - let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); self.builder.set_location(location).insert_constrain( overflow, one, - Some("attempt to left shift with overflow".to_owned()), + Some("attempt to bit-shift with overflow".to_owned()), ); self.builder.insert_truncate(result, bit_size, bit_size + 1) } @@ -466,7 +486,6 @@ impl<'a> FunctionContext<'a> { Some(message), ); } - BinaryOpKind::ShiftLeft => unreachable!("shift is not supported for signed integer"), _ => unreachable!("operator {} should not overflow", operator), } } @@ -482,19 +501,26 @@ impl<'a> FunctionContext<'a> { mut rhs: ValueId, location: Location, ) -> Values { + let result_type = self.builder.type_of_value(lhs); let mut result = match operator { BinaryOpKind::ShiftLeft => { - let result_type = self.builder.current_function.dfg.type_of_value(lhs); let bit_size = match result_type { Type::Numeric(NumericType::Signed { bit_size }) | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, - _ => unreachable!("ICE: Truncation attempted on non-integer"), + _ => unreachable!("ICE: left-shift attempted on non-integer"), }; self.builder.insert_wrapping_shift_left(lhs, rhs, bit_size) } - BinaryOpKind::ShiftRight => self.builder.insert_shift_right(lhs, rhs), + BinaryOpKind::ShiftRight => { + let bit_size = match result_type { + Type::Numeric(NumericType::Signed { bit_size }) + | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, + _ => unreachable!("ICE: right-shift attempted on non-integer"), + }; + self.builder.insert_shift_right(lhs, rhs, bit_size) + } BinaryOpKind::Equal | BinaryOpKind::NotEqual - if matches!(self.builder.type_of_value(lhs), Type::Array(..)) => + if matches!(result_type, Type::Array(..)) => { return self.insert_array_equality(lhs, operator, rhs, location) } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index d7e6b8b0a3d..c00fbbbcb40 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -8,8 +8,8 @@ use context::SharedContext; use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use noirc_frontend::{ - monomorphization::ast::{self, Binary, Expression, Program}, - BinaryOpKind, Visibility, + monomorphization::ast::{self, Expression, Program}, + Visibility, }; use crate::{ @@ -653,24 +653,10 @@ impl<'a> FunctionContext<'a> { location: Location, assert_message: Option, ) -> Result { - match expr { - // If we're constraining an equality to be true then constrain the two sides directly. - Expression::Binary(Binary { lhs, operator: BinaryOpKind::Equal, rhs, .. 
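The `check_shift_overflow` constraint follows the Rust convention that shifting by an amount greater than or equal to the operand's bit size is an overflow (and, for signed operands, so is a negative shift amount). The unsigned half of that rule can be seen with std's checked shifts; a small standalone illustration in plain Rust (the negative-amount case has no direct analogue here, since Rust shift amounts are unsigned):

    fn main() {
        let x: u8 = 0b1010_0001;
        assert_eq!(x.checked_shl(3), Some(x << 3)); // in-range shift amounts succeed
        assert_eq!(x.checked_shl(8), None); // rhs == bit size counts as overflow
        assert_eq!(x.checked_shr(9), None); // rhs > bit size as well
        println!("shift amounts >= bit size are rejected, matching the SSA check");
    }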
}) => { - let lhs = self.codegen_non_tuple_expression(lhs)?; - let rhs = self.codegen_non_tuple_expression(rhs)?; - self.builder.set_location(location).insert_constrain(lhs, rhs, assert_message); - } + let expr = self.codegen_non_tuple_expression(expr)?; + let true_literal = self.builder.numeric_constant(true, Type::bool()); + self.builder.set_location(location).insert_constrain(expr, true_literal, assert_message); - _ => { - let expr = self.codegen_non_tuple_expression(expr)?; - let true_literal = self.builder.numeric_constant(true, Type::bool()); - self.builder.set_location(location).insert_constrain( - expr, - true_literal, - assert_message, - ); - } - } Ok(Self::unit_value()) } diff --git a/noir/compiler/noirc_frontend/Cargo.toml b/noir/compiler/noirc_frontend/Cargo.toml index aa3a8e9f6b8..80d767f7f2c 100644 --- a/noir/compiler/noirc_frontend/Cargo.toml +++ b/noir/compiler/noirc_frontend/Cargo.toml @@ -22,7 +22,7 @@ serde.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" regex = "1.9.1" -log.workspace = true +tracing.workspace = true [dev-dependencies] strum = "0.24" diff --git a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index ae061792125..8ada3faf756 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -4,7 +4,7 @@ use crate::graph::CrateId; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use crate::hir::resolution::import::{resolve_imports, ImportDirective}; +use crate::hir::resolution::import::{resolve_import, ImportDirective}; use crate::hir::resolution::resolver::Resolver; use crate::hir::resolution::{ collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, @@ -259,37 +259,33 @@ impl DefCollector { ); } - // Resolve unresolved imports collected from the crate - let (resolved, unresolved_imports) = - resolve_imports(crate_id, def_collector.collected_imports, &context.def_maps); - - { - let current_def_map = context.def_maps.get(&crate_id).unwrap(); - errors.extend(vecmap(unresolved_imports, |(error, module_id)| { - let file_id = current_def_map.file_id(module_id); - let error = DefCollectorErrorKind::PathResolutionError(error); - (error.into(), file_id) - })); - }; - - // Populate module namespaces according to the imports used - let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); - for resolved_import in resolved { - let name = resolved_import.name; - for ns in resolved_import.resolved_namespace.iter_defs() { - let result = current_def_map.modules[resolved_import.module_scope.0].import( - name.clone(), - ns, - resolved_import.is_prelude, - ); - - if let Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Import, - first_def, - second_def, - }; - errors.push((err.into(), root_file_id)); + // Resolve unresolved imports collected from the crate, one by one. 
+ for collected_import in def_collector.collected_imports { + match resolve_import(crate_id, collected_import, &context.def_maps) { + Ok(resolved_import) => { + // Populate module namespaces according to the imports used + let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); + + let name = resolved_import.name; + for ns in resolved_import.resolved_namespace.iter_defs() { + let result = current_def_map.modules[resolved_import.module_scope.0] + .import(name.clone(), ns, resolved_import.is_prelude); + + if let Err((first_def, second_def)) = result { + let err = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::Import, + first_def, + second_def, + }; + errors.push((err.into(), root_file_id)); + } + } + } + Err((error, module_id)) => { + let current_def_map = context.def_maps.get(&crate_id).unwrap(); + let file_id = current_def_map.file_id(module_id); + let error = DefCollectorErrorKind::PathResolutionError(error); + errors.push((error.into(), file_id)); } } } diff --git a/noir/compiler/noirc_frontend/src/hir/mod.rs b/noir/compiler/noirc_frontend/src/hir/mod.rs index adeca7cf2ba..3683b17a27c 100644 --- a/noir/compiler/noirc_frontend/src/hir/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/mod.rs @@ -10,6 +10,7 @@ use crate::node_interner::{FuncId, NodeInterner, StructId}; use def_map::{Contract, CrateDefMap}; use fm::FileManager; use noirc_errors::Location; +use std::borrow::Cow; use std::collections::BTreeMap; use self::def_map::TestFunction; @@ -17,11 +18,14 @@ use self::def_map::TestFunction; /// Helper object which groups together several useful context objects used /// during name resolution. Once name resolution is finished, only the /// def_interner is required for type inference and monomorphization. -pub struct Context { +pub struct Context<'file_manager> { pub def_interner: NodeInterner, pub crate_graph: CrateGraph, pub(crate) def_maps: BTreeMap, - pub file_manager: FileManager, + // In the WASM context, we take ownership of the file manager, + // which is why this needs to be a Cow. In all use-cases, the file manager + // is read-only however, once it has been passed to the Context. + pub file_manager: Cow<'file_manager, FileManager>, /// A map of each file that already has been visited from a prior `mod foo;` declaration. /// This is used to issue an error if a second `mod foo;` is declared to the same file. @@ -35,15 +39,24 @@ pub enum FunctionNameMatch<'a> { Contains(&'a str), } -impl Context { - pub fn new(file_manager: FileManager) -> Context { - let crate_graph = CrateGraph::default(); +impl Context<'_> { + pub fn new(file_manager: FileManager) -> Context<'static> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), visited_files: BTreeMap::new(), - crate_graph, - file_manager, + crate_graph: CrateGraph::default(), + file_manager: Cow::Owned(file_manager), + } + } + + pub fn from_ref_file_manager(file_manager: &FileManager) -> Context<'_> { + Context { + def_interner: NodeInterner::default(), + def_maps: BTreeMap::new(), + visited_files: BTreeMap::new(), + crate_graph: CrateGraph::default(), + file_manager: Cow::Borrowed(file_manager), } } @@ -181,14 +194,6 @@ impl Context { .collect() } - /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId]. - /// Returns [None] when definition is not found. 
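The `Cow` field lets a single `Context` type either borrow the file manager (the usual read-only case) or own it (the WASM case). A minimal standalone sketch of that pattern in plain Rust, with a `Vec<String>` standing in for the real `FileManager`; all names here are illustrative only:

    use std::borrow::Cow;

    struct Context<'a> {
        files: Cow<'a, Vec<String>>, // stand-in for `Cow<'file_manager, FileManager>`
    }

    fn main() {
        let shared = vec!["main.nr".to_string(), "lib.nr".to_string()];

        // Borrowing variant: the caller keeps ownership of the files.
        let borrowed = Context { files: Cow::Borrowed(&shared) };

        // Owning variant: the context carries the files with it, as in the WASM usage.
        let owned: Context<'static> = Context { files: Cow::Owned(shared.clone()) };

        assert_eq!(borrowed.files.len(), owned.files.len());
    }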
- pub fn get_definition_location_from(&self, location: Location) -> Option { - let interner = &self.def_interner; - - interner.find_location_index(location).and_then(|index| interner.resolve_location(index)) - } - /// Return a Vec of all `contract` declarations in the source code and the functions they contain pub fn get_all_contracts(&self, crate_id: &CrateId) -> Vec { self.def_map(crate_id) diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/import.rs b/noir/compiler/noirc_frontend/src/hir/resolution/import.rs index 41fdac746bd..e6ac33053a0 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -1,4 +1,3 @@ -use iter_extended::partition_results; use noirc_errors::{CustomDiagnostic, Span}; use crate::graph::CrateId; @@ -51,29 +50,27 @@ impl From for CustomDiagnostic { } } -pub fn resolve_imports( +pub fn resolve_import( crate_id: CrateId, - imports_to_resolve: Vec, + import_directive: ImportDirective, def_maps: &BTreeMap, -) -> (Vec, Vec<(PathResolutionError, LocalModuleId)>) { +) -> Result { let def_map = &def_maps[&crate_id]; - partition_results(imports_to_resolve, |import_directive| { - let allow_contracts = - allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); - - let module_scope = import_directive.module_id; - let resolved_namespace = - resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) - .map_err(|error| (error, module_scope))?; - - let name = resolve_path_name(&import_directive); - Ok(ResolvedImport { - name, - resolved_namespace, - module_scope, - is_prelude: import_directive.is_prelude, - }) + let allow_contracts = + allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); + + let module_scope = import_directive.module_id; + let resolved_namespace = + resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) + .map_err(|error| (error, module_scope))?; + + let name = resolve_path_name(&import_directive); + Ok(ResolvedImport { + name, + resolved_namespace, + module_scope, + is_prelude: import_directive.is_prelude, }) } diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 9cd28d80784..91cfa5c6058 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1253,8 +1253,10 @@ impl<'a> Resolver<'a> { if let Some((hir_expr, object_type)) = self.resolve_trait_generic_path(&path) { let expr_id = self.interner.push_expr(hir_expr); self.interner.push_expr_location(expr_id, expr.span, self.file); - self.interner - .select_impl_for_ident(expr_id, TraitImplKind::Assumed { object_type }); + self.interner.select_impl_for_expression( + expr_id, + TraitImplKind::Assumed { object_type }, + ); return expr_id; } else { // If the Path is being used as an Expression, then it is referring to a global from a separate module @@ -1313,10 +1315,12 @@ impl<'a> Resolver<'a> { ExpressionKind::Infix(infix) => { let lhs = self.resolve_expression(infix.lhs); let rhs = self.resolve_expression(infix.rhs); + let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); HirExpression::Infix(HirInfixExpression { lhs, operator: HirBinaryOp::new(infix.operator, self.file), + trait_method_id: trait_id, rhs, }) } diff --git a/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs b/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs index 
54d2630c722..40041b0fd00 100644 --- a/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/noir/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -52,6 +52,13 @@ pub(crate) fn resolve_traits( context.def_interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); }); + + // This check needs to be after the trait's methods are set since + // the interner may set `interner.ordering_type` based on the result type + // of the Cmp trait, if this is it. + if crate_id.is_stdlib() { + context.def_interner.try_add_operator_trait(trait_id); + } } res } diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs index f7154895150..caa77852560 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -11,7 +11,7 @@ use crate::{ types::Type, }, node_interner::{DefinitionKind, ExprId, FuncId, TraitId, TraitImplKind, TraitMethodId}, - BinaryOpKind, Signedness, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, + BinaryOpKind, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, }; use super::{errors::TypeCheckError, TypeChecker}; @@ -136,11 +136,21 @@ impl<'interner> TypeChecker<'interner> { let rhs_span = self.interner.expr_span(&infix_expr.rhs); let span = lhs_span.merge(rhs_span); - self.infix_operand_type_rules(&lhs_type, &infix_expr.operator, &rhs_type, span) - .unwrap_or_else(|error| { + let operator = &infix_expr.operator; + match self.infix_operand_type_rules(&lhs_type, operator, &rhs_type, span) { + Ok((typ, use_impl)) => { + if use_impl { + let id = infix_expr.trait_method_id; + self.verify_trait_constraint(&lhs_type, id.trait_id, *expr_id, span); + self.typecheck_operator_method(*expr_id, id, &lhs_type, span); + } + typ + } + Err(error) => { self.errors.push(error); Type::Error - }) + } + } } HirExpression::Index(index_expr) => self.check_index_expression(expr_id, index_expr), HirExpression::Call(call_expr) => { @@ -294,7 +304,7 @@ impl<'interner> TypeChecker<'interner> { // We must also remember to apply these substitutions to the object_type // referenced by the selected trait impl, if one has yet to be selected. - let impl_kind = self.interner.get_selected_impl_for_ident(*expr_id); + let impl_kind = self.interner.get_selected_impl_for_expression(*expr_id); if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { let the_trait = self.interner.get_trait(method.trait_id); let object_type = object_type.substitute(&bindings); @@ -302,8 +312,10 @@ impl<'interner> TypeChecker<'interner> { the_trait.self_type_typevar_id, (the_trait.self_type_typevar.clone(), object_type.clone()), ); - self.interner - .select_impl_for_ident(*expr_id, TraitImplKind::Assumed { object_type }); + self.interner.select_impl_for_expression( + *expr_id, + TraitImplKind::Assumed { object_type }, + ); } self.interner.store_instantiation_bindings(*expr_id, bindings); @@ -323,7 +335,7 @@ impl<'interner> TypeChecker<'interner> { span: Span, ) { match self.interner.lookup_trait_implementation(object_type, trait_id) { - Ok(impl_kind) => self.interner.select_impl_for_ident(function_ident_id, impl_kind), + Ok(impl_kind) => self.interner.select_impl_for_expression(function_ident_id, impl_kind), Err(erroring_constraints) => { // Don't show any errors where try_get_trait returns None. // This can happen if a trait is used that was never declared. 
@@ -753,19 +765,22 @@ impl<'interner> TypeChecker<'interner> { None } + // Given a binary comparison operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. fn comparator_operand_type_rules( &mut self, lhs_type: &Type, rhs_type: &Type, op: &HirBinaryOp, span: Span, - ) -> Result { - use crate::BinaryOpKind::{Equal, NotEqual}; + ) -> Result<(Type, bool), TypeCheckError> { use Type::*; match (lhs_type, rhs_type) { // Avoid reporting errors multiple times - (Error, _) | (_, Error) => Ok(Bool), + (Error, _) | (_, Error) => Ok((Bool, false)), // Matches on TypeVariable must be first to follow any type // bindings. @@ -791,7 +806,7 @@ impl<'interner> TypeChecker<'interner> { || other == &Type::Error { Type::apply_type_bindings(bindings); - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -816,36 +831,23 @@ impl<'interner> TypeChecker<'interner> { span, }); } - Ok(Bool) - } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) + Ok((Bool, false)) } (FieldElement, FieldElement) => { if op.kind.is_valid_for_field_type() { - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::FieldComparison { span }) } } // <= and friends are technically valid for booleans, just not very useful - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), // Special-case == and != for arrays (Array(x_size, x_type), Array(y_size, y_type)) - if matches!(op.kind, Equal | NotEqual) => + if matches!(op.kind, BinaryOpKind::Equal | BinaryOpKind::NotEqual) => { - self.unify(x_type, y_type, || TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::ArrayElements, - span: op.location.span, - }); - self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), actual: rhs_type.clone(), @@ -853,19 +855,9 @@ impl<'interner> TypeChecker<'interner> { span: op.location.span, }); - Ok(Bool) - } - (lhs @ NamedGeneric(binding_a, _), rhs @ NamedGeneric(binding_b, _)) => { - if binding_a == binding_b { - return Ok(Bool); - } - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }) + self.comparator_operand_type_rules(x_type, y_type, op, span) } + (String(x_size), String(y_size)) => { self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: *x_size.clone(), @@ -874,14 +866,17 @@ impl<'interner> TypeChecker<'interner> { source: Source::StringLen, }); - Ok(Bool) + Ok((Bool, false)) + } + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((Bool, true)) } - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }), } } @@ -1041,13 +1036,16 @@ impl<'interner> TypeChecker<'interner> { } // Given a binary operator and another type. 
This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. fn infix_operand_type_rules( &mut self, lhs_type: &Type, op: &HirBinaryOp, rhs_type: &Type, span: Span, - ) -> Result { + ) -> Result<(Type, bool), TypeCheckError> { if op.kind.is_comparator() { return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); } @@ -1055,7 +1053,7 @@ impl<'interner> TypeChecker<'interner> { use Type::*; match (lhs_type, rhs_type) { // An error type on either side will always return an error - (Error, _) | (_, Error) => Ok(Error), + (Error, _) | (_, Error) => Ok((Error, false)), // Matches on TypeVariable must be first so that we follow any type // bindings. @@ -1096,7 +1094,7 @@ impl<'interner> TypeChecker<'interner> { || other == &Type::Error { Type::apply_type_bindings(bindings); - Ok(other.clone()) + Ok((other.clone(), false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -1121,31 +1119,8 @@ impl<'interner> TypeChecker<'interner> { span, }); } - if op.is_bit_shift() - && (*sign_x == Signedness::Signed || *sign_y == Signedness::Signed) - { - Err(TypeCheckError::InvalidInfixOp { kind: "Signed integer", span }) - } else { - Ok(Integer(*sign_x, *bit_width_x)) - } + Ok((Integer(*sign_x, *bit_width_x), false)) } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) - } - // These types are not supported in binary operations - (Array(..), _) | (_, Array(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Arrays", span }) - } - (Struct(..), _) | (_, Struct(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Structs", span }) - } - (Tuple(_), _) | (_, Tuple(_)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Tuples", span }) - } - // The result of two Fields is always a witness (FieldElement, FieldElement) => { if op.is_bitwise() { @@ -1154,17 +1129,20 @@ impl<'interner> TypeChecker<'interner> { if op.is_modulo() { return Err(TypeCheckError::FieldModulo { span }); } - Ok(FieldElement) + Ok((FieldElement, false)) } - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::BinOp(op.kind), - span, - }), + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((lhs.clone(), true)) + } } } @@ -1216,6 +1194,57 @@ impl<'interner> TypeChecker<'interner> { } } } + + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. + /// + /// Although by this point the operator is expected to already have a trait impl, + /// we still need to match the operator's type against the method's instantiated type + /// to ensure the instantiation bindings are correct and the monomorphizer can + /// re-apply the needed bindings. 
+ fn typecheck_operator_method( + &mut self, + expr_id: ExprId, + trait_method_id: TraitMethodId, + object_type: &Type, + span: Span, + ) { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + + let method = &the_trait.methods[trait_method_id.method_index]; + let (method_type, mut bindings) = method.typ.instantiate(self.interner); + + match method_type { + Type::Function(args, _, _) => { + // We can cheat a bit and match against only the object type here since no operator + // overload uses other generic parameters or return types aside from the object type. + let expected_object_type = &args[0]; + self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_object_type.to_string(), + expr_typ: object_type.to_string(), + expr_span: span, + }); + } + other => { + unreachable!("Expected operator method to have a function type, but found {other}") + } + } + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); + if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner + .select_impl_for_expression(expr_id, TraitImplKind::Assumed { object_type }); + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + } } /// Taken from: https://stackoverflow.com/a/47127500 diff --git a/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs b/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs index 95991047091..092e8631f1b 100644 --- a/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -241,7 +241,7 @@ mod test { function::{FuncMeta, HirFunction}, stmt::HirStatement, }; - use crate::node_interner::{DefinitionKind, FuncId, NodeInterner}; + use crate::node_interner::{DefinitionKind, FuncId, NodeInterner, TraitId, TraitMethodId}; use crate::{ hir::{ def_map::{CrateDefMap, LocalModuleId, ModuleDefId}, @@ -254,6 +254,7 @@ mod test { #[test] fn basic_let() { let mut interner = NodeInterner::default(); + interner.populate_dummy_operator_traits(); // Safety: The FileId in a location isn't used for tests let file = FileId::default(); @@ -284,7 +285,9 @@ mod test { // Create Infix let operator = HirBinaryOp { location, kind: BinaryOpKind::Add }; - let expr = HirInfixExpression { lhs: x_expr_id, operator, rhs: y_expr_id }; + let trait_id = TraitId(ModuleId::dummy_id()); + let trait_method_id = TraitMethodId { trait_id, method_index: 0 }; + let expr = HirInfixExpression { lhs: x_expr_id, operator, rhs: y_expr_id, trait_method_id }; let expr_id = interner.push_expr(HirExpression::Infix(expr)); interner.push_expr_location(expr_id, Span::single_char(0), file); @@ -469,6 +472,7 @@ mod test { ) { let (program, errors) = parse_program(src); let mut interner = NodeInterner::default(); + interner.populate_dummy_operator_traits(); assert_eq!( errors.len(), diff --git a/noir/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/compiler/noirc_frontend/src/hir_def/expr.rs index ef1c3af7ac0..7c04398ca88 100644 --- a/noir/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/compiler/noirc_frontend/src/hir_def/expr.rs @@ -101,6 +101,12 @@ pub struct 
HirInfixExpression { pub lhs: ExprId, pub operator: HirBinaryOp, pub rhs: ExprId, + + /// The trait method id for the operator trait method that corresponds to this operator. + /// For derived operators like `!=`, this will lead to the method `Eq::eq`. For these + /// cases, it is up to the monomorphization pass to insert the appropriate `not` operation + /// after the call to `Eq::eq` to get the result of the `!=` operator. + pub trait_method_id: TraitMethodId, } /// This is always a struct field access `my_struct.field` diff --git a/noir/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/compiler/noirc_frontend/src/monomorphization/ast.rs index 5a5f07b0a38..42a618e7d77 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -352,7 +352,7 @@ impl std::fmt::Display for Type { }; write!(f, "fn({}) -> {}{}", args.join(", "), ret, closure_env_text) } - Type::Slice(element) => write!(f, "[{element}"), + Type::Slice(element) => write!(f, "[{element}]"), Type::MutableReference(element) => write!(f, "&mut {element}"), } } diff --git a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs index 52b8d5bfd79..bb0972987e4 100644 --- a/noir/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -26,7 +26,7 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, + ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, Visibility, }; @@ -91,8 +91,8 @@ type HirType = crate::Type; /// Note that there is no requirement on the `main` function that can be passed into /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. +#[tracing::instrument(level = "trace", skip(main, interner))] pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Program { - log::trace!("Start monomorphization"); let mut monomorphizer = Monomorphizer::new(interner); let function_sig = monomorphizer.compile_main(main); @@ -108,7 +108,6 @@ pub fn monomorphize(main: node_interner::FuncId, interner: &NodeInterner) -> Pro let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); let FuncMeta { return_distinctness, return_visibility, .. 
} = interner.function_meta(&main); - log::trace!("Finish monomorphization"); Program::new( functions, function_sig, @@ -239,6 +238,7 @@ impl<'interner> Monomorphizer<'interner> { }); let parameters = self.parameters(meta.parameters); + let body = self.expr(body_expr_id); let unconstrained = modifiers.is_unconstrained || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); @@ -357,11 +357,37 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Infix(infix) => { - let lhs = Box::new(self.expr(infix.lhs)); - let rhs = Box::new(self.expr(infix.rhs)); + let lhs = self.expr(infix.lhs); + let rhs = self.expr(infix.rhs); let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); - ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + + if self.interner.get_selected_impl_for_expression(expr).is_some() { + // If an impl was selected for this infix operator, replace it + // with a method call to the appropriate trait impl method. + let lhs_type = self.interner.id_type(infix.lhs); + let args = vec![lhs_type.clone(), lhs_type]; + + // If this is a comparison operator, the result is a boolean but + // the actual method call returns an Ordering + use crate::BinaryOpKind::*; + let ret = if matches!(operator, Less | LessEqual | Greater | GreaterEqual) { + self.interner.ordering_type() + } else { + self.interner.id_type(expr) + }; + + let env = Box::new(Type::Unit); + let function_type = Type::Function(args, Box::new(ret.clone()), env); + + let method = infix.trait_method_id; + let func = self.resolve_trait_method_reference(expr, function_type, method); + self.create_operator_impl_call(func, lhs, infix.operator, rhs, ret, location) + } else { + let lhs = Box::new(lhs); + let rhs = Box::new(rhs); + ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + } } HirExpression::Index(index) => self.index(expr, index), @@ -401,13 +427,8 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Lambda(lambda) => self.lambda(lambda, expr), HirExpression::TraitMethodReference(method) => { - if let Type::Function(_, _, _) = self.interner.id_type(expr) { - self.resolve_trait_method_reference(expr, method) - } else { - unreachable!( - "Calling a non-function, this should've been caught in typechecking" - ); - } + let function_type = self.interner.id_type(expr); + self.resolve_trait_method_reference(expr, function_type, method) } HirExpression::MethodCall(hir_method_call) => { @@ -827,13 +848,12 @@ impl<'interner> Monomorphizer<'interner> { fn resolve_trait_method_reference( &mut self, expr_id: node_interner::ExprId, + function_type: HirType, method: TraitMethodId, ) -> ast::Expression { - let function_type = self.interner.id_type(expr_id); - let trait_impl = self .interner - .get_selected_impl_for_ident(expr_id) + .get_selected_impl_for_expression(expr_id) .expect("ICE: missing trait impl - should be caught during type checking"); let hir_func_id = match trait_impl { @@ -954,7 +974,7 @@ impl<'interner> Monomorphizer<'interner> { } /// Adds a function argument that contains type metadata that is required to tell - /// `println` how to convert values passed to an foreign call back to a human-readable string. + /// `println` how to convert values passed to an foreign call back to a human-readable string. /// The values passed to an foreign call will be a simple list of field elements, /// thus requiring extra metadata to correctly decode this list of elements. 
/// @@ -1088,7 +1108,7 @@ impl<'interner> Monomorphizer<'interner> { function_type: HirType, ) -> FuncId { let new_id = self.next_function_id(); - self.define_global(id, function_type, new_id); + self.define_global(id, function_type.clone(), new_id); let bindings = self.interner.get_instantiation_bindings(expr_id); let bindings = self.follow_bindings(bindings); @@ -1418,6 +1438,70 @@ impl<'interner> Monomorphizer<'interner> { ), }) } + + /// Call an operator overloading method for the given operator. + /// This function handles the special cases some operators have which don't map + /// 1 to 1 onto their operator function. For example: != requires a negation on + /// the result of its `eq` method, and the comparison operators each require a + /// conversion from the `Ordering` result to a boolean. + fn create_operator_impl_call( + &self, + func: ast::Expression, + lhs: ast::Expression, + operator: HirBinaryOp, + rhs: ast::Expression, + ret: Type, + location: Location, + ) -> ast::Expression { + let arguments = vec![lhs, rhs]; + let func = Box::new(func); + let return_type = self.convert_type(&ret); + + let mut result = + ast::Expression::Call(ast::Call { func, arguments, return_type, location }); + + use crate::BinaryOpKind::*; + match operator.kind { + // Negate the result of the == operation + NotEqual => { + result = ast::Expression::Unary(ast::Unary { + operator: UnaryOp::Not, + rhs: Box::new(result), + result_type: ast::Type::Bool, + location, + }); + } + // All the comparison operators require special handling since their `cmp` method + // returns an `Ordering` rather than a boolean value. + // + // (a < b) => a.cmp(b) == Ordering::Less + // (a <= b) => a.cmp(b) != Ordering::Greater + // (a > b) => a.cmp(b) == Ordering::Greater + // (a >= b) => a.cmp(b) != Ordering::Less + Less | LessEqual | Greater | GreaterEqual => { + // Comparing an Ordering directly to a field value in this way takes advantage + // of the fact the Ordering struct contains a single Field type, and our SSA + // pass will automatically unpack tuple values. + let ordering_value = if matches!(operator.kind, Less | GreaterEqual) { + FieldElement::zero() // Ordering::Less + } else { + 2u128.into() // Ordering::Greater + }; + + let operator = + if matches!(operator.kind, Less | Greater) { Equal } else { NotEqual }; + + let int_value = ast::Literal::Integer(ordering_value, ast::Type::Field, location); + let rhs = Box::new(ast::Expression::Literal(int_value)); + let lhs = Box::new(ast::Expression::ExtractTupleField(Box::new(result), 0)); + + result = ast::Expression::Binary(ast::Binary { lhs, operator, rhs, location }); + } + _ => (), + } + + result + } } fn unwrap_tuple_type(typ: &HirType) -> Vec { diff --git a/noir/compiler/noirc_frontend/src/node_interner.rs b/noir/compiler/noirc_frontend/src/node_interner.rs index 236f1e0b513..9082df1bcd5 100644 --- a/noir/compiler/noirc_frontend/src/node_interner.rs +++ b/noir/compiler/noirc_frontend/src/node_interner.rs @@ -21,8 +21,8 @@ use crate::hir_def::{ }; use crate::token::{Attributes, SecondaryAttribute}; use crate::{ - ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, TypeAliasType, - TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, + BinaryOpKind, ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, + TypeAliasType, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, }; /// An arbitrary number to limit the recursion depth when searching for trait impls. 
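The `create_operator_impl_call` helper added above handles two special cases: `!=` negates the result of the `Eq::eq` call, and the ordered comparisons test the single field inside the `Ordering` value returned by `cmp`, which the hunk encodes as 0 for `Ordering::Less` and 2 for `Ordering::Greater`. Below is a small standalone sketch of that truth table, with a plain integer standing in for the `Ordering` field; the names are illustrative, not the monomorphizer's real API.

```rust
// Illustrative only: plain integers stand in for the single Field inside `Ordering`.
const LESS: u128 = 0;
const GREATER: u128 = 2;

#[derive(Clone, Copy)]
enum CmpOp {
    Less,
    LessEqual,
    Greater,
    GreaterEqual,
}

/// Lowers an ordered comparison the same way the rewrite above does:
///   a <  b  =>  a.cmp(b) == Ordering::Less
///   a <= b  =>  a.cmp(b) != Ordering::Greater
///   a >  b  =>  a.cmp(b) == Ordering::Greater
///   a >= b  =>  a.cmp(b) != Ordering::Less
fn lower_comparison(op: CmpOp, ordering_field: u128) -> bool {
    match op {
        CmpOp::Less => ordering_field == LESS,
        CmpOp::LessEqual => ordering_field != GREATER,
        CmpOp::Greater => ordering_field == GREATER,
        CmpOp::GreaterEqual => ordering_field != LESS,
    }
}

/// `a != b` is lowered as the negation of the `Eq::eq` call's result.
fn lower_not_equal(eq_result: bool) -> bool {
    !eq_result
}

fn main() {
    // 3.cmp(5) is Less (encoded as 0): `<` and `<=` hold, `>` and `>=` do not.
    assert!(lower_comparison(CmpOp::Less, LESS));
    assert!(lower_comparison(CmpOp::LessEqual, LESS));
    assert!(!lower_comparison(CmpOp::Greater, LESS));
    assert!(!lower_comparison(CmpOp::GreaterEqual, LESS));
    // 4 == 4, so 4 != 4 is false.
    assert!(!lower_not_equal(true));
}
```

In the compiler itself the result stays an AST rewrite (an `ExtractTupleField` of the call result compared against a `Field` literal, as in the hunk above), but the boolean outcomes match this sketch.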
@@ -110,6 +110,12 @@ pub struct NodeInterner { /// the context to get the concrete type of the object and select the correct impl itself. selected_trait_implementations: HashMap, + /// Holds the trait ids of the traits used for operator overloading + operator_traits: HashMap, + + /// The `Ordering` type is a semi-builtin type that is the result of the comparison traits. + ordering_type: Option, + /// Map from ExprId (referring to a Function/Method call) to its corresponding TypeBindings, /// filled out during type checking from instantiated variables. Used during monomorphization /// to map call site types back onto function parameter types, and undo this binding as needed. @@ -423,6 +429,8 @@ impl Default for NodeInterner { trait_implementations: Vec::new(), trait_implementation_map: HashMap::new(), selected_trait_implementations: HashMap::new(), + operator_traits: HashMap::new(), + ordering_type: None, instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), next_type_variable_id: std::cell::Cell::new(0), @@ -1254,20 +1262,25 @@ impl NodeInterner { /// Tags the given identifier with the selected trait_impl so that monomorphization /// can later recover which impl was selected, or alternatively see if it needs to /// decide which impl to select (because the impl was Assumed). - pub fn select_impl_for_ident(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { + pub fn select_impl_for_expression(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { self.selected_trait_implementations.insert(ident_id, trait_impl); } - /// Retrieves the impl selected for a given IdentId during name resolution. - /// From type checking and on, the "ident" referred to is changed to a TraitMethodReference node. - pub fn get_selected_impl_for_ident(&self, ident_id: ExprId) -> Option { + /// Retrieves the impl selected for a given ExprId during name resolution. + pub fn get_selected_impl_for_expression(&self, ident_id: ExprId) -> Option { self.selected_trait_implementations.get(&ident_id).cloned() } + /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId]. + /// Returns [None] when definition is not found. 
+ pub fn get_definition_location_from(&self, location: Location) -> Option { + self.find_location_index(location).and_then(|index| self.resolve_location(index)) + } + /// For a given [Index] we return [Location] to which we resolved to /// We currently return None for features not yet implemented /// TODO(#3659): LSP goto def should error when Ident at Location could not resolve - pub(crate) fn resolve_location(&self, index: impl Into) -> Option { + fn resolve_location(&self, index: impl Into) -> Option { let node = self.nodes.get(index.into())?; match node { @@ -1294,9 +1307,6 @@ impl NodeInterner { } HirExpression::Constructor(expr) => { let struct_type = &expr.r#type.borrow(); - - eprintln!("\n -> Resolve Constructor {struct_type:?}\n"); - Some(struct_type.location) } HirExpression::MemberAccess(expr_member_access) => { @@ -1322,35 +1332,103 @@ impl NodeInterner { let expr_lhs = &expr_member_access.lhs; let expr_rhs = &expr_member_access.rhs; - let found_ident = self.nodes.get(expr_lhs.into())?; - - let ident = match found_ident { - Node::Expression(HirExpression::Ident(ident)) => ident, + let lhs_self_struct = match self.id_type(expr_lhs) { + Type::Struct(struct_type, _) => struct_type, _ => return None, }; - let definition_info = self.definition(ident.id); - - let local_id = match definition_info.kind { - DefinitionKind::Local(Some(local_id)) => local_id, - _ => return None, - }; + let struct_type = lhs_self_struct.borrow(); + let field_names = struct_type.field_names(); - let constructor_expression = match self.nodes.get(local_id.into()) { - Some(Node::Expression(HirExpression::Constructor(constructor_expression))) => { - constructor_expression - } - _ => return None, + field_names.iter().find(|field_name| field_name.0 == expr_rhs.0).map(|found_field_name| { + Location::new(found_field_name.span(), struct_type.location.file) + }) + } + + /// Retrieves the trait id for a given binary operator. + /// All binary operators correspond to a trait - although multiple may correspond + /// to the same trait (such as `==` and `!=`). + /// `self.operator_traits` is expected to be filled before name resolution, + /// during definition collection. + pub fn get_operator_trait_method(&self, operator: BinaryOpKind) -> TraitMethodId { + let trait_id = self.operator_traits[&operator]; + + // Assume that the operator's method to be overloaded is the first method of the trait. + TraitMethodId { trait_id, method_index: 0 } + } + + /// Add the given trait as an operator trait if its name matches one of the + /// operator trait names (Add, Sub, ...). 
+ pub fn try_add_operator_trait(&mut self, trait_id: TraitId) { + let the_trait = self.get_trait(trait_id); + + let operator = match the_trait.name.0.contents.as_str() { + "Add" => BinaryOpKind::Add, + "Sub" => BinaryOpKind::Subtract, + "Mul" => BinaryOpKind::Multiply, + "Div" => BinaryOpKind::Divide, + "Rem" => BinaryOpKind::Modulo, + "Eq" => BinaryOpKind::Equal, + "Ord" => BinaryOpKind::Less, + "BitAnd" => BinaryOpKind::And, + "BitOr" => BinaryOpKind::Or, + "BitXor" => BinaryOpKind::Xor, + "Shl" => BinaryOpKind::ShiftLeft, + "Shr" => BinaryOpKind::ShiftRight, + _ => return, }; - let struct_type = constructor_expression.r#type.borrow(); - let field_names = struct_type.field_names(); + self.operator_traits.insert(operator, trait_id); - match field_names.iter().find(|field_name| field_name.0 == expr_rhs.0) { - Some(found) => Some(Location::new(found.span(), struct_type.location.file)), - None => None, + // Some operators also require we insert a matching entry for related operators + match operator { + BinaryOpKind::Equal => { + self.operator_traits.insert(BinaryOpKind::NotEqual, trait_id); + } + BinaryOpKind::Less => { + self.operator_traits.insert(BinaryOpKind::LessEqual, trait_id); + self.operator_traits.insert(BinaryOpKind::Greater, trait_id); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, trait_id); + + let the_trait = self.get_trait(trait_id); + self.ordering_type = match &the_trait.methods[0].typ { + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(_, return_type, _) => Some(return_type.as_ref().clone()), + other => unreachable!("Expected function type for `cmp`, found {}", other), + }, + other => unreachable!("Expected Forall type for `cmp`, found {}", other), + }; + } + _ => (), } } + + /// This function is needed when creating a NodeInterner for testing so that calls + /// to `get_operator_trait` do not panic when the stdlib isn't present. 
+ #[cfg(test)] + pub fn populate_dummy_operator_traits(&mut self) { + let dummy_trait = TraitId(ModuleId::dummy_id()); + self.operator_traits.insert(BinaryOpKind::Add, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Subtract, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Multiply, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Divide, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Modulo, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Equal, dummy_trait); + self.operator_traits.insert(BinaryOpKind::NotEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Less, dummy_trait); + self.operator_traits.insert(BinaryOpKind::LessEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Greater, dummy_trait); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::And, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Or, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Xor, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftLeft, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftRight, dummy_trait); + } + + pub(crate) fn ordering_type(&self) -> Type { + self.ordering_type.clone().expect("Expected ordering_type to be set in the NodeInterner") + } } impl Methods { diff --git a/noir/compiler/noirc_frontend/src/tests.rs b/noir/compiler/noirc_frontend/src/tests.rs index cd0c34f7e09..063e0215a30 100644 --- a/noir/compiler/noirc_frontend/src/tests.rs +++ b/noir/compiler/noirc_frontend/src/tests.rs @@ -53,6 +53,7 @@ mod test { let root = std::path::Path::new("/"); let fm = FileManager::new(root); let mut context = Context::new(fm); + context.def_interner.populate_dummy_operator_traits(); let root_file_id = FileId::dummy(); let root_crate_id = context.crate_graph.add_crate_root(root_file_id); let (program, parser_errors) = parse_program(src); diff --git a/noir/compiler/noirc_printable_type/Cargo.toml b/noir/compiler/noirc_printable_type/Cargo.toml index 5f2eea92257..fbbe778e561 100644 --- a/noir/compiler/noirc_printable_type/Cargo.toml +++ b/noir/compiler/noirc_printable_type/Cargo.toml @@ -14,5 +14,6 @@ regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true +jsonrpc.workspace = true [dev-dependencies] diff --git a/noir/compiler/noirc_printable_type/src/lib.rs b/noir/compiler/noirc_printable_type/src/lib.rs index e10e400b0db..273e2d512ea 100644 --- a/noir/compiler/noirc_printable_type/src/lib.rs +++ b/noir/compiler/noirc_printable_type/src/lib.rs @@ -73,6 +73,9 @@ pub enum ForeignCallError { #[error("Could not parse PrintableType argument. {0}")] ParsingError(#[from] serde_json::Error), + + #[error("Failed calling external resolver. 
{0}")] + ExternalResolverError(#[from] jsonrpc::Error), } impl TryFrom<&[ForeignCallParam]> for PrintableValueDisplay { diff --git a/noir/compiler/source-resolver/lib-node/index.js b/noir/compiler/source-resolver/lib-node/index.js deleted file mode 100644 index 7de637b6853..00000000000 --- a/noir/compiler/source-resolver/lib-node/index.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.initializeResolver = exports.read_file = void 0; -let resolveFunction = null; -const read_file = function (source_id) { - if (resolveFunction) { - const result = resolveFunction(source_id); - if (typeof result === 'string') { - return result; - } - else { - throw new Error('Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. `Promise`?'); - } - } - else { - throw new Error('Not yet initialized. Use initializeResolver(() => string)'); - } -}; -exports.read_file = read_file; -function initialize(noir_resolver) { - if (typeof noir_resolver === 'function') { - return noir_resolver; - } - else { - throw new Error('Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter'); - } -} -function initializeResolver(resolver) { - resolveFunction = initialize(resolver); -} -exports.initializeResolver = initializeResolver; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/noir/compiler/source-resolver/lib-node/index.js.map b/noir/compiler/source-resolver/lib-node/index.js.map deleted file mode 100644 index 4ac7301ddc9..00000000000 --- a/noir/compiler/source-resolver/lib-node/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,IAAI,eAAe,GAA2C,IAAI,CAAC;AAE5D,MAAM,SAAS,GAAG,UAAU,SAAiB;IAClD,IAAI,eAAe,EAAE;QACnB,MAAM,MAAM,GAAG,eAAe,CAAC,SAAS,CAAC,CAAC;QAE1C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,MAAM,CAAC;SACf;aAAM;YACL,MAAM,IAAI,KAAK,CACb,wHAAwH,CACzH,CAAC;SACH;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAA2D,CAAC,CAAC;KAC9E;AACH,CAAC,CAAC;AAdW,QAAA,SAAS,aAcpB;AAEF,SAAS,UAAU,CAAC,aAA4C;IAC9D,IAAI,OAAO,aAAa,KAAK,UAAU,EAAE;QACvC,OAAO,aAAa,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CACb,2GAA2G,CAC5G,CAAC;KACH;AACH,CAAC;AAED,SAAgB,kBAAkB,CAAC,QAAuC;IACxE,eAAe,GAAG,UAAU,CAAC,QAAQ,CAAC,CAAC;AACzC,CAAC;AAFD,gDAEC"} \ No newline at end of file diff --git a/noir/compiler/source-resolver/lib-node/index_node.js b/noir/compiler/source-resolver/lib-node/index_node.js deleted file mode 100644 index 7d54737ce49..00000000000 --- a/noir/compiler/source-resolver/lib-node/index_node.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -/// -Object.defineProperty(exports, "__esModule", { value: true }); -exports.read_file = exports.initializeResolver = void 0; -const index_js_1 = require("./index.js"); -Object.defineProperty(exports, "initializeResolver", { enumerable: true, get: function () { return index_js_1.initializeResolver; } }); -Object.defineProperty(exports, "read_file", { enumerable: true, get: function () { return index_js_1.read_file; } }); -(0, index_js_1.initializeResolver)((source_id) => { - let fileContent = ''; - try { - // eslint-disable-next-line @typescript-eslint/no-var-requires - const fs = require('fs'); - fileContent = fs.readFileSync(source_id, { encoding: 'utf8' }); - } - catch (e) { - console.log(e); - } - return fileContent; -}); -//# sourceMappingURL=index_node.js.map \ No newline at end of file diff --git 
a/noir/compiler/source-resolver/lib-node/index_node.js.map b/noir/compiler/source-resolver/lib-node/index_node.js.map deleted file mode 100644 index 920818232c3..00000000000 --- a/noir/compiler/source-resolver/lib-node/index_node.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index_node.js","sourceRoot":"","sources":["../src/index_node.ts"],"names":[],"mappings":";AAAA,8BAA8B;;;AAE9B,yCAA2D;AAclD,mGAdA,6BAAkB,OAcA;AAAE,0FAdA,oBAAS,OAcA;AAZtC,IAAA,6BAAkB,EAAC,CAAC,SAAiB,EAAE,EAAE;IACvC,IAAI,WAAW,GAAG,EAAE,CAAC;IACrB,IAAI;QACF,8DAA8D;QAC9D,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;QACzB,WAAW,GAAG,EAAE,CAAC,YAAY,CAAC,SAAS,EAAE,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAW,CAAC;KAC1E;IAAC,OAAO,CAAC,EAAE;QACV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;KAChB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/noir/compiler/source-resolver/lib/index.js b/noir/compiler/source-resolver/lib/index.js deleted file mode 100644 index 079b4460592..00000000000 --- a/noir/compiler/source-resolver/lib/index.js +++ /dev/null @@ -1,27 +0,0 @@ -let resolveFunction = null; -export const read_file = function (source_id) { - if (resolveFunction) { - const result = resolveFunction(source_id); - if (typeof result === 'string') { - return result; - } - else { - throw new Error('Noir source resolver function MUST return String synchronously. Are you trying to return anything else, eg. `Promise`?'); - } - } - else { - throw new Error('Not yet initialized. Use initializeResolver(() => string)'); - } -}; -function initialize(noir_resolver) { - if (typeof noir_resolver === 'function') { - return noir_resolver; - } - else { - throw new Error('Provided Noir Resolver is not a function, hint: use function(module_id) => NoirSource as second parameter'); - } -} -export function initializeResolver(resolver) { - resolveFunction = initialize(resolver); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/noir/compiler/source-resolver/lib/index.js.map b/noir/compiler/source-resolver/lib/index.js.map deleted file mode 100644 index e62ae1e4c8a..00000000000 --- a/noir/compiler/source-resolver/lib/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,IAAI,eAAe,GAA2C,IAAI,CAAC;AAEnE,MAAM,CAAC,MAAM,SAAS,GAAG,UAAU,SAAiB;IAClD,IAAI,eAAe,EAAE;QACnB,MAAM,MAAM,GAAG,eAAe,CAAC,SAAS,CAAC,CAAC;QAE1C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,MAAM,CAAC;SACf;aAAM;YACL,MAAM,IAAI,KAAK,CACb,wHAAwH,CACzH,CAAC;SACH;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAA2D,CAAC,CAAC;KAC9E;AACH,CAAC,CAAC;AAEF,SAAS,UAAU,CAAC,aAA4C;IAC9D,IAAI,OAAO,aAAa,KAAK,UAAU,EAAE;QACvC,OAAO,aAAa,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CACb,2GAA2G,CAC5G,CAAC;KACH;AACH,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,QAAuC;IACxE,eAAe,GAAG,UAAU,CAAC,QAAQ,CAAC,CAAC;AACzC,CAAC"} \ No newline at end of file diff --git a/noir/compiler/wasm/Cargo.toml b/noir/compiler/wasm/Cargo.toml index 58ad7764fdc..7ea138b60a4 100644 --- a/noir/compiler/wasm/Cargo.toml +++ b/noir/compiler/wasm/Cargo.toml @@ -23,9 +23,8 @@ serde.workspace = true js-sys.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true -log.workspace = true - -wasm-logger = "0.2.0" +tracing-subscriber.workspace = true +tracing-web.workspace = true # This is an unused dependency, we are adding it # so that we can enable the js feature in getrandom. 
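The `compiler/wasm` dependency change above, dropping `log` and `wasm-logger` in favor of `tracing-subscriber` and `tracing-web`, is wired up by the `init_log_level` rewrite that appears later in this patch in `compiler/wasm/src/lib.rs`. Here is a minimal sketch of that pattern using the same crates and calls as the patch; the function name and the example filter strings are illustrative.

```rust
use tracing_subscriber::prelude::*;
use tracing_subscriber::EnvFilter;
use tracing_web::MakeWebConsoleWriter;

/// Install a `tracing` subscriber that writes to the browser console.
/// The filter string follows `EnvFilter` syntax, e.g. "debug" or "noirc_frontend=trace".
pub fn init_tracing(filter: &str) {
    let filter: EnvFilter = filter.parse().expect("invalid tracing filter");

    let fmt_layer = tracing_subscriber::fmt::layer()
        .with_ansi(false) // browser consoles don't interpret ANSI escape codes
        .without_time() // std time sources aren't available on wasm32-unknown-unknown
        .with_writer(MakeWebConsoleWriter::new());

    tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init();
}
```

As in the patch, a `std::sync::Once` guard would typically wrap this call so repeated invocations from JavaScript do not attempt to install a second global subscriber.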
diff --git a/noir/compiler/wasm/package.json b/noir/compiler/wasm/package.json index 352142eb3e3..38fd118f189 100644 --- a/noir/compiler/wasm/package.json +++ b/noir/compiler/wasm/package.json @@ -1,6 +1,6 @@ { "name": "@noir-lang/noir_wasm", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", @@ -14,9 +14,14 @@ "package.json" ], "sideEffects": false, + "homepage": "https://noir-lang.org/", "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" }, "scripts": { "build": "bash ./build.sh", diff --git a/noir/compiler/wasm/src/circuit.rs b/noir/compiler/wasm/src/circuit.rs deleted file mode 100644 index fdd9a7d9a20..00000000000 --- a/noir/compiler/wasm/src/circuit.rs +++ /dev/null @@ -1,18 +0,0 @@ -use acvm::acir::circuit::Circuit; -use gloo_utils::format::JsValueSerdeExt; -use wasm_bindgen::prelude::*; - -// Deserializes bytes into ACIR structure -#[wasm_bindgen] -pub fn acir_read_bytes(bytes: Vec) -> JsValue { - console_error_panic_hook::set_once(); - let circuit = Circuit::deserialize_circuit(&bytes).unwrap(); - ::from_serde(&circuit).unwrap() -} - -#[wasm_bindgen] -pub fn acir_write_bytes(acir: JsValue) -> Vec { - console_error_panic_hook::set_once(); - let circuit: Circuit = JsValueSerdeExt::into_serde(&acir).unwrap(); - Circuit::serialize_circuit(&circuit) -} diff --git a/noir/compiler/wasm/src/compile.rs b/noir/compiler/wasm/src/compile.rs index 4012effd947..c6ae0ae1f42 100644 --- a/noir/compiler/wasm/src/compile.rs +++ b/noir/compiler/wasm/src/compile.rs @@ -7,8 +7,9 @@ use nargo::artifacts::{ program::PreprocessedProgram, }; use noirc_driver::{ - add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, - CompiledContract, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, + add_dep, compile_contract, compile_main, file_manager_with_stdlib, prepare_crate, + prepare_dependency, CompileOptions, CompiledContract, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -224,7 +225,7 @@ pub fn compile( // should be considered as immutable. pub(crate) fn file_manager_with_source_map(source_map: PathToFileSourceMap) -> FileManager { let root = Path::new(""); - let mut fm = FileManager::new(root); + let mut fm = file_manager_with_stdlib(root); for (path, source) in source_map.0 { fm.add_file_with_source(path.as_path(), source); @@ -327,7 +328,7 @@ mod test { use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; use std::{collections::HashMap, path::Path}; - fn setup_test_context(source_map: PathToFileSourceMap) -> Context { + fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static> { let mut fm = file_manager_with_source_map(source_map); // Add this due to us calling prepare_crate on "/main.nr" below fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); diff --git a/noir/compiler/wasm/src/compile_new.rs b/noir/compiler/wasm/src/compile_new.rs index cd09d0fcc49..0cd1a2c50e5 100644 --- a/noir/compiler/wasm/src/compile_new.rs +++ b/noir/compiler/wasm/src/compile_new.rs @@ -18,7 +18,9 @@ use wasm_bindgen::prelude::wasm_bindgen; /// then the impl block is not picked up in javascript. 
#[wasm_bindgen] pub struct CompilerContext { - context: Context, + // `wasm_bindgen` currently doesn't allow lifetime parameters on structs so we must use a `'static` lifetime. + // `Context` must then own the `FileManager` to satisfy this lifetime. + context: Context<'static>, } #[wasm_bindgen(js_name = "CrateId")] diff --git a/noir/compiler/wasm/src/lib.rs b/noir/compiler/wasm/src/lib.rs index 43095fee4d4..e10b992a00e 100644 --- a/noir/compiler/wasm/src/lib.rs +++ b/noir/compiler/wasm/src/lib.rs @@ -6,22 +6,22 @@ use getrandom as _; use gloo_utils::format::JsValueSerdeExt; -use log::Level; + use noirc_driver::{GIT_COMMIT, GIT_DIRTY, NOIRC_VERSION}; use serde::{Deserialize, Serialize}; -use std::str::FromStr; -use wasm_bindgen::prelude::*; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; +use tracing_web::MakeWebConsoleWriter; -mod circuit; mod compile; mod compile_new; mod errors; -pub use circuit::{acir_read_bytes, acir_write_bytes}; pub use compile::compile; // Expose the new Context-Centric API pub use compile_new::{compile_, CompilerContext, CrateIDWrapper}; +use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; #[derive(Serialize, Deserialize)] pub struct BuildInfo { @@ -31,14 +31,21 @@ pub struct BuildInfo { } #[wasm_bindgen] -pub fn init_log_level(level: String) { +pub fn init_log_level(filter: String) { // Set the static variable from Rust use std::sync::Once; - let log_level = Level::from_str(&level).unwrap_or(Level::Error); + let filter: EnvFilter = + filter.parse().expect("Could not parse log filter while initializing logger"); + static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { - wasm_logger::init(wasm_logger::Config::new(log_level)); + let fmt_layer = tracing_subscriber::fmt::layer() + .with_ansi(false) + .without_time() + .with_writer(MakeWebConsoleWriter::new()); + + tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); }); } diff --git a/noir/cspell.json b/noir/cspell.json index e02e68871bb..94449a68a77 100644 --- a/noir/cspell.json +++ b/noir/cspell.json @@ -6,6 +6,7 @@ "acvm", "aeiou", "appender", + "Arbitrum", "arithmetization", "arity", "arkworks", @@ -14,22 +15,30 @@ "bincode", "bindgen", "bitand", + "bitxor", + "bitor", "blackbox", + "bridgekeeper", "brillig", "cachix", "callsite", "callsites", "canonicalize", "castable", + "Celo", "chumsky", "clippy", "codegen", "codegens", + "Codespaces", "codespan", "coeff", "combinators", "comptime", + "cpus", "cranelift", + "curvegroup", + "databus", "deflatten", "deflattened", "deflattening", @@ -40,20 +49,24 @@ "desugared", "direnv", "eddsa", + "Elligator", "endianness", "envrc", "Flamegraph", "flate", "fmtstr", "foldl", + "foos", "forall", "foralls", "formatcp", + "frontends", "fxhash", "getrandom", "gloo", "grumpkin", "Guillaume", + "gzipped", "hasher", "hexdigit", "higher-kinded", @@ -68,9 +81,11 @@ "jmpif", "jmpifs", "jmps", + "Jubjub", "keccak", "krate", "lvalue", + "mathbb", "merkle", "metas", "monomorphization", @@ -78,22 +93,27 @@ "monomorphized", "monomorphizer", "monomorphizes", + "montcurve", "nand", "nargo", "newtype", + "nightlies", "nixpkgs", "noirc", + "noirjs", "noirup", "nomicfoundation", "pedersen", "peekable", "plonkc", + "PLONKish", "pprof", "preprocess", "prettytable", "println", "printstd", "pseudocode", + "pubkey", "quantile", "rustc", "rustup", @@ -108,9 +128,12 @@ "srem", "stdlib", "struct", + "structs", "subexpression", "subshell", "subtyping", + "swcurve", + "tecurve", "tempdir", "tempfile", "termcolor", @@ -125,7 +148,13 @@ "unnormalized", 
"unoptimized", "urem", + "USERPROFILE", "vecmap", - "wasi" + "wasi", + "Weierstraß", + "zshell" + ], + "ignorePaths": [ + "./**/node_modules/**" ] } diff --git a/noir/deny.toml b/noir/deny.toml index d9ffd4d37f0..5edce08fb70 100644 --- a/noir/deny.toml +++ b/noir/deny.toml @@ -67,6 +67,7 @@ exceptions = [ # so we prefer to not have dependencies using it # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal { allow = ["CC0-1.0"], name = "more-asserts" }, + { allow = ["CC0-1.0"], name = "jsonrpc" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, diff --git a/noir/docs/docs/explainers/explainer-recursion.md b/noir/docs/docs/explainers/explainer-recursion.md index cc431a878dc..8f992ec29fd 100644 --- a/noir/docs/docs/explainers/explainer-recursion.md +++ b/noir/docs/docs/explainers/explainer-recursion.md @@ -64,7 +64,9 @@ So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob w This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. -So, Alice started thinking: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". She can then generate a proof that she verified his proof, and so on. +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. ```md Did you fail? <-------------------------- @@ -86,7 +88,7 @@ Generate proof of that / Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! -So, the tallier puts all the votes in a merkle tree, and everyone can also prove the verification of two proofs within one proof, as such: +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: ```md abcd @@ -104,7 +106,7 @@ Doing this recursively allows us to arrive on a final proof `abcd` which if true Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. -He could find it more efficient to generate a proof for that setup phase separately, and verifying it in his actual business logic section of the circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. ## What params do I need @@ -149,7 +151,7 @@ In such a situation, and assuming Alice is first, she would skip the first part ### Aggregating proofs -In some one-way interaction situations, recursiveness would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. 
+In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: diff --git a/noir/docs/docs/getting_started/create_a_project.md b/noir/docs/docs/getting_started/create_a_project.md index f10916c39c5..26ff265c389 100644 --- a/noir/docs/docs/getting_started/create_a_project.md +++ b/noir/docs/docs/getting_started/create_a_project.md @@ -69,7 +69,7 @@ x : Field, y : pub Field Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the keyword `pub` (e.g. `y`). To learn more about private and public values, check the -[Data Types](../noir/syntax/data_types/index.md) section. +[Data Types](../noir/concepts/data_types/index.md) section. The next line of the program specifies its body: @@ -79,7 +79,7 @@ assert(x != y); The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. -For more Noir syntax, check the [Language Concepts](../noir/syntax/comments.md) chapter. +For more Noir syntax, check the [Language Concepts](../noir/concepts/comments.md) chapter. ## Build In/Output Files diff --git a/noir/docs/docs/getting_started/installation/index.md b/noir/docs/docs/getting_started/installation/index.md index ddb8a250eb4..27eeeca88ed 100644 --- a/noir/docs/docs/getting_started/installation/index.md +++ b/noir/docs/docs/getting_started/installation/index.md @@ -1,7 +1,7 @@ --- title: Nargo Installation description: - nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo though the most common and easy method, noirup + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup keywords: [ Nargo Noir @@ -41,5 +41,5 @@ noirup Done. That's it. You should have the latest version working. You can check with `nargo --version`. You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more information. diff --git a/noir/docs/docs/getting_started/installation/other_install_methods.md b/noir/docs/docs/getting_started/installation/other_install_methods.md index 36f05657277..a532f83750e 100644 --- a/noir/docs/docs/getting_started/installation/other_install_methods.md +++ b/noir/docs/docs/getting_started/installation/other_install_methods.md @@ -83,9 +83,9 @@ Check if the installation was successful by running `nargo --version`. You shoul ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. 
It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -163,7 +163,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/noir/docs/docs/getting_started/project_breakdown.md b/noir/docs/docs/getting_started/project_breakdown.md index 5a214804f7b..c4e2a9ae003 100644 --- a/noir/docs/docs/getting_started/project_breakdown.md +++ b/noir/docs/docs/getting_started/project_breakdown.md @@ -40,7 +40,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/docs/getting_started/tooling/testing.md b/noir/docs/docs/getting_started/tooling/testing.md index 868a061200d..d3e0c522473 100644 --- a/noir/docs/docs/getting_started/tooling/testing.md +++ b/noir/docs/docs/getting_started/tooling/testing.md @@ -24,8 +24,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/noir/docs/docs/how_to/how-to-recursion.md b/noir/docs/docs/how_to/how-to-recursion.md index 226f7e6e73d..f60aa3ff2d9 100644 --- a/noir/docs/docs/how_to/how-to-recursion.md +++ b/noir/docs/docs/how_to/how-to-recursion.md @@ -31,9 +31,9 @@ It is also assumed that you're not using `noir_wasm` for compilation, and instea :::info -As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. Meaning it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. 
This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. -While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. Which means these proofs need to be created by using the backend directly. +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly. In short: @@ -53,7 +53,7 @@ For a full example on how recursive proofs work, please refer to the [noir-examp In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. -For recursiveness, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: @@ -82,11 +82,11 @@ const { proof, publicInputs } = await backend.generateIntermediateProof(witness) :::warning -Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit are we actually running and why! +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit we are actually running and why! In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. -With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*. So it is Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. 
::: @@ -159,7 +159,7 @@ const backends = { main: new BarretenbergBackend(circuits.main), recursive: new BarretenbergBackend(circuits.recursive) } -const noirs = { +const noir_programs = { main: new Noir(circuits.main, backends.main), recursive: new Noir(circuits.recursive, backends.recursive) } @@ -169,7 +169,7 @@ This allows you to neatly call exactly the method you want without conflicting n ```js // Alice runs this 👇 -const { witness: mainWitness } = await noirs.main.execute(input) +const { witness: mainWitness } = await noir_programs.main.execute(input) const proof = await backends.main.generateIntermediateProof(mainWitness) // Bob runs this 👇 @@ -178,7 +178,7 @@ const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateInterm proof, numPublicInputs, ); -const recursiveProof = await noirs.recursive.generateFinalProof(recursiveInputs) +const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) ``` ::: diff --git a/noir/docs/docs/index.md b/noir/docs/docs/index.md index 016832f9f5e..ab8c2f8acd2 100644 --- a/noir/docs/docs/index.md +++ b/noir/docs/docs/index.md @@ -24,12 +24,13 @@ sidebar_position: 0 Noir, a domain-specific language crafted for SNARK proving systems, stands out with its simplicity, flexibility, and robust capabilities. Unlike conventional approaches that compile directly to a fixed NP-complete language, -Noir takes a two-pronged path. It first compiles to an adaptable intermediate language known as ACIR. From there, -depending on the project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's -barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkwork's Marlin -backend, among others. +Noir takes a two-pronged path. First, Noir compiles to an adaptable intermediate language known as ACIR. -This innovative design introduces unique challenges, yet it strategically separates the programming language from the +From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's +barretenberg backend, or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin +backend (among others). + +This innovative design introduces unique challenges; however, this approach also strategically separates the programming language from the backend. Noir's approach echoes the modular philosophy of LLVM, offering developers a versatile toolkit for cryptographic programming. @@ -43,8 +44,7 @@ contracts efficiently. While the current alpha version offers this as a direct f to modularize this process for even greater ease of use. Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will be -modularised in the future; however, as of the alpha, you can use the - command to create a verifier contract. +modularized in the future; however, as of the alpha, you can use the `nargo codegen-verifier` command to create a verifier contract. ### Protocol Developers @@ -62,7 +62,7 @@ within your projects. ## Libraries -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the +Noir does not currently have an official package manager. You can find a list of some of the available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
Some libraries that are available today include: diff --git a/noir/docs/docs/migration_notes.md b/noir/docs/docs/migration_notes.md index d5d0682cf0c..9f27230a1a0 100644 --- a/noir/docs/docs/migration_notes.md +++ b/noir/docs/docs/migration_notes.md @@ -16,7 +16,7 @@ To update, please make sure this field in `Nargo.toml` matches the output of `na ## ≥0.14 -The index of the [for loops](noir/syntax/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: ```rust for i in 0..10 { @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/_category_.json b/noir/docs/docs/noir/concepts/_category_.json similarity index 72% rename from noir/docs/versioned_docs/version-v../noir/syntax/_category_.json rename to noir/docs/docs/noir/concepts/_category_.json index 666b691ae91..7da08f8a8c5 100644 --- a/noir/docs/versioned_docs/version-v../noir/syntax/_category_.json +++ b/noir/docs/docs/noir/concepts/_category_.json @@ -1,6 +1,6 @@ { - "label": "Syntax", + "label": "Concepts", "position": 0, "collapsible": true, "collapsed": true -} +} \ No newline at end of file diff --git a/noir/docs/docs/noir/syntax/assert.md b/noir/docs/docs/noir/concepts/assert.md similarity index 100% rename from noir/docs/docs/noir/syntax/assert.md rename to noir/docs/docs/noir/concepts/assert.md diff --git a/noir/docs/docs/noir/syntax/comments.md b/noir/docs/docs/noir/concepts/comments.md similarity index 100% rename from noir/docs/docs/noir/syntax/comments.md rename to noir/docs/docs/noir/concepts/comments.md diff --git a/noir/docs/docs/noir/syntax/control_flow.md b/noir/docs/docs/noir/concepts/control_flow.md similarity index 100% rename from noir/docs/docs/noir/syntax/control_flow.md rename to noir/docs/docs/noir/concepts/control_flow.md diff --git a/noir/docs/docs/noir/syntax/data_bus.md b/noir/docs/docs/noir/concepts/data_bus.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_bus.md rename to noir/docs/docs/noir/concepts/data_bus.md diff --git a/noir/docs/docs/noir/syntax/data_types/_category_.json b/noir/docs/docs/noir/concepts/data_types/_category_.json similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/_category_.json rename to noir/docs/docs/noir/concepts/data_types/_category_.json diff --git a/noir/docs/docs/noir/syntax/data_types/arrays.md b/noir/docs/docs/noir/concepts/data_types/arrays.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/arrays.md rename to 
noir/docs/docs/noir/concepts/data_types/arrays.md diff --git a/noir/docs/docs/noir/syntax/data_types/booleans.md b/noir/docs/docs/noir/concepts/data_types/booleans.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/booleans.md rename to noir/docs/docs/noir/concepts/data_types/booleans.md diff --git a/noir/docs/docs/noir/syntax/data_types/fields.md b/noir/docs/docs/noir/concepts/data_types/fields.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/fields.md rename to noir/docs/docs/noir/concepts/data_types/fields.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/function_types.md b/noir/docs/docs/noir/concepts/data_types/function_types.md similarity index 88% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/function_types.md rename to noir/docs/docs/noir/concepts/data_types/function_types.md index 61e4076adaf..e224e860d59 100644 --- a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/function_types.md +++ b/noir/docs/docs/noir/concepts/data_types/function_types.md @@ -23,4 +23,4 @@ fn main() { ``` A function type also has an optional capture environment - this is necessary to support closures. -See [Lambdas](@site/docs/noir/syntax/lambdas.md) for more details. +See [Lambdas](@site/docs/noir/concepts/lambdas.md) for more details. diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/index.md b/noir/docs/docs/noir/concepts/data_types/index.md similarity index 95% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/index.md rename to noir/docs/docs/noir/concepts/data_types/index.md index 52e568e9b7e..3c9cd4c2437 100644 --- a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/index.md +++ b/noir/docs/docs/noir/concepts/data_types/index.md @@ -79,7 +79,7 @@ fn main() { } ``` -Type aliases can also be used with [generics](@site/docs/noir/syntax/generics.md): +Type aliases can also be used with [generics](@site/docs/noir/concepts/generics.md): ```rust type Id = Size; @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/noir/docs/docs/noir/syntax/data_types/integers.md b/noir/docs/docs/noir/concepts/data_types/integers.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/integers.md rename to noir/docs/docs/noir/concepts/data_types/integers.md diff --git a/noir/docs/docs/noir/syntax/data_types/references.md b/noir/docs/docs/noir/concepts/data_types/references.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/references.md rename to noir/docs/docs/noir/concepts/data_types/references.md diff --git a/noir/docs/docs/noir/syntax/data_types/slices.mdx b/noir/docs/docs/noir/concepts/data_types/slices.mdx similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/slices.mdx rename to noir/docs/docs/noir/concepts/data_types/slices.mdx diff --git a/noir/docs/docs/noir/syntax/data_types/strings.md b/noir/docs/docs/noir/concepts/data_types/strings.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/strings.md rename to noir/docs/docs/noir/concepts/data_types/strings.md diff --git a/noir/docs/docs/noir/syntax/data_types/structs.md b/noir/docs/docs/noir/concepts/data_types/structs.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/structs.md rename to noir/docs/docs/noir/concepts/data_types/structs.md diff --git a/noir/docs/docs/noir/syntax/data_types/tuples.md b/noir/docs/docs/noir/concepts/data_types/tuples.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/tuples.md rename to noir/docs/docs/noir/concepts/data_types/tuples.md diff --git a/noir/docs/docs/noir/syntax/data_types/vectors.mdx b/noir/docs/docs/noir/concepts/data_types/vectors.mdx similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/vectors.mdx rename to noir/docs/docs/noir/concepts/data_types/vectors.mdx diff --git a/noir/docs/docs/noir/syntax/distinct.md b/noir/docs/docs/noir/concepts/distinct.md similarity index 100% rename from noir/docs/docs/noir/syntax/distinct.md rename to noir/docs/docs/noir/concepts/distinct.md diff --git a/noir/docs/docs/noir/syntax/functions.md b/noir/docs/docs/noir/concepts/functions.md similarity index 100% rename from noir/docs/docs/noir/syntax/functions.md rename to noir/docs/docs/noir/concepts/functions.md diff --git a/noir/docs/docs/noir/syntax/generics.md b/noir/docs/docs/noir/concepts/generics.md similarity index 100% rename from noir/docs/docs/noir/syntax/generics.md rename to noir/docs/docs/noir/concepts/generics.md diff --git a/noir/docs/docs/noir/syntax/lambdas.md b/noir/docs/docs/noir/concepts/lambdas.md similarity index 100% rename from noir/docs/docs/noir/syntax/lambdas.md rename to noir/docs/docs/noir/concepts/lambdas.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/mutability.md b/noir/docs/docs/noir/concepts/mutability.md similarity index 97% rename from noir/docs/versioned_docs/version-v../noir/syntax/mutability.md rename to noir/docs/docs/noir/concepts/mutability.md index 58e9c1cecfb..6abfae3cfa7 100644 --- a/noir/docs/versioned_docs/version-v../noir/syntax/mutability.md +++ b/noir/docs/docs/noir/concepts/mutability.md @@ -70,11 +70,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/ops.md b/noir/docs/docs/noir/concepts/ops.md similarity index 97% rename from 
noir/docs/versioned_docs/version-v../noir/syntax/ops.md rename to noir/docs/docs/noir/concepts/ops.md index 977c8ba1203..60425cb8994 100644 --- a/noir/docs/versioned_docs/version-v../noir/syntax/ops.md +++ b/noir/docs/docs/noir/concepts/ops.md @@ -63,7 +63,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/noir/docs/docs/noir/syntax/shadowing.md b/noir/docs/docs/noir/concepts/shadowing.md similarity index 100% rename from noir/docs/docs/noir/syntax/shadowing.md rename to noir/docs/docs/noir/concepts/shadowing.md diff --git a/noir/docs/docs/explanations/noir/traits.md b/noir/docs/docs/noir/concepts/traits.md similarity index 100% rename from noir/docs/docs/explanations/noir/traits.md rename to noir/docs/docs/noir/concepts/traits.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/unconstrained.md b/noir/docs/docs/noir/concepts/unconstrained.md similarity index 98% rename from noir/docs/versioned_docs/version-v../noir/syntax/unconstrained.md rename to noir/docs/docs/noir/concepts/unconstrained.md index 7a61d3953ef..6b3424f7993 100644 --- a/noir/docs/versioned_docs/version-v../noir/syntax/unconstrained.md +++ b/noir/docs/docs/noir/concepts/unconstrained.md @@ -6,7 +6,7 @@ keywords: [Noir programming language, unconstrained, open] sidebar_position: 5 --- -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/noir/docs/docs/noir/modules_packages_crates/dependencies.md b/noir/docs/docs/noir/modules_packages_crates/dependencies.md index 21286bb3f72..a37dc401b7d 100644 --- a/noir/docs/docs/noir/modules_packages_crates/dependencies.md +++ b/noir/docs/docs/noir/modules_packages_crates/dependencies.md @@ -49,7 +49,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -61,17 +61,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/docs/noir/modules_packages_crates/modules.md b/noir/docs/docs/noir/modules_packages_crates/modules.md index f9f15aee8be..ae822a1cff4 100644 --- a/noir/docs/docs/noir/modules_packages_crates/modules.md +++ b/noir/docs/docs/noir/modules_packages_crates/modules.md @@ -11,7 +11,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. 
Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/docs/noir/standard_library/black_box_fns.md b/noir/docs/docs/noir/standard_library/black_box_fns.md index 02a518e3803..4b1efbd17de 100644 --- a/noir/docs/docs/noir/standard_library/black_box_fns.md +++ b/noir/docs/docs/noir/standard_library/black_box_fns.md @@ -42,4 +42,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/docs/noir/standard_library/logging.md b/noir/docs/docs/noir/standard_library/logging.md index 16daf922e15..2e163b52ab3 100644 --- a/noir/docs/docs/noir/standard_library/logging.md +++ b/noir/docs/docs/noir/standard_library/logging.md @@ -22,7 +22,7 @@ The standard library provides two familiar statements you can use: `println` and You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: diff --git a/noir/docs/docs/noir/standard_library/traits.md b/noir/docs/docs/noir/standard_library/traits.md new file mode 100644 index 00000000000..50b0e6816ab --- /dev/null +++ b/noir/docs/docs/noir/standard_library/traits.md @@ -0,0 +1,284 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust +trait Default { + fn default() -> Self; +} +``` + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. 
} + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type. + +## `std::cmp` + +### `std::cmp::Eq` + +```rust +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +Returns `true` if `self` is equal to `other`. Implementing this trait on a type +allows the type to be used with `==` and `!=`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. } + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. } + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } +``` + +### `std::cmp::Cmp` + +```rust +trait Cmp { + fn cmp(self, other: Self) -> Ordering; +} +``` + +`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`, +`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`. +Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be +used on values of the type. + +Implementations: + +```rust +impl Ord for u8 { .. } +impl Ord for u16 { .. } +impl Ord for u32 { .. } +impl Ord for u64 { .. } + +impl Ord for i8 { .. } +impl Ord for i16 { .. } +impl Ord for i32 { .. } + +impl Ord for i64 { .. } + +impl Ord for () { .. } +impl Ord for bool { .. } + +impl Ord for [T; N] + where T: Ord { .. } + +impl Ord for (A, B) + where A: Ord, B: Ord { .. } + +impl Ord for (A, B, C) + where A: Ord, B: Ord, C: Ord { .. } + +impl Ord for (A, B, C, D) + where A: Ord, B: Ord, C: Ord, D: Ord { .. } + +impl Ord for (A, B, C, D, E) + where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. } +``` + +## `std::ops` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Implementing these traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. + +```rust +trait Add { + fn add(self, other: Self) -> Self; +} + +trait Sub { + fn sub(self, other: Self) -> Self; +} + +trait Mul { + fn mul(self, other: Self) -> Self; +} + +trait Div { + fn div(self, other: Self) -> Self; +} +``` + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. 
} +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. } +impl Add for u32 { .. } +impl Add for u64 { .. } +``` + +### `std::ops::Rem` + +```rust +trait Rem { + fn rem(self, other: Self) -> Self; +} +``` + +`Rem::rem(a, b)` is the remainder function returning the result of what is +left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator +to be used with the implementation type. + +Unlike other numeric traits, `Rem` is not implemented for `Field`. + +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust +trait BitOr { + fn bitor(self, other: Self) -> Self; +} + +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} + +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` + +Traits for the bitwise operations `|`, `&`, and `^`. + +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. + +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. + +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust +trait Shl { + fn shl(self, other: Self) -> Self; +} + +trait Shr { + fn shr(self, other: Self) -> Self; +} +``` + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. 
+ +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` \ No newline at end of file diff --git a/noir/docs/docusaurus.config.ts b/noir/docs/docusaurus.config.ts index 7516d35c6d9..043c6cba72e 100644 --- a/noir/docs/docusaurus.config.ts +++ b/noir/docs/docusaurus.config.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ import type { Config } from '@docusaurus/types'; const { themes } = require('prism-react-renderer'); @@ -47,6 +48,9 @@ export default { ], themeConfig: { + colorMode: { + respectPrefersColorScheme: true, + }, navbar: { logo: { alt: 'Noir Logo', diff --git a/noir/docs/src/css/custom.css b/noir/docs/src/css/custom.css index 53dec741513..c96e9df9832 100644 --- a/noir/docs/src/css/custom.css +++ b/noir/docs/src/css/custom.css @@ -6,45 +6,47 @@ /* You can override the default Infima variables here. */ :root { - --ifm-color-primary: #9f3fff; - --ifm-color-primary-dark: #2f1f49; - --ifm-color-primary-darker: #2f1f49; - --ifm-color-primary-darkest: #2f1f49; - --ifm-color-primary-light: #9f3fff; - --ifm-color-primary-lighter: #9f3fff; - --ifm-color-primary-lightest: #9f3fff; + --ifm-color-primary: #514167; + --ifm-color-primary-dark: #493a5d; + --ifm-color-primary-darker: #453758; + --ifm-color-primary-darkest: #392d48; + --ifm-color-primary-light: #594871; + --ifm-color-primary-lighter: #5d4b76; + --ifm-color-primary-lightest: #695486; --search-local-highlight-color: #2f1f49; --ifm-menu-color-background-active: #f6f8fa; --ifm-code-font-size: 95%; - --ifm-breadcrumb-color-active: white; - --ifm-breadcrumb-item-background-active: #2f1f49; + --ifm-breadcrumb-color-active: #F6FBFC; + --ifm-breadcrumb-item-background-active: #2f1f49; --ifm-heading-color: #2f1f49; --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); + --ifm-link-color: #B68BE4; } /* For readability concerns, you should choose a lighter palette in dark mode. 
*/ [data-theme='dark'] { - --ifm-color-primary: #f5bda9; - --ifm-color-primary-dark: #f5bda9; - --ifm-color-primary-darker: #f5bda9; - --ifm-color-primary-darkest: #f5bda9; - --ifm-color-primary-light: #f5bda9; - --ifm-color-primary-lighter: #f5bda9; - --ifm-color-primary-lightest: #f5bda9; - - --ifm-heading-color: white; + --ifm-color-primary: #fbc0b4; + --ifm-color-primary-dark: #f99e8b; + --ifm-color-primary-darker: #f88c77; + --ifm-color-primary-darkest: #f45939; + --ifm-color-primary-light: #fde2dd; + --ifm-color-primary-lighter: #fef4f1; + --ifm-color-primary-lightest: #ffffff; + + --ifm-heading-color: #F6FBFC; --ifm-menu-color-background-active: #282a36; --ifm-breadcrumb-color-active: #2f1f49; --ifm-breadcrumb-item-background-active: #f5bda9; --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3); + --ifm-link-color: var(--ifm-color-primary); } html[data-theme='dark'] { --search-local-highlight-color: #f5bda9; - --search-local-muted-color: white; + --search-local-muted-color: #F6FBFC; --search-local-hit-active-color: #1b1b1d; - --search-local-hit-color: white; + --search-local-hit-color: #F6FBFC; } [data-theme='dark'] .footer { @@ -65,6 +67,13 @@ html[data-theme='dark'] { --ifm-footer-title-color: #2f1f49; } +[data-theme='light'] #__docusaurus { + background-color: #F6FBFC; +} +[data-theme='dark'] #__docusaurus { + background-color: #161717; +} + .katex-html { display: none; } @@ -91,7 +100,6 @@ html[data-theme='dark'] { justify-content: center; margin: 0 auto; text-align: center; - background: white; border: none; width: 50%; } diff --git a/noir/docs/versioned_docs/version-v../explanations/standard_library/traits.md b/noir/docs/versioned_docs/version-v../explanations/standard_library/traits.md deleted file mode 100644 index 63b4f3d6f0b..00000000000 --- a/noir/docs/versioned_docs/version-v../explanations/standard_library/traits.md +++ /dev/null @@ -1,140 +0,0 @@ ---- -title: Traits -description: Noir's stdlib provides a few commonly used traits. -keywords: [traits, trait, interface, protocol, default, add, eq] ---- - -## `std::default` - -### `std::default::Default` - -```rust -trait Default { - fn default() -> Self; -} -``` - -Constructs a default value of a type. - -Implementations: -```rust -impl Default for Field { .. } - -impl Default for i8 { .. } -impl Default for i16 { .. } -impl Default for i32 { .. } -impl Default for i64 { .. } - -impl Default for u8 { .. } -impl Default for u16 { .. } -impl Default for u32 { .. } -impl Default for u64 { .. } - -impl Default for () { .. } -impl Default for bool { .. } - -impl Default for [T; N] - where T: Default { .. } - -impl Default for (A, B) - where A: Default, B: Default { .. } - -impl Default for (A, B, C) - where A: Default, B: Default, C: Default { .. } - -impl Default for (A, B, C, D) - where A: Default, B: Default, C: Default, D: Default { .. } - -impl Default for (A, B, C, D, E) - where A: Default, B: Default, C: Default, D: Default, E: Default { .. } -``` - -For primitive integer types, the return value of `default` is `0`. Container -types such as arrays are filled with default values of their element type. - -## `std::ops` - -### `std::ops::Eq` - -```rust -trait Eq { - fn eq(self, other: Self) -> bool; -} -``` -Returns `true` if `self` is equal to `other`. - -Implementations: -```rust -impl Eq for Field { .. } - -impl Eq for i8 { .. } -impl Eq for i16 { .. } -impl Eq for i32 { .. } -impl Eq for i64 { .. } - -impl Eq for u8 { .. } -impl Eq for u16 { .. } -impl Eq for u32 { .. } -impl Eq for u64 { .. 
} - -impl Eq for () { .. } -impl Eq for bool { .. } - -impl Eq for [T; N] - where T: Eq { .. } - -impl Eq for (A, B) - where A: Eq, B: Eq { .. } - -impl Eq for (A, B, C) - where A: Eq, B: Eq, C: Eq { .. } - -impl Eq for (A, B, C, D) - where A: Eq, B: Eq, C: Eq, D: Eq { .. } - -impl Eq for (A, B, C, D, E) - where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } -``` - -### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` - -These traits abstract over addition, subtraction, multiplication, and division respectively. -Although Noir does not currently have operator overloading, in the future implementing these -traits for a given type will also allow that type to be used with the corresponding operator -for that trait (`+` for Add, etc) in addition to the normal method names. - -```rust -trait Add { - fn add(self, other: Self) -> Self; -} - -trait Sub { - fn sub(self, other: Self) -> Self; -} - -trait Mul { - fn mul(self, other: Self) -> Self; -} - -trait Div { - fn div(self, other: Self) -> Self; -} -``` - -The implementations block below is given for the `Add` trait, but the same types that implement -`Add` also implement `Sub`, `Mul`, and `Div`. - -Implementations: -```rust -impl Add for Field { .. } - -impl Add for i8 { .. } -impl Add for i16 { .. } -impl Add for i32 { .. } -impl Add for i64 { .. } - -impl Add for u8 { .. } -impl Add for u16 { .. } -impl Add for u32 { .. } -impl Add for u64 { .. } -``` diff --git a/noir/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md b/noir/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md index f4ca361d3c4..0de5597c213 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md +++ b/noir/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment diff --git a/noir/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md b/noir/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md index bc0e742fb4e..e7b1f33b339 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md +++ b/noir/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/versioned_docs/version-v0.17.0/index.md b/noir/docs/versioned_docs/version-v0.17.0/index.md index ddbee58f6e4..2d5e6f4454f 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/index.md +++ b/noir/docs/versioned_docs/version-v0.17.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md +++ b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md +++ b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md +++ b/noir/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/docs/versioned_docs/version-v0.17.0/migration_notes.md b/noir/docs/versioned_docs/version-v0.17.0/migration_notes.md index 69782cba388..1a81af04b3a 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.17.0/migration_notes.md @@ -42,7 +42,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -70,7 +70,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md index fd7511a3423..e91e73a4c4f 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md b/noir/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md +++ b/noir/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md b/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md index 11fef2bf8b5..3480fbfedad 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md +++ b/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md @@ -90,7 +90,7 @@ This async method generates a witness and a proof given an object as input. 
### Syntax ```js -async generateFinalproof(input) +async generateFinalProof(input) ``` ### Parameters diff --git a/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md b/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md index f444eab1772..958cabd6289 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md +++ b/noir/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md @@ -41,7 +41,7 @@ constructor(acirCircuit, (numberOfThreads = 1)); | Parameter | Type | Description | | ----------------- | ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode Tipically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | +| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode typically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | | `numberOfThreads` | Number (optional) | The number of threads to be used by the backend. Defaults to 1. | ### Usage diff --git a/noir/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md index c758846b688..a412de19d06 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md @@ -42,4 +42,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/versioned_docs/version-v0.17.0/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.17.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/noir/docs/versioned_docs/version-v0.17.0/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.17.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. 
This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/noir/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md b/noir/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md +++ b/noir/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. 
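The logging hunks above recommend `nargo execute` for debugging failing constraints with `println`; a minimal sketch of that flow, assuming illustrative parameter names:

```rust
use dep::std;

fn main(x: Field, y: pub Field) {
    // Printed during execution (e.g. via `nargo execute`), not at compile time.
    std::println(x);
    // Prints that have already run are still shown even if this constraint
    // later fails during execution.
    assert(x != y);
}
```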
diff --git a/noir/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md b/noir/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md +++ b/noir/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/versioned_docs/version-v0.19.0/index.md b/noir/docs/versioned_docs/version-v0.19.0/index.md index 4e2f4043892..380368db036 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/index.md +++ b/noir/docs/versioned_docs/version-v0.19.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md +++ b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md +++ b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md +++ b/noir/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/docs/versioned_docs/version-v0.19.0/migration_notes.md b/noir/docs/versioned_docs/version-v0.19.0/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.19.0/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md index fd7511a3423..e91e73a4c4f 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md b/noir/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md +++ b/noir/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md b/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md index ccdd53f2bcc..2e90779ceab 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md +++ b/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md @@ -94,7 +94,7 @@ This async method generates a witness and a proof given an object as input. 
### Syntax ```js -async generateFinalproof(input) +async generateFinalProof(input) ``` ### Parameters diff --git a/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md b/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md index f444eab1772..958cabd6289 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md +++ b/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md @@ -41,7 +41,7 @@ constructor(acirCircuit, (numberOfThreads = 1)); | Parameter | Type | Description | | ----------------- | ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode Tipically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | +| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode typically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | | `numberOfThreads` | Number (optional) | The number of threads to be used by the backend. Defaults to 1. | ### Usage diff --git a/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md b/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md +++ b/noir/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/noir/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). 
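To make the black-box note above concrete, a minimal Noir sketch using the bitwise `&` operator, which per the hunk above is the syntax that invokes the `AND` black box function (with an ACVM fallback when the backend lacks native support):

```rust
fn main(x: u8, y: u8) -> pub u8 {
    // `&` on integer types is lowered to the AND black box function
    // (or to the ACVM fallback implementation).
    x & y
}
```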
diff --git a/noir/docs/versioned_docs/version-v0.19.0/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.19.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/noir/docs/versioned_docs/version-v0.19.0/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.19.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/noir/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md b/noir/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md +++ b/noir/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/noir/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md b/noir/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md +++ b/noir/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/versioned_docs/version-v0.19.1/index.md b/noir/docs/versioned_docs/version-v0.19.1/index.md index 4e2f4043892..380368db036 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/index.md +++ b/noir/docs/versioned_docs/version-v0.19.1/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. 
diff --git a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md +++ b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md +++ b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md +++ b/noir/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/docs/versioned_docs/version-v0.19.1/migration_notes.md b/noir/docs/versioned_docs/version-v0.19.1/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.19.1/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md index fd7511a3423..e91e73a4c4f 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md b/noir/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md +++ b/noir/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
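To make the `nargo test` behavior described above concrete, here is a minimal illustrative sketch of such a test; the `add` function and the specific values are assumptions for this example and are not taken from the patched docs:

```rust
fn add(x: Field, y: Field) -> Field {
    x + y
}

#[test]
fn test_add() {
    // `nargo test` executes this function and reports a failure if any
    // constraint (such as this assert) is not satisfied.
    assert(add(2, 3) == 5);
}
```

Because test functions currently take no arguments, any inputs have to be hard-coded inside the test body as above.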
### Test fail diff --git a/noir/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md b/noir/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md +++ b/noir/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/noir/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/versioned_docs/version-v0.19.1/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.19.1/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/noir/docs/versioned_docs/version-v0.19.1/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.19.1/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
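As a rough sketch of the unconstrained `println` behavior described above (the `Person` struct and the inputs are assumptions made up for this illustration):

```rust
use dep::std;

struct Person {
    age: Field,
    height: Field,
}

fn main(age: Field, height: Field) {
    let person = Person { age: age, height: height };
    // `println` is unconstrained, so it can print fields, structs, and
    // expressions without adding constraints to the circuit.
    std::println(person);
    std::println(age + height);
    std::println("Hello world!");
}
```

Running this with `nargo execute` (or with `nargo test --show-output` from a test) prints the values during execution, as the note above explains.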
For example: diff --git a/noir/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md b/noir/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md +++ b/noir/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/noir/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md b/noir/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md +++ b/noir/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/versioned_docs/version-v0.19.2/index.md b/noir/docs/versioned_docs/version-v0.19.2/index.md index 4e2f4043892..380368db036 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/index.md +++ b/noir/docs/versioned_docs/version-v0.19.2/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. 
-It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md +++ b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? 
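To ground the definition of unconstrained functions above, here is a minimal illustrative sketch of the typical hint pattern: compute a value without constraints, then constrain the result in the caller. The `inverse_hint` helper and its use are assumptions for this example, not code from the patched docs:

```rust
// Illustrative only: compute a field inverse without adding constraints.
unconstrained fn inverse_hint(x: Field) -> Field {
    1 / x
}

fn main(x: Field) {
    assert(x != 0);
    let x_inv = inverse_hint(x);
    // The hinted value is re-constrained here, so soundness does not rely
    // on the unconstrained computation itself.
    assert(x * x_inv == 1);
}
```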
diff --git a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md +++ b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md +++ b/noir/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/docs/versioned_docs/version-v0.19.2/migration_notes.md b/noir/docs/versioned_docs/version-v0.19.2/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.19.2/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md index fd7511a3423..e91e73a4c4f 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md b/noir/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md +++ b/noir/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/noir/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md b/noir/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md +++ b/noir/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/noir/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. 
-You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/versioned_docs/version-v0.19.2/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.19.2/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/noir/docs/versioned_docs/version-v0.19.2/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.19.2/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/noir/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md b/noir/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md +++ b/noir/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
+Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies.

#### Setting up your environment

@@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe

### Option 4: WSL (for Windows)

-The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL).
+The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL).

Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL.

diff --git a/noir/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md b/noir/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md
index 9a17f5d6360..d28a54a1600 100644
--- a/noir/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md
+++ b/noir/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md
@@ -39,7 +39,7 @@ Example Nargo.toml:

```toml
[package]
-name = "noirstarter"
+name = "noir_starter"
type = "bin"
authors = ["Alice"]
compiler_version = "0.9.0"

diff --git a/noir/docs/versioned_docs/version-v0.19.3/index.md b/noir/docs/versioned_docs/version-v0.19.3/index.md
index 4e2f4043892..380368db036 100644
--- a/noir/docs/versioned_docs/version-v0.19.3/index.md
+++ b/noir/docs/versioned_docs/version-v0.19.3/index.md
@@ -25,7 +25,7 @@ slug: /

Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system.

-It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax.
+Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax.

## Who is Noir for?

@@ -34,7 +34,7 @@ Noir can be used for a variety of purposes.

### Solidity Developers

Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will
-be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create
+be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create
a verifier contract.

### Protocol Developers

@@ -55,7 +55,7 @@ must be implemented for it.

Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled
-to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others).
+to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others).
This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM.
diff --git a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md +++ b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md +++ b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md +++ b/noir/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/docs/versioned_docs/version-v0.19.3/migration_notes.md b/noir/docs/versioned_docs/version-v0.19.3/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.19.3/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md index fd7511a3423..e91e73a4c4f 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md b/noir/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md +++ b/noir/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/noir/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md b/noir/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md +++ b/noir/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/noir/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/versioned_docs/version-v0.19.3/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.19.3/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/noir/docs/versioned_docs/version-v0.19.3/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.19.3/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/noir/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md b/noir/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md +++ b/noir/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/noir/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md b/noir/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md +++ b/noir/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/versioned_docs/version-v0.19.4/index.md b/noir/docs/versioned_docs/version-v0.19.4/index.md index 4e2f4043892..380368db036 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/index.md +++ b/noir/docs/versioned_docs/version-v0.19.4/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. 
-It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md +++ b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? 
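Referring back to the operators hunk above, here is a small illustrative sketch of using `|` and `&` as non-short-circuiting logical operators on booleans; the function and the particular check are assumptions for this example:

```rust
fn main(x: Field) {
    let is_zero = x == 0;
    let is_one = x == 1;
    // `|` acts as logical OR and `&` as logical AND on booleans,
    // just without short-circuiting.
    let is_bit = is_zero | is_one;
    assert(is_bit & (x * (x - 1) == 0));
}
```

Both operands are always evaluated, which is why these operators are only a drop-in replacement where short-circuiting is not needed.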
diff --git a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md +++ b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md +++ b/noir/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/noir/docs/versioned_docs/version-v0.19.4/migration_notes.md b/noir/docs/versioned_docs/version-v0.19.4/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.19.4/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md index fd7511a3423..e91e73a4c4f 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md b/noir/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md +++ b/noir/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/noir/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md b/noir/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md index e54116fb1d8..c54468891af 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md +++ b/noir/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. #### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/noir/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. 
-You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/versioned_docs/version-v0.19.4/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.19.4/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/noir/docs/versioned_docs/version-v0.19.4/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.19.4/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. For example: diff --git a/noir/docs/versioned_docs/version-v../explainers/explainer-recursion.md b/noir/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md similarity index 98% rename from noir/docs/versioned_docs/version-v../explainers/explainer-recursion.md rename to noir/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md index cc431a878dc..9357d3c7341 100644 --- a/noir/docs/versioned_docs/version-v../explainers/explainer-recursion.md +++ b/noir/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md @@ -149,7 +149,7 @@ In such a situation, and assuming Alice is first, she would skip the first part ### Aggregating proofs -In some one-way interaction situations, recursiveness would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. +In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. 
Instead, the architecture would comprise two circuits: diff --git a/noir/docs/versioned_docs/version-v../explanations/noir/traits.md b/noir/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md similarity index 100% rename from noir/docs/versioned_docs/version-v../explanations/noir/traits.md rename to noir/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md diff --git a/noir/docs/docs/explanations/standard_library/traits.md b/noir/docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md similarity index 100% rename from noir/docs/docs/explanations/standard_library/traits.md rename to noir/docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md diff --git a/noir/docs/versioned_docs/version-v../getting_started/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json diff --git a/noir/docs/versioned_docs/version-v../getting_started/create_a_project.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/create_a_project.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md diff --git a/noir/docs/versioned_docs/version-v../getting_started/installation/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/installation/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json diff --git a/noir/docs/versioned_docs/version-v../getting_started/installation/index.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/installation/index.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md diff --git a/noir/docs/versioned_docs/version-v../getting_started/installation/other_install_methods.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md similarity index 94% rename from noir/docs/versioned_docs/version-v../getting_started/installation/other_install_methods.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md index 36f05657277..a532f83750e 100644 --- a/noir/docs/versioned_docs/version-v../getting_started/installation/other_install_methods.md +++ b/noir/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md @@ -83,9 +83,9 @@ Check if the installation was successful by running `nargo --version`. You shoul ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. 
It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -163,7 +163,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/noir/docs/versioned_docs/version-v../getting_started/project_breakdown.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md similarity index 99% rename from noir/docs/versioned_docs/version-v../getting_started/project_breakdown.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md index 5a214804f7b..c4e2a9ae003 100644 --- a/noir/docs/versioned_docs/version-v../getting_started/project_breakdown.md +++ b/noir/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md @@ -40,7 +40,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/noir/docs/versioned_docs/version-v../getting_started/tooling/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/tooling/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json diff --git a/noir/docs/versioned_docs/version-v../getting_started/tooling/index.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/tooling/index.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md diff --git a/noir/docs/versioned_docs/version-v../getting_started/tooling/language_server.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md similarity index 100% rename from noir/docs/versioned_docs/version-v../getting_started/tooling/language_server.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md diff --git a/noir/docs/versioned_docs/version-v../getting_started/tooling/testing.md b/noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md similarity
index 90% rename from noir/docs/versioned_docs/version-v../getting_started/tooling/testing.md rename to noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md index 868a061200d..d3e0c522473 100644 --- a/noir/docs/versioned_docs/version-v../getting_started/tooling/testing.md +++ b/noir/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md @@ -24,8 +24,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/noir/docs/versioned_docs/version-v../how_to/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/how_to/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../how_to/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/how_to/_category_.json diff --git a/noir/docs/versioned_docs/version-v../how_to/how-to-recursion.md b/noir/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md similarity index 95% rename from noir/docs/versioned_docs/version-v../how_to/how-to-recursion.md rename to noir/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md index 226f7e6e73d..db9ad0e99f8 100644 --- a/noir/docs/versioned_docs/version-v../how_to/how-to-recursion.md +++ b/noir/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md @@ -53,7 +53,7 @@ For a full example on how recursive proofs work, please refer to the [noir-examp In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. -For recursiveness, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. +For recursion, this doesn't happen, and the only need for `noir_js` is only to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. 
It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: @@ -159,7 +159,7 @@ const backends = { main: new BarretenbergBackend(circuits.main), recursive: new BarretenbergBackend(circuits.recursive) } -const noirs = { +const noir_programs = { main: new Noir(circuits.main, backends.main), recursive: new Noir(circuits.recursive, backends.recursive) } @@ -169,7 +169,7 @@ This allows you to neatly call exactly the method you want without conflicting n ```js // Alice runs this 👇 -const { witness: mainWitness } = await noirs.main.execute(input) +const { witness: mainWitness } = await noir_programs.main.execute(input) const proof = await backends.main.generateIntermediateProof(mainWitness) // Bob runs this 👇 @@ -178,7 +178,7 @@ const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateInterm proof, numPublicInputs, ); -const recursiveProof = await noirs.recursive.generateFinalProof(recursiveInputs) +const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) ``` ::: diff --git a/noir/docs/versioned_docs/version-v../how_to/merkle-proof.mdx b/noir/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../how_to/merkle-proof.mdx rename to noir/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx diff --git a/noir/docs/versioned_docs/version-v../how_to/solidity_verifier.md b/noir/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md similarity index 100% rename from noir/docs/versioned_docs/version-v../how_to/solidity_verifier.md rename to noir/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md diff --git a/noir/docs/versioned_docs/version-v../index.md b/noir/docs/versioned_docs/version-v0.22.0/index.md similarity index 95% rename from noir/docs/versioned_docs/version-v../index.md rename to noir/docs/versioned_docs/version-v0.22.0/index.md index 016832f9f5e..eaf8c59f935 100644 --- a/noir/docs/versioned_docs/version-v../index.md +++ b/noir/docs/versioned_docs/version-v0.22.0/index.md @@ -26,7 +26,7 @@ Noir, a domain-specific language crafted for SNARK proving systems, stands out w and robust capabilities. Unlike conventional approaches that compile directly to a fixed NP-complete language, Noir takes a two-pronged path. It first compiles to an adaptable intermediate language known as ACIR. From there, depending on the project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's -barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkwork's Marlin +barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin backend, among others. This innovative design introduces unique challenges, yet it strategically separates the programming language from the @@ -43,8 +43,7 @@ contracts efficiently. While the current alpha version offers this as a direct f to modularize this process for even greater ease of use. Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will be -modularised in the future; however, as of the alpha, you can use the - command to create a verifier contract. +modularized in the future; however, as of the alpha, you can use the `nargo codegen-verifier` command to create a verifier contract. 
### Protocol Developers diff --git a/noir/docs/versioned_docs/version-v../migration_notes.md b/noir/docs/versioned_docs/version-v0.22.0/migration_notes.md similarity index 92% rename from noir/docs/versioned_docs/version-v../migration_notes.md rename to noir/docs/versioned_docs/version-v0.22.0/migration_notes.md index d5d0682cf0c..184ca283539 100644 --- a/noir/docs/versioned_docs/version-v../migration_notes.md +++ b/noir/docs/versioned_docs/version-v0.22.0/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/modules_packages_crates/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json diff --git a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/crates_and_packages.md b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/modules_packages_crates/crates_and_packages.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md diff --git a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/dependencies.md b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md similarity index 97% rename from noir/docs/versioned_docs/version-v../noir/modules_packages_crates/dependencies.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md index 21286bb3f72..a37dc401b7d 100644 --- a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/dependencies.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md @@ -49,7 +49,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -61,17 +61,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/modules.md b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md similarity index 96% rename from noir/docs/versioned_docs/version-v../noir/modules_packages_crates/modules.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md index f9f15aee8be..ae822a1cff4 100644 --- a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/modules.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md @@ -11,7 +11,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/noir/docs/versioned_docs/version-v../noir/modules_packages_crates/workspaces.md b/noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/modules_packages_crates/workspaces.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/black_box_fns.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md similarity index 96% rename from noir/docs/versioned_docs/version-v../noir/standard_library/black_box_fns.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/noir/docs/versioned_docs/version-v../noir/standard_library/black_box_fns.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). 
+You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/ec_primitives.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/ec_primitives.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/eddsa.mdx b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/eddsa.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/hashes.mdx b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/hashes.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/index.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/index.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/scalar.mdx b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/scalar.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/schnorr.mdx 
b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/cryptographic_primitives/schnorr.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/logging.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md similarity index 87% rename from noir/docs/versioned_docs/version-v../noir/standard_library/logging.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md index 16daf922e15..2e163b52ab3 100644 --- a/noir/docs/versioned_docs/version-v../noir/standard_library/logging.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md @@ -22,7 +22,7 @@ The standard library provides two familiar statements you can use: `println` and You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. 
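As a rough sketch of the behaviour described above (a hypothetical example; the `Person` struct, its fields, and `main`'s signature are invented for illustration and are not part of this patch), printing a struct, an expression, and a string might look like:

```rust
use dep::std;

struct Person {
    age: Field,
    height: Field,
}

fn main(age: Field, height: Field) {
    let person = Person { age: age, height: height };
    std::println(person);        // structs can be printed
    std::println(age + height);  // so can expressions
    std::println("done");        // and plain strings
}
```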
For example: diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/merkle_trees.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/merkle_trees.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/options.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/options.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/recursion.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/recursion.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md diff --git a/noir/docs/versioned_docs/version-v../noir/standard_library/zeroed.md b/noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/standard_library/zeroed.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md diff --git a/noir/docs/docs/noir/syntax/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json similarity index 100% rename from noir/docs/docs/noir/syntax/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/assert.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/assert.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/comments.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/comments.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/control_flow.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/control_flow.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_bus.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_bus.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/arrays.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md similarity index 100% rename from 
noir/docs/versioned_docs/version-v../noir/syntax/data_types/arrays.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/booleans.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/booleans.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/fields.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/fields.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md diff --git a/noir/docs/docs/noir/syntax/data_types/function_types.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md similarity index 100% rename from noir/docs/docs/noir/syntax/data_types/function_types.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md diff --git a/noir/docs/docs/noir/syntax/data_types/index.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md similarity index 98% rename from noir/docs/docs/noir/syntax/data_types/index.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md index 52e568e9b7e..01cd0431a68 100644 --- a/noir/docs/docs/noir/syntax/data_types/index.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/integers.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/integers.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/references.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/references.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/slices.mdx b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/slices.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/strings.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/strings.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/structs.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/structs.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/tuples.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/tuples.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/data_types/vectors.mdx b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/data_types/vectors.mdx rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/distinct.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/distinct.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/functions.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/functions.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/generics.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/generics.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/lambdas.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md 
similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/lambdas.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md diff --git a/noir/docs/docs/noir/syntax/mutability.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md similarity index 97% rename from noir/docs/docs/noir/syntax/mutability.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md index 58e9c1cecfb..6abfae3cfa7 100644 --- a/noir/docs/docs/noir/syntax/mutability.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md @@ -70,11 +70,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/noir/docs/docs/noir/syntax/ops.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md similarity index 97% rename from noir/docs/docs/noir/syntax/ops.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md index 977c8ba1203..60425cb8994 100644 --- a/noir/docs/docs/noir/syntax/ops.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md @@ -63,7 +63,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/noir/docs/versioned_docs/version-v../noir/syntax/shadowing.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md similarity index 100% rename from noir/docs/versioned_docs/version-v../noir/syntax/shadowing.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md diff --git a/noir/docs/docs/noir/syntax/unconstrained.md b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md similarity index 98% rename from noir/docs/docs/noir/syntax/unconstrained.md rename to noir/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md index 7a61d3953ef..6b3424f7993 100644 --- a/noir/docs/docs/noir/syntax/unconstrained.md +++ b/noir/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md @@ -6,7 +6,7 @@ keywords: [Noir programming language, unconstrained, open] sidebar_position: 5 --- -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? 
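As a rough sketch of the definition above (a hypothetical example; the helper name and its logic are invented for illustration and are not part of this patch), an unconstrained hint re-checked by its constrained caller might look like:

```rust
// The unconstrained helper runs as an out-of-circuit hint: its body adds no
// constraints, so the constrained caller must verify the result itself.
unconstrained fn pick_nonzero(a: Field, b: Field) -> Field {
    if a != 0 { a } else { b }
}

fn main(a: Field, b: Field) {
    let chosen = pick_nonzero(a, b);
    // Constrain the untrusted hint inside the circuit.
    assert(chosen != 0);
}
```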
diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/.nojekyll b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/.nojekyll rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/index.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/index.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/interfaces/Backend.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/interfaces/Backend.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs 
rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/.nojekyll b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/.nojekyll rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/classes/Noir.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/classes/Noir.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/and.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/and.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/blake2s256.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/blake2s256.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/keccak256.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/keccak256.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/sha256.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/sha256.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/xor.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md similarity index 
100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/functions/xor.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/index.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/index.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/InputMap.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/InputMap.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ProofData.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/ProofData.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/type-aliases/WitnessMap.md rename to 
noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md diff --git a/noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/typedoc-sidebar.cjs b/noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/NoirJS/noir_js/typedoc-sidebar.cjs rename to noir/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs diff --git a/noir/docs/versioned_docs/version-v../reference/_category_.json b/noir/docs/versioned_docs/version-v0.22.0/reference/_category_.json similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/_category_.json rename to noir/docs/versioned_docs/version-v0.22.0/reference/_category_.json diff --git a/noir/docs/versioned_docs/version-v../reference/nargo_commands.md b/noir/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md similarity index 100% rename from noir/docs/versioned_docs/version-v../reference/nargo_commands.md rename to noir/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md diff --git a/noir/docs/versioned_docs/version-v../tutorials/noirjs_app.md b/noir/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md similarity index 100% rename from noir/docs/versioned_docs/version-v../tutorials/noirjs_app.md rename to noir/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md diff --git a/noir/noir_stdlib/src/cmp.nr b/noir/noir_stdlib/src/cmp.nr new file mode 100644 index 00000000000..11127494c18 --- /dev/null +++ b/noir/noir_stdlib/src/cmp.nr @@ -0,0 +1,310 @@ +trait Eq { + fn eq(self, other: Self) -> bool; +} + +impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } + +impl Eq for u1 { fn eq(self, other: u1) -> bool { self == other } } +impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } +impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } +impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } +impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } + +impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } +impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } +impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } +impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } + +impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } +impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } + +impl Eq for [T; N] where T: Eq { + fn eq(self, other: [T; N]) -> bool { + let mut result = true; + for i in 0 .. 
self.len() { + result &= self[i].eq(other[i]); + } + result + } +} + +impl Eq for str<N> { + fn eq(self, other: str<N>) -> bool { + let self_bytes = self.as_bytes(); + let other_bytes = other.as_bytes(); + self_bytes == other_bytes + } +} + +impl Eq for (A, B) where A: Eq, B: Eq { + fn eq(self, other: (A, B)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) + } +} + +impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { + fn eq(self, other: (A, B, C)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) + } +} + +impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { + fn eq(self, other: (A, B, C, D)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) + } +} + +impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { + fn eq(self, other: (A, B, C, D, E)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) + } +} + +impl Eq for Ordering { + fn eq(self, other: Ordering) -> bool { + self.result == other.result + } +} + + +// Noir doesn't have enums yet so we emulate (Lt | Eq | Gt) with a struct +// that has 3 public functions for constructing the struct. +struct Ordering { + result: Field, +} + +impl Ordering { + // Implementation note: 0, 1, and 2 for Lt, Eq, and Gt are built + // into the compiler, do not change these without also updating + // the compiler itself! + pub fn less() -> Ordering { + Ordering { result: 0 } + } + + pub fn equal() -> Ordering { + Ordering { result: 1 } + } + + pub fn greater() -> Ordering { + Ordering { result: 2 } + } +} + + +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} + +// Note: Field deliberately does not implement Ord + +impl Ord for u8 { + fn cmp(self, other: u8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u16 { + fn cmp(self, other: u16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u32 { + fn cmp(self, other: u32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u64 { + fn cmp(self, other: u64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i8 { + fn cmp(self, other: i8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i16 { + fn cmp(self, other: i16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i32 { + fn cmp(self, other: i32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i64 { + fn cmp(self, other: i64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for () { + fn cmp(_self: Self, _other: ()) -> Ordering { + Ordering::equal() + } +} + +impl Ord for bool { + fn cmp(self, other: bool) -> Ordering { + if self { + if other { + Ordering::equal() + } else { + Ordering::greater() + } + } else { + if other { + Ordering::less() + }
else { + Ordering::equal() + } + } + } +} + +impl Ord for [T; N] where T: Ord { + // The first non-equal element of both arrays determines + // the ordering for the whole array. + fn cmp(self, other: [T; N]) -> Ordering { + let mut result = Ordering::equal(); + for i in 0 .. self.len() { + if result == Ordering::equal() { + let result_i = self[i].cmp(other[i]); + + if result_i == Ordering::less() { + result = result_i; + } else if result_i == Ordering::greater() { + result = result_i; + } + } + } + result + } +} + +impl Ord for (A, B) where A: Ord, B: Ord { + fn cmp(self, other: (A, B)) -> Ordering { + let result = self.0.cmp(other.0); + + if result != Ordering::equal() { + result + } else { + self.1.cmp(other.1) + } + } +} + +impl Ord for (A, B, C) where A: Ord, B: Ord, C: Ord { + fn cmp(self, other: (A, B, C)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + result + } +} + +impl Ord for (A, B, C, D) where A: Ord, B: Ord, C: Ord, D: Ord { + fn cmp(self, other: (A, B, C, D)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + result + } +} + +impl Ord for (A, B, C, D, E) where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { + fn cmp(self, other: (A, B, C, D, E)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + if result == Ordering::equal() { + result = self.4.cmp(other.4); + } + + result + } +} diff --git a/noir/noir_stdlib/src/ec/montcurve.nr b/noir/noir_stdlib/src/ec/montcurve.nr index 82d22837b46..83a17bae322 100644 --- a/noir/noir_stdlib/src/ec/montcurve.nr +++ b/noir/noir_stdlib/src/ec/montcurve.nr @@ -12,6 +12,8 @@ mod affine { use crate::ec::safe_inverse; use crate::ec::sqrt; use crate::ec::ZETA; + use crate::cmp::Eq; + // Curve specification struct Curve { // Montgomery Curve configuration (ky^2 = x^3 + j*x^2 + x) j: Field, @@ -32,11 +34,6 @@ mod affine { Self {x, y, infty: false} } - // Check for equality - fn eq(self, p: Self) -> bool { - (self.infty & p.infty) | (!self.infty & !p.infty & (self.x == p.x) & (self.y == p.y)) - } - // Check if zero pub fn is_zero(self) -> bool { self.infty @@ -76,6 +73,12 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + (self.infty & p.infty) | (!self.infty & !p.infty & (self.x == p.x) & (self.y == p.y)) + } + } + impl Curve { // Curve constructor pub fn new(j: Field, k: Field, gen: Point) -> Self { @@ -219,6 +222,7 @@ mod curvegroup { use crate::ec::swcurve::curvegroup::Point as SWPoint; use crate::ec::tecurve::curvegroup::Curve as TECurve; use crate::ec::tecurve::curvegroup::Point as TEPoint; + use crate::cmp::Eq; struct Curve { // Montgomery Curve configuration (ky^2 z = x*(x^2 + j*x*z + z*z)) j: Field, @@ -239,11 +243,6 @@ mod curvegroup { Self {x, y, z} } - // Check for equality - fn eq(self, p: Self) -> bool { - (self.z == p.z) | (((self.x * self.z) == (p.x * p.z)) & ((self.y * self.z) == (p.y * p.z))) - } - // Check if zero pub fn is_zero(self) -> bool { self.z == 0 @@ -277,6 +276,12 @@ mod curvegroup { } } + impl Eq for 
Point { + fn eq(self, p: Self) -> bool { + (self.z == p.z) | (((self.x * self.z) == (p.x * p.z)) & ((self.y * self.z) == (p.y * p.z))) + } + } + impl Curve { // Curve constructor pub fn new(j: Field, k: Field, gen: Point) -> Self { diff --git a/noir/noir_stdlib/src/ec/swcurve.nr b/noir/noir_stdlib/src/ec/swcurve.nr index e9b6f661843..e64f5a7be02 100644 --- a/noir/noir_stdlib/src/ec/swcurve.nr +++ b/noir/noir_stdlib/src/ec/swcurve.nr @@ -7,6 +7,8 @@ mod affine { use crate::ec::safe_inverse; use crate::ec::is_square; use crate::ec::sqrt; + use crate::cmp::Eq; + // Curve specification struct Curve { // Short Weierstraß curve // Coefficients in defining equation y^2 = x^3 + ax + b @@ -28,15 +30,6 @@ mod affine { Self {x, y, infty: false} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, infty: inf1} = self; - let Self {x: x2, y: y2, infty: inf2} = p; - - (inf1 & inf2) - | (!inf1 & !inf2 & (x1 == x2) & (y1 == y2)) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -65,6 +58,16 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, infty: inf1} = self; + let Self {x: x2, y: y2, infty: inf2} = p; + + (inf1 & inf2) + | (!inf1 & !inf2 & (x1 == x2) & (y1 == y2)) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { @@ -182,6 +185,8 @@ mod curvegroup { // Points are represented by three-dimensional Jacobian coordinates. // See for details. use crate::ec::swcurve::affine; + use crate::cmp::Eq; + // Curve specification struct Curve { // Short Weierstraß curve // Coefficients in defining equation y^2 = x^3 + axz^4 + bz^6 @@ -203,14 +208,6 @@ mod curvegroup { Self {x, y, z} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, z: z1} = self; - let Self {x: x2, y: y2, z: z2} = p; - - ((z1 == 0) & (z2 == 0)) | ((z1 != 0) & (z2 != 0) & (x1*z2*z2 == x2*z1*z1) & (y1*z2*z2*z2 == y2*z1*z1*z1)) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -240,6 +237,15 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, z: z1} = self; + let Self {x: x2, y: y2, z: z2} = p; + + ((z1 == 0) & (z2 == 0)) | ((z1 != 0) & (z2 != 0) & (x1*z2*z2 == x2*z1*z1) & (y1*z2*z2*z2 == y2*z1*z1*z1)) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { diff --git a/noir/noir_stdlib/src/ec/tecurve.nr b/noir/noir_stdlib/src/ec/tecurve.nr index 849b45ff012..5333ece4c4a 100644 --- a/noir/noir_stdlib/src/ec/tecurve.nr +++ b/noir/noir_stdlib/src/ec/tecurve.nr @@ -9,6 +9,8 @@ mod affine { use crate::ec::montcurve::affine::Point as MPoint; use crate::ec::swcurve::affine::Curve as SWCurve; use crate::ec::swcurve::affine::Point as SWPoint; + use crate::cmp::Eq; + // Curve specification struct Curve { // Twisted Edwards curve // Coefficients in defining equation ax^2 + y^2 = 1 + dx^2y^2 @@ -29,14 +31,6 @@ mod affine { Self { x, y } } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1} = self; - let Self {x: x2, y: y2} = p; - - (x1 == x2) & (y1 == y2) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -74,6 +68,15 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1} = self; + let Self {x: x2, y: y2} = p; + + (x1 == x2) & (y1 == y2) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { @@ -198,6 
+201,8 @@ mod curvegroup { use crate::ec::montcurve::curvegroup::Point as MPoint; use crate::ec::swcurve::curvegroup::Curve as SWCurve; use crate::ec::swcurve::curvegroup::Point as SWPoint; + use crate::cmp::Eq; + // Curve specification struct Curve { // Twisted Edwards curve // Coefficients in defining equation a(x^2 + y^2)z^2 = z^4 + dx^2y^2 @@ -220,14 +225,6 @@ mod curvegroup { Self {x, y, t, z} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, t: _t1, z: z1} = self; - let Self {x: x2, y: y2, t: _t2, z:z2} = p; - - (x1*z2 == x2*z1) & (y1*z2 == y2*z1) - } - // Check if zero pub fn is_zero(self) -> bool { let Self {x, y, t, z} = self; @@ -259,6 +256,15 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, t: _t1, z: z1} = self; + let Self {x: x2, y: y2, t: _t2, z:z2} = p; + + (x1*z2 == x2*z1) & (y1*z2 == y2*z1) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { diff --git a/noir/noir_stdlib/src/lib.nr b/noir/noir_stdlib/src/lib.nr index 70b4681b54d..9b166f6ae94 100644 --- a/noir/noir_stdlib/src/lib.nr +++ b/noir/noir_stdlib/src/lib.nr @@ -19,6 +19,7 @@ mod compat; mod option; mod string; mod test; +mod cmp; mod ops; mod default; mod prelude; diff --git a/noir/noir_stdlib/src/ops.nr b/noir/noir_stdlib/src/ops.nr index 23acc2f0e5d..3078ac11296 100644 --- a/noir/noir_stdlib/src/ops.nr +++ b/noir/noir_stdlib/src/ops.nr @@ -63,55 +63,94 @@ impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } -trait Eq { - fn eq(self, other: Self) -> bool; +trait Rem { + fn rem(self, other: Self) -> Self; } -impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } - -impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } -impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } -impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } -impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } - -impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } -impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } -impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } -impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } - -impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } -impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } - -impl Eq for [T; N] where T: Eq { - fn eq(self, other: [T; N]) -> bool { - let mut result = true; - for i in 0 .. 
self.len() { - result &= self[i].eq(other[i]); - } - result - } +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +trait BitOr { + fn bitor(self, other: Self) -> Self; } -impl Eq for (A, B) where A: Eq, B: Eq { - fn eq(self, other: (A, B)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) - } +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +trait BitAnd { + fn bitand(self, other: Self) -> Self; } -impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { - fn eq(self, other: (A, B, C)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) - } +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +trait BitXor { + fn bitxor(self, other: Self) -> Self; } -impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { - fn eq(self, other: (A, B, C, D)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) - } +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + +trait Shl { + fn shl(self, other: Self) -> Self; } -impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { - fn eq(self, other: (A, B, C, D, E)) -> bool { - self.0.eq(other.0) & 
self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) - } +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shl for i8 { fn shl(self, other: i8) -> i8 { self << other } } +// impl Shl for i16 { fn shl(self, other: i16) -> i16 { self << other } } +// impl Shl for i32 { fn shl(self, other: i32) -> i32 { self << other } } +// impl Shl for i64 { fn shl(self, other: i64) -> i64 { self << other } } + +trait Shr { + fn shr(self, other: Self) -> Self; } + +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u16 { fn shr(self, other: u16) -> u16 { self >> other } } +impl Shr for u32 { fn shr(self, other: u32) -> u32 { self >> other } } +impl Shr for u64 { fn shr(self, other: u64) -> u64 { self >> other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shr for i8 { fn shr(self, other: i8) -> i8 { self >> other } } +// impl Shr for i16 { fn shr(self, other: i16) -> i16 { self >> other } } +// impl Shr for i32 { fn shr(self, other: i32) -> i32 { self >> other } } +// impl Shr for i64 { fn shr(self, other: i64) -> i64 { self >> other } } diff --git a/noir/noir_stdlib/src/prelude.nr b/noir/noir_stdlib/src/prelude.nr index f33a1f7e7f1..56020509122 100644 --- a/noir/noir_stdlib/src/prelude.nr +++ b/noir/noir_stdlib/src/prelude.nr @@ -1,3 +1,5 @@ use crate::collections::vec::Vec; use crate::option::Option; use crate::{print, println, assert_constant}; +use crate::cmp::{Eq, Ord}; +use crate::default::Default; diff --git a/noir/test_programs/acir_artifacts/double_verify_proof/target/acir.gz b/noir/test_programs/acir_artifacts/double_verify_proof/target/acir.gz deleted file mode 100644 index 31ff852f775fd90f01e16b25100e391e964dd19d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1186 zcma*j{Zo<$0KjqU>3SQZ!TY+CxoUcOk4PddK$hIhGx+$ z@dby9iCQ=GQdrI#r=iUfZ4f>WHKlVi(P+QO445w_&KS5LUi}Yd6 zX;(hhC7iUqKS8(^H1)2`YwVYR?w8M=i%A<<;ZuFP6z!O2YH>%jq6ed2fC$tqZ(n(a z;`;{uJLr^8mIn=<5O*ai?j`A8K~dRRzH&KN(TnImihK7eq?ya!CHGppx>xs?bhXM@ zt0zl(Tf3K5cg>Lfr(;!Qe}rmGP{sNie2g)wm_8)i2&v-w3{J+=DhdZVYfMtnIED~o zoQnDeDK#=w@lLDe8KScmWpd%AIO|BJ03N+fi!*uipf(-LbdtwlYP-NPX6xs?ataek zHYW0DOv4EyjYs_*$^0lhZYVN^@t6_X&rELSln7nCDbP$G*7}=#%+RooV+t`d%C*iW zoEa?FF-<4TsoUBJW!09<*TgZCw&87K`8P7T^2QHv;S3{$Og25y-s>AWr491VQ*{%*--Rpp)-vud8tw#uBaC|Hk4#o zDVc3|;@-Z9_zopLSSg&9hHx9#k+7lU_bX*#?WkiP@>9?*o+;~qgi)U#N6+I4Gbk%jdWVoCMUs!1PnS=gOhfe*(X z655~y>7&y5$2sTleprmq3RRVU>%hW3iuK##96bI2c8_qE;IiAp$~k{VW?gmUsA&rH@%x_BL~*1YOC4K}J{sUT zMr^XMW2&hIXuta&3Sz^EZL&J%4EjiJ7B-F8!gq{UQ-aW*_ucD>w<2vxYO+1rHNc%u z{MN!YLOo7kNd_jtKY%<0PyNo}=}s1J;HQ?V!NC^6paT3AxPTnfh!dweGHC;ypblUo zftqG`GA+}0pdNezoJRsQO^K7PnR^HL;6vaX;;(rSU0wSiqng?%(x@|L`!9e>073w2 zgM;RBhN_a>+!}LQKNP9BROA1~k^iwFP!00002|E-*3uw_Y?hPyu7wv9A~-`KV%v2twNwr$(CZQHhuXPu6i z*yrEQm|NWyU484`Jc)%DxwpA>+vS)3=XtmP@iy1J&r|Fu!-nOYjM}TXRa0BCe2Ol) z>{D9BX46coq0BZz8@A6V#oXaAmihnvDtElOvErR>a9(%5xv}hBZf-1d*Bcz`ZZ|mA z-EVNLd)(ky_q@Te?sbD>-TMZ|y3Y-cb>ABt>wY&l*8OjAtOwlSSP#6xu^x1TV?Fo= z$9l*Oj`h&fV^wE;9m)14wuP|F8ab63O%676Nu@k&Z(cT;AwgWOvC!;jX2Pg`MXAj_ z{Pb9H4mW*XDVE{8o5?H}axl`Z?lVLfJZzf55@R?&(dMQmD&MVjG~E$*?W$8vKn$sAj5))fQQXY?+eclQ;Hjlu)`1>8bTxOj^_TQrP`of~9OcWp7?N6X{dP=T&Dg 
q-n>XZv^kI%$X_l!Zs~10y@O@Kb8p?4XK2g~j`hD6<9 bool; +trait MyEq { + fn my_eq(self, other: Self) -> bool; } -impl Eq for [T; 3] where T: Eq { - fn eq(self, other: Self) -> bool { +impl MyEq for [T; 3] where T: MyEq { + fn my_eq(self, other: Self) -> bool { let mut ret = true; for i in 0 .. self.len() { - ret &= self[i].eq(other[i]); + ret &= self[i].my_eq(other[i]); } ret } } -impl Eq for Field { - fn eq(self, other: Field) -> bool { +impl MyEq for Field { + fn my_eq(self, other: Field) -> bool { self == other } } diff --git a/noir/test_programs/compile_success_empty/trait_default_implementation/src/main.nr b/noir/test_programs/compile_success_empty/trait_default_implementation/src/main.nr index e1f29ce3f48..2f5bff8c40c 100644 --- a/noir/test_programs/compile_success_empty/trait_default_implementation/src/main.nr +++ b/noir/test_programs/compile_success_empty/trait_default_implementation/src/main.nr @@ -1,12 +1,11 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; fn method2(x: Field) -> Field { - x + x } - } struct Foo { @@ -14,8 +13,8 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } } diff --git a/noir/test_programs/compile_success_empty/trait_override_implementation/src/main.nr b/noir/test_programs/compile_success_empty/trait_override_implementation/src/main.nr index a385efc63fd..85528291870 100644 --- a/noir/test_programs/compile_success_empty/trait_override_implementation/src/main.nr +++ b/noir/test_programs/compile_success_empty/trait_override_implementation/src/main.nr @@ -1,7 +1,7 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; fn method2(x: Field) -> Field { x @@ -13,8 +13,8 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } @@ -25,18 +25,18 @@ impl Default for Foo { trait F { fn f1(self) -> Field; - fn f2(self) -> Field { 2 } - fn f3(self) -> Field { 3 } - fn f4(self) -> Field { 4 } - fn f5(self) -> Field { 5 } + fn f2(_self: Self) -> Field { 2 } + fn f3(_self: Self) -> Field { 3 } + fn f4(_self: Self) -> Field { 4 } + fn f5(_self: Self) -> Field { 5 } } struct Bar {} impl F for Bar { - fn f5(self) -> Field { 50 } - fn f1(self) -> Field { 10 } - fn f3(self) -> Field { 30 } + fn f5(_self: Self) -> Field { 50 } + fn f1(_self: Self) -> Field { 10 } + fn f3(_self: Self) -> Field { 30 } } // Impls on mutable references are temporarily disabled // impl F for &mut Bar { diff --git a/noir/test_programs/compile_success_empty/traits/src/main.nr b/noir/test_programs/compile_success_empty/traits/src/main.nr index 784ff01a883..ed804559fed 100644 --- a/noir/test_programs/compile_success_empty/traits/src/main.nr +++ b/noir/test_programs/compile_success_empty/traits/src/main.nr @@ -1,7 +1,7 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; } struct Foo { @@ -9,13 +9,13 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } } fn main(x: Field, y: Field) { - let first = 
Foo::default(x, y); + let first = Foo::my_default(x, y); assert(first.bar == x); } diff --git a/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr b/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr index a2c873a7e7f..33d68765598 100644 --- a/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr +++ b/noir/test_programs/execution_success/bit_shifts_runtime/src/main.nr @@ -5,4 +5,15 @@ fn main(x: u64, y: u64) { // runtime shifts on runtime values assert(x << y == 128); assert(x >> y == 32); + + // Bit-shift with signed integers + let mut a :i8 = y as i8; + let mut b: i8 = x as i8; + assert(b << 1 == -128); + assert(b >> 2 == 16); + assert(b >> a == 32); + a = -a; + assert(a << 7 == -128); + assert(a << -a == -2); + } diff --git a/noir/test_programs/execution_success/brillig_ecdsa/Nargo.toml b/noir/test_programs/execution_success/brillig_array_eq/Nargo.toml similarity index 67% rename from noir/test_programs/execution_success/brillig_ecdsa/Nargo.toml rename to noir/test_programs/execution_success/brillig_array_eq/Nargo.toml index 972dd9ce93b..62ce392f96b 100644 --- a/noir/test_programs/execution_success/brillig_ecdsa/Nargo.toml +++ b/noir/test_programs/execution_success/brillig_array_eq/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "brillig_ecdsa" +name = "brillig_array_eq" type = "bin" authors = [""] diff --git a/noir/test_programs/execution_success/brillig_array_eq/Prover.toml b/noir/test_programs/execution_success/brillig_array_eq/Prover.toml new file mode 100644 index 00000000000..ecfed7de213 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_array_eq/Prover.toml @@ -0,0 +1,2 @@ +a = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] +b = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] diff --git a/noir/test_programs/execution_success/brillig_array_eq/src/main.nr b/noir/test_programs/execution_success/brillig_array_eq/src/main.nr new file mode 100644 index 00000000000..90f631dbed8 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_array_eq/src/main.nr @@ -0,0 +1,4 @@ +// Simple example of checking where two arrays are equal +unconstrained fn main(a: [Field; 32], b: [Field; 32]) { + assert(a == b); +} diff --git a/noir/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml b/noir/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml new file mode 100644 index 00000000000..495a49f2247 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_ecdsa_secp256k1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/test_programs/execution_success/brillig_ecdsa/Prover.toml b/noir/test_programs/execution_success/brillig_ecdsa_secp256k1/Prover.toml similarity index 100% rename from noir/test_programs/execution_success/brillig_ecdsa/Prover.toml rename to noir/test_programs/execution_success/brillig_ecdsa_secp256k1/Prover.toml diff --git a/noir/test_programs/execution_success/brillig_ecdsa/src/main.nr b/noir/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr similarity index 52% rename from noir/test_programs/execution_success/brillig_ecdsa/src/main.nr rename to noir/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr index 23f017aa336..5d84d885567 100644 --- a/noir/test_programs/execution_success/brillig_ecdsa/src/main.nr +++ 
b/noir/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr @@ -2,10 +2,15 @@ use dep::std; // Tests a very simple program. // // The features being tested is ecdsa in brillig -fn main(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); } -unconstrained fn ecdsa(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) -> bool { +unconstrained fn ecdsa( + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] +) -> bool { std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) } diff --git a/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml b/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml new file mode 100644 index 00000000000..0a71e782104 --- /dev/null +++ b/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_ecdsa_secp256r1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml b/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml new file mode 100644 index 00000000000..a45f799877b --- /dev/null +++ b/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml @@ -0,0 +1,20 @@ +hashed_message = [ + 84, 112, 91, 163, 186, 175, 219, 223, 186, 140, 95, 154, 112, 247, 168, 155, 238, 152, + 217, 6, 181, 62, 49, 7, 77, 167, 186, 236, 220, 13, 169, 173, +] +pub_key_x = [ + 85, 15, 71, 16, 3, 243, 223, 151, 195, 223, 80, 106, 199, 151, 246, 114, 31, 177, 161, + 251, 123, 143, 111, 131, 210, 36, 73, 138, 101, 200, 142, 36, +] +pub_key_y = [ + 19, 96, 147, 215, 1, 46, 80, 154, 115, 113, 92, 189, 11, 0, 163, 204, 15, 244, 181, + 192, 27, 63, 250, 25, 106, 177, 251, 50, 112, 54, 184, 230, +] +signature = [ + 44, 112, 168, 208, 132, 182, 43, 252, 92, 224, 54, 65, 202, 249, 247, 42, + 212, 218, 140, 129, 191, 230, 236, 148, 135, 187, 94, 27, 239, 98, 161, 50, + 24, 173, 158, 226, 158, 175, 53, 31, 220, 80, 241, 82, 12, 66, 94, 155, + 144, 138, 7, 39, 139, 67, 176, 236, 123, 135, 39, 120, 193, 78, 7, 132 +] + + diff --git a/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr b/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr new file mode 100644 index 00000000000..9da07f531aa --- /dev/null +++ b/noir/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr @@ -0,0 +1,16 @@ +use dep::std; +// Tests a very simple program. 
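+// (secp256r1 counterpart of the brillig_ecdsa_secp256k1 test above: the unconstrained helper delegates to std::ecdsa_secp256r1::verify_signature.)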
+// +// The features being tested is ecdsa in brillig +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { + assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); +} + +unconstrained fn ecdsa( + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] +) -> bool { + std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) +} diff --git a/noir/test_programs/compile_success_empty/brillig_set_slice_of_slice/Nargo.toml b/noir/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml similarity index 100% rename from noir/test_programs/compile_success_empty/brillig_set_slice_of_slice/Nargo.toml rename to noir/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml diff --git a/noir/test_programs/compile_success_empty/brillig_set_slice_of_slice/src/main.nr b/noir/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr similarity index 100% rename from noir/test_programs/compile_success_empty/brillig_set_slice_of_slice/src/main.nr rename to noir/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr diff --git a/noir/test_programs/compile_success_empty/brillig_to_bits/Nargo.toml b/noir/test_programs/execution_success/brillig_to_bits/Nargo.toml similarity index 100% rename from noir/test_programs/compile_success_empty/brillig_to_bits/Nargo.toml rename to noir/test_programs/execution_success/brillig_to_bits/Nargo.toml diff --git a/noir/test_programs/compile_success_empty/brillig_to_bits/src/main.nr b/noir/test_programs/execution_success/brillig_to_bits/src/main.nr similarity index 100% rename from noir/test_programs/compile_success_empty/brillig_to_bits/src/main.nr rename to noir/test_programs/execution_success/brillig_to_bits/src/main.nr diff --git a/noir/test_programs/execution_success/global_consts/src/main.nr b/noir/test_programs/execution_success/global_consts/src/main.nr index a749ec77da6..70c7a745a22 100644 --- a/noir/test_programs/execution_success/global_consts/src/main.nr +++ b/noir/test_programs/execution_success/global_consts/src/main.nr @@ -55,17 +55,17 @@ fn main( let t: [Field; T_LEN] = [N, M]; assert(t[1] == 32); - assert(15 == mysubmodule::my_helper()); + assert(15 == my_submodule::my_helper()); - let add_submodules_N = mysubmodule::N + foo::bar::N; + let add_submodules_N = my_submodule::N + foo::bar::N; assert(15 == add_submodules_N); - let add_from_bar_N = mysubmodule::N + foo::bar::from_bar(1); + let add_from_bar_N = my_submodule::N + foo::bar::from_bar(1); assert(15 == add_from_bar_N); - // Example showing an array filled with (mysubmodule::N + 2) 0's - let sugared = [0; mysubmodule::N + 2]; - assert(sugared[mysubmodule::N + 1] == 0); + // Example showing an array filled with (my_submodule::N + 2) 0's + let sugared = [0; my_submodule::N + 2]; + assert(sugared[my_submodule::N + 1] == 0); - let arr: [Field; mysubmodule::N] = [N; 10]; + let arr: [Field; my_submodule::N] = [N; 10]; assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); @@ -80,7 +80,7 @@ fn arrays_neq(a: [Field; M], b: [Field; M]) { assert(a != b); } -mod mysubmodule { +mod my_submodule { global N: Field = 10; global L: Field = 50; diff --git a/noir/test_programs/execution_success/operator_overloading/Nargo.toml b/noir/test_programs/execution_success/operator_overloading/Nargo.toml new file mode 100644 index 00000000000..7f9f18ff567 --- /dev/null +++ b/noir/test_programs/execution_success/operator_overloading/Nargo.toml @@ -0,0 +1,7 @@ 
+[package] +name = "operator_overloading" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/noir/test_programs/execution_success/operator_overloading/Prover.toml b/noir/test_programs/execution_success/operator_overloading/Prover.toml new file mode 100644 index 00000000000..516b7b4074c --- /dev/null +++ b/noir/test_programs/execution_success/operator_overloading/Prover.toml @@ -0,0 +1,2 @@ +x = 3 +y = 9 diff --git a/noir/test_programs/execution_success/operator_overloading/src/main.nr b/noir/test_programs/execution_success/operator_overloading/src/main.nr new file mode 100644 index 00000000000..3867531abca --- /dev/null +++ b/noir/test_programs/execution_success/operator_overloading/src/main.nr @@ -0,0 +1,154 @@ +use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr }; +use dep::std::cmp::Ordering; + +// x = 3, y = 9 +fn main(x: u32, y: u32) { + let wx = Wrapper::new(x); + let wy = Wrapper::new(y); + + // expected x and expected y values + let ex: u32 = 3; + let ey: u32 = 9; + + assert((wx + wy).inner == ex + ey); + assert((wy - wx).inner == ey - ex); + assert((wx * wy).inner == ex * ey); + assert((wx / wy).inner == ex / ey); + assert((wx % wy).inner == ex % ey); + + assert((wx & wy).inner == (ex & ey)); + assert((wx | wy).inner == (ex | ey)); + assert((wx ^ wy).inner == (ex ^ ey)); + + assert((wy << wx).inner == (ey << ex)); + assert((wy >> wx).inner == (ey >> ex)); + + assert((wx == wy) == (ex == ey)); + assert((wx < wy) == (ex < ey)); + assert((wx <= wy) == (ex <= ey)); + assert((wx > wy) == (ex > ey)); + assert((wx >= wy) == (ex >= ey)); + assert(wx.cmp(wy) == ex.cmp(ey)); + + // Ensure operator overloading still works with more complex types + let pair_ascending = Pair { x: wx, y: wy }; + let pair_descending = Pair { x: wy, y: wx }; + + assert(pair_ascending != pair_descending); + + assert(pair_ascending < pair_descending); + assert(pair_ascending <= pair_descending); + assert(pair_descending > pair_ascending); + assert(pair_descending >= pair_ascending); + + assert(pair_ascending.cmp(pair_descending) == Ordering::less()); +} + +struct Wrapper { + inner: u32 +} + +impl Wrapper { + fn new(inner: u32) -> Self { + Wrapper { inner } + } +} + +impl Add for Wrapper { + fn add(self, other: Self) -> Self { + Wrapper::new(self.inner + other.inner) + } +} + +impl Sub for Wrapper { + fn sub(self, other: Self) -> Self { + Wrapper::new(self.inner - other.inner) + } +} + +impl Mul for Wrapper { + fn mul(self, other: Self) -> Self { + Wrapper::new(self.inner * other.inner) + } +} + +impl Div for Wrapper { + fn div(self, other: Self) -> Self { + Wrapper::new(self.inner / other.inner) + } +} + +impl Rem for Wrapper { + fn rem(self, other: Self) -> Self { + Wrapper::new(self.inner % other.inner) + } +} + +impl BitAnd for Wrapper { + fn bitand(self, other: Self) -> Self { + Wrapper::new(self.inner & other.inner) + } +} + +impl BitOr for Wrapper { + fn bitor(self, other: Self) -> Self { + Wrapper::new(self.inner | other.inner) + } +} + +impl BitXor for Wrapper { + fn bitxor(self, other: Self) -> Self { + Wrapper::new(self.inner ^ other.inner) + } +} + +impl Shl for Wrapper { + fn shl(self, other: Self) -> Self { + Wrapper::new(self.inner << other.inner) + } +} + +impl Shr for Wrapper { + fn shr(self, other: Self) -> Self { + Wrapper::new(self.inner >> other.inner) + } +} + +impl Eq for Wrapper { + fn eq(self, other: Self) -> bool { + self.inner == other.inner + } +} + +impl Ord for Wrapper { + fn cmp(self, other: Self) -> Ordering { + 
self.inner.cmp(other.inner) + } +} + + + + + +struct Pair { + x: Wrapper, + y: Wrapper, +} + +impl Eq for Pair { + fn eq(self, o: Self) -> bool { + (self.x == o.x) & (self.y == o.y) + } +} + +impl Ord for Pair { + fn cmp(self, o: Self) -> Ordering { + let mut result = self.x.cmp(o.x); + + if result == Ordering::equal() { + result = self.y.cmp(o.y); + } + + result + } +} diff --git a/noir/test_programs/execution_success/regression_3889/Nargo.toml b/noir/test_programs/execution_success/regression_3889/Nargo.toml new file mode 100644 index 00000000000..d212d24473f --- /dev/null +++ b/noir/test_programs/execution_success/regression_3889/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_3889" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/test_programs/execution_success/regression_3889/Prover.toml b/noir/test_programs/execution_success/regression_3889/Prover.toml new file mode 100644 index 00000000000..a81ab67fe3e --- /dev/null +++ b/noir/test_programs/execution_success/regression_3889/Prover.toml @@ -0,0 +1,10 @@ +[works] +a = "5" + +[fails] +a = "6" + + +[also_fails] +a = "7" + diff --git a/noir/test_programs/execution_success/regression_3889/src/main.nr b/noir/test_programs/execution_success/regression_3889/src/main.nr new file mode 100644 index 00000000000..10b8ecabee3 --- /dev/null +++ b/noir/test_programs/execution_success/regression_3889/src/main.nr @@ -0,0 +1,23 @@ +mod Foo { + struct NewType{ + a: Field, + } +} + +mod Bar { + use crate::Foo::NewType as BarStruct; + use crate::Foo::NewType; +} + +mod Baz { + struct Works { + a: Field, + } + use crate::Bar::BarStruct; + use crate::Bar::NewType; +} + + +fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { + works.a + fails.a + also_fails.a +} diff --git a/noir/test_programs/execution_success/signed_comparison/Nargo.toml b/noir/test_programs/execution_success/signed_comparison/Nargo.toml new file mode 100644 index 00000000000..c8de162877b --- /dev/null +++ b/noir/test_programs/execution_success/signed_comparison/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "signed_comparison" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/test_programs/execution_success/signed_comparison/Prover.toml b/noir/test_programs/execution_success/signed_comparison/Prover.toml new file mode 100644 index 00000000000..e0e584b7380 --- /dev/null +++ b/noir/test_programs/execution_success/signed_comparison/Prover.toml @@ -0,0 +1,3 @@ +x = "5" +y = "8" +z = "-15" diff --git a/noir/test_programs/execution_success/signed_comparison/src/main.nr b/noir/test_programs/execution_success/signed_comparison/src/main.nr new file mode 100644 index 00000000000..d020be380fb --- /dev/null +++ b/noir/test_programs/execution_success/signed_comparison/src/main.nr @@ -0,0 +1,13 @@ +use dep::std; + +fn main(mut x: i8, mut y: i8, z: i8) { + let mut s1: i8 = 5; + let mut s2: i8 = 8; + assert(-1 as i8 < 0); + assert(x < y); + assert(-x < y); + assert(-y < -x); + assert((z > x) == false); + assert(x <= s1); + assert(z < x - y - s2); +} diff --git a/noir/test_programs/execution_success/submodules/src/main.nr b/noir/test_programs/execution_success/submodules/src/main.nr index 813f3a26a20..f937af74627 100644 --- a/noir/test_programs/execution_success/submodules/src/main.nr +++ b/noir/test_programs/execution_success/submodules/src/main.nr @@ -1,11 +1,11 @@ -use mysubmodule::my_helper; +use my_submodule::my_helper; fn main(x: u1, y: u1) { my_helper(); - mysubmodule::my_bool_or(x, y); + 
my_submodule::my_bool_or(x, y); } -mod mysubmodule { +mod my_submodule { pub fn my_bool_or(x: u1, y: u1) { assert(x | y == 1); } diff --git a/noir/test_programs/compile_success_empty/unconstrained_empty/Nargo.toml b/noir/test_programs/execution_success/unconstrained_empty/Nargo.toml similarity index 100% rename from noir/test_programs/compile_success_empty/unconstrained_empty/Nargo.toml rename to noir/test_programs/execution_success/unconstrained_empty/Nargo.toml diff --git a/noir/test_programs/compile_success_empty/unconstrained_empty/src/main.nr b/noir/test_programs/execution_success/unconstrained_empty/src/main.nr similarity index 100% rename from noir/test_programs/compile_success_empty/unconstrained_empty/src/main.nr rename to noir/test_programs/execution_success/unconstrained_empty/src/main.nr diff --git a/noir/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr b/noir/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr index a677b10b0cd..253e999ce07 100644 --- a/noir/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr +++ b/noir/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr @@ -10,5 +10,6 @@ fn test_with_extra_space() { // The assert message has a space #[test(should_fail_with = "Not equal")] fn test_runtime_mismatch() { - assert_eq(dep::std::hash::pedersen_commitment([27])[0], 0, "Not equal "); + // We use a pedersen commitment here so that the assertion failure is only known at runtime. + assert_eq(dep::std::hash::pedersen_commitment([27]).x, 0, "Not equal "); } diff --git a/noir/tooling/backend_interface/Cargo.toml b/noir/tooling/backend_interface/Cargo.toml index 32c5d28e3b0..2d991f9ae6c 100644 --- a/noir/tooling/backend_interface/Cargo.toml +++ b/noir/tooling/backend_interface/Cargo.toml @@ -16,7 +16,7 @@ thiserror.workspace = true serde.workspace = true serde_json.workspace = true bb_abstraction_leaks.workspace = true -log.workspace = true +tracing.workspace = true tempfile.workspace = true diff --git a/noir/tooling/backend_interface/src/cli/write_vk.rs b/noir/tooling/backend_interface/src/cli/write_vk.rs index 8d4aa9cc7e3..da9fc04cbef 100644 --- a/noir/tooling/backend_interface/src/cli/write_vk.rs +++ b/noir/tooling/backend_interface/src/cli/write_vk.rs @@ -12,6 +12,7 @@ pub(crate) struct WriteVkCommand { } impl WriteVkCommand { + #[tracing::instrument(level = "trace", name = "vk_generation", skip_all)] pub(crate) fn run(self, binary_path: &Path) -> Result<(), BackendError> { let mut command = std::process::Command::new(binary_path); diff --git a/noir/tooling/backend_interface/src/lib.rs b/noir/tooling/backend_interface/src/lib.rs index 8ed164fc217..c15383086eb 100644 --- a/noir/tooling/backend_interface/src/lib.rs +++ b/noir/tooling/backend_interface/src/lib.rs @@ -12,6 +12,7 @@ pub use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; use bb_abstraction_leaks::BB_VERSION; use cli::VersionCommand; pub use download::download_backend; +use tracing::warn; const BACKENDS_DIR: &str = ".nargo/backends"; @@ -115,7 +116,7 @@ impl Backend { // If version doesn't match then download the correct version. Ok(version_string) => { - log::warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version..."); + warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. 
Downloading expected version..."); let bb_url = std::env::var("BB_BINARY_URL") .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; @@ -123,7 +124,7 @@ impl Backend { // If `bb` fails to report its version, then attempt to fix it by re-downloading the binary. Err(_) => { - log::warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version..."); + warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version..."); let bb_url = std::env::var("BB_BINARY_URL") .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; diff --git a/noir/tooling/backend_interface/src/proof_system.rs b/noir/tooling/backend_interface/src/proof_system.rs index 01842a81da9..595cd7e2020 100644 --- a/noir/tooling/backend_interface/src/proof_system.rs +++ b/noir/tooling/backend_interface/src/proof_system.rs @@ -6,6 +6,7 @@ use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; use acvm::ExpressionWidth; use acvm::FieldElement; use tempfile::tempdir; +use tracing::warn; use crate::cli::{ GatesCommand, InfoCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, @@ -42,13 +43,14 @@ impl Backend { if let Ok(expression_width) = self.get_backend_info() { expression_width } else { - log::warn!( + warn!( "No valid backend found, ExpressionWidth defaulting to Bounded with a width of 3" ); ExpressionWidth::Bounded { width: 3 } } } + #[tracing::instrument(level = "trace", skip_all)] pub fn prove( &self, circuit: &Circuit, @@ -89,6 +91,7 @@ impl Backend { Ok(proof) } + #[tracing::instrument(level = "trace", skip_all)] pub fn verify( &self, proof: &[u8], diff --git a/noir/tooling/backend_interface/src/smart_contract.rs b/noir/tooling/backend_interface/src/smart_contract.rs index 5dac57c4072..2548079f8e3 100644 --- a/noir/tooling/backend_interface/src/smart_contract.rs +++ b/noir/tooling/backend_interface/src/smart_contract.rs @@ -47,7 +47,7 @@ mod tests { #[test] fn test_smart_contract() -> Result<(), BackendError> { let expression = &(Witness(1) + Witness(2)) - &Expression::from(Witness(3)); - let constraint = Opcode::Arithmetic(expression); + let constraint = Opcode::AssertZero(expression); let circuit = Circuit { current_witness_index: 4, diff --git a/noir/tooling/debugger/Cargo.toml b/noir/tooling/debugger/Cargo.toml index 53c71754da4..4d37f801d78 100644 --- a/noir/tooling/debugger/Cargo.toml +++ b/noir/tooling/debugger/Cargo.toml @@ -6,14 +6,26 @@ authors.workspace = true edition.workspace = true license.workspace = true -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[build-dependencies] +rustc_version = "0.4.0" +build-data.workspace = true [dependencies] acvm.workspace = true nargo.workspace = true noirc_printable_type.workspace = true noirc_errors.workspace = true +noirc_driver.workspace = true +fm.workspace = true thiserror.workspace = true codespan-reporting.workspace = true +dap.workspace = true easy-repl = "0.2.1" owo-colors = "3" +serde_json.workspace = true + +[dev-dependencies] +assert_cmd = "2.0.12" +rexpect = "0.5.0" +test-binary = "3.0.1" +tempfile.workspace = true diff --git a/noir/tooling/debugger/README.md b/noir/tooling/debugger/README.md index 964784cb730..0ec3b6f0cd4 100644 --- a/noir/tooling/debugger/README.md +++ b/noir/tooling/debugger/README.md @@ -315,25 +315,105 @@ Finished execution Upon quitting the debugger after a solved circuit, the resulting circuit 
witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. -## VS Code extension -We're working on extending Noir's official VS Code extension so it uses the editor's debugger facilities to debug Noir programs. +# Testing experimental features -This section will soon show how to load the extension from sources, in order to test the debugger. +There's a number of features that are in active development and that can't yet be merged to the main branch for different reasons. In this section we detail what those features are and how to try them out. +## Build from experimental branch at fork -## Variable value inspection (unstable) +Build Nargo by pulling the source version from https://github.com/manastech/noir/tree/dap-with-vars. -To enable the inspection of variable values at runtime from the debugger, we're in the process of instrumenting the compiler to track and collect the necessary mappings between source code level variable names and ACIR/Brillig state. +This will result in a Nargo binary being written to `PROJECT_ROOT/target/debug/nargo`. We will use this path later, so keep it at hand or export it to a an env var. For example: -At the time of writing, there are still some parts of the language that haven't been fully instrumented, which means certain programs will crash when compiled with this. +`export NARGO_EXP=PROJECT_ROOT/target/debug/nargo` -It is however possible to try out this feature, both from the REPL and VS Code, by building Nargo from branch https://github.com/manastech/noir/tree/dap-with-vars. +## About the experimental features -We'll soon expand this section with details on how to do so for the adventurous. +There are currently 2 experimental features in the debugger: +- Variables inspection +- Stacktrace inspection -## Towards debugging contracts +NOTE: Supporting variables inspection requires extensive instrumentation of the compiler, handling all cases of variable creation, types, and value assignment. At the time of writing this README, some cases are still not supported. For example, if your program uses slices or references, this compiler version might panic when trying to compile them, or at some point during the debugger step-by-step execution. This is the main reason why this feature has not yet been merged into master. + +## Trying out REPL experimental features + +To try out these features, go through the same steps as described at the REPL Debugger section above, but instead of using `nargo debug` use `$NARGO_EXP debug` (assuming you exported your custom built Nargo binary to NARGO_EXP). + +When entering `help` on this version, you'll find two new commands: + +``` +... +stacktrace display the current stack trace +... 
+vars show variable values available at this point + in execution +``` + +Running `vars` will print the current variables in scope, and its current values: + +``` +At /mul_1/src/main.nr:6:5 + 1 // Test unsafe integer multiplication with overflow: 12^8 = 429 981 696 + 2 // The circuit should handle properly the growth of the bit size + 3 fn main(mut x: u32, y: u32, z: u32) { + 4 x *= y; + 5 x *= x; //144 + 6 -> x *= x; //20736 + 7 x *= x; //429 981 696 + 8 assert(x == z); + 9 } +> vars +y:UnsignedInteger { width: 32 }=Field(4), z:UnsignedInteger { width: 32 }=Field(2¹⁶×6561), x:UnsignedInteger { width: 32 }=Field(2⁴×9) +> +``` + +Running `stacktrace` will print information about the current frame in the stacktrace: + +``` +> stacktrace +Frame #0, opcode 12: EXPR [ (1, _5, _5) (-1, _6) 0 ] +At /1_mul/src/main.nr:6:5 + 1 // Test unsafe integer multiplication with overflow: 12^8 = 429 981 696 + 2 // The circuit should handle properly the growth of the bit size + 3 fn main(mut x: u32, y: u32, z: u32) { + 4 x *= y; + 5 x *= x; //144 + 6 -> x *= x; //20736 + 7 x *= x; //429 981 696 + 8 assert(x == z); + 9 } +> +``` + +## Testing the VS Code extension (experimental) + +There is a fork of the official Noir Visual Studio extension which enables the debugger in VS Code. This fork is at: https://github.com/manastech/vscode-noir/tree/dap-support. + +In this section, we'll explain how to test the VS Code Noir debugger combining that extension fork with the experimental features branch discussed above. + +1. First, get a copy of the extension source code from https://github.com/manastech/vscode-noir/tree/dap-support. + +2. Package the extension by running `npm run package`. + +3. Open the root folder of the extension on VS Code. + +4. From VS Code, press fn+F5. This will open a new VS Code window with the extension loaded from source. + +5. Go to Code -> Settings -> Extensions -> Noir Language Server. Look for the property `Nargo Path` and enter the path to the experimental build you got as a result of following the steps at [Trying out REPL experimental features](#trying-out-repl-experimental-features). + +6. At the VS Code sidebar, go to the debugger section (see screenshot). Click "Add configuration". Overwrite the `projectFolder` property with the absolute path to the Nargo project you want to debug. + +Screenshot 2023-12-18 at 14 37 38 + +7. Go to a Noir file you want to debug. Navigate again to the debug section of VS Code, and click the "play" icon. + +The debugger should now have started. Current features exposed to the debugger include different kinds of stepping interactions, variable inspection and stacktraces. At the time of writing, Brillig registers and memory are not being exposed, but they will soon be. + +![Screen Recording 2023-12-18 at 14 14 28](https://github.com/manastech/noir/assets/651693/36b4becb-953a-4158-9c5a-7a185673f54f) + +## Towards debugging contracts ### Contracts Runtime diff --git a/noir/tooling/debugger/build.rs b/noir/tooling/debugger/build.rs new file mode 100644 index 00000000000..5d14ec2bae2 --- /dev/null +++ b/noir/tooling/debugger/build.rs @@ -0,0 +1,74 @@ +use rustc_version::{version, Version}; +use std::fs::File; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::{env, fs}; + +fn check_rustc_version() { + assert!( + version().unwrap() >= Version::parse("1.71.1").unwrap(), + "The minimal supported rustc version is 1.71.1." 
+ ); +} + +const GIT_COMMIT: &&str = &"GIT_COMMIT"; + +fn main() { + // Rebuild if the tests have changed + println!("cargo:rerun-if-changed=tests"); + + check_rustc_version(); + + // Only use build_data if the environment variable isn't set + // The environment variable is always set when working via Nix + if std::env::var(GIT_COMMIT).is_err() { + build_data::set_GIT_COMMIT(); + build_data::set_GIT_DIRTY(); + build_data::no_debug_rebuilds(); + } + + let out_dir = env::var("OUT_DIR").unwrap(); + let destination = Path::new(&out_dir).join("debug.rs"); + let mut test_file = File::create(destination).unwrap(); + + // Try to find the directory that Cargo sets when it is running; otherwise fallback to assuming the CWD + // is the root of the repository and append the crate path + let root_dir = match std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir).parent().unwrap().parent().unwrap().to_path_buf(), + Err(_) => std::env::current_dir().unwrap(), + }; + let test_dir = root_dir.join("test_programs"); + + generate_debugger_tests(&mut test_file, &test_dir); +} + +fn generate_debugger_tests(test_file: &mut File, test_data_dir: &Path) { + let test_sub_dir = "execution_success"; + let test_data_dir = test_data_dir.join(test_sub_dir); + + let test_case_dirs = + fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + + for test_dir in test_case_dirs { + let test_name = + test_dir.file_name().into_string().expect("Directory can't be converted to string"); + if test_name.contains('-') { + panic!( + "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" + ); + }; + let test_dir = &test_dir.path(); + + write!( + test_file, + r#" +#[test] +fn debug_{test_name}() {{ + debugger_execution_success("{test_dir}"); +}} + "#, + test_dir = test_dir.display(), + ) + .expect("Could not write templated test file."); + } +} diff --git a/noir/tooling/debugger/src/context.rs b/noir/tooling/debugger/src/context.rs index 1475827fbea..74224ce3795 100644 --- a/noir/tooling/debugger/src/context.rs +++ b/noir/tooling/debugger/src/context.rs @@ -87,6 +87,109 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { .and_then(|location| self.debug_artifact.debug_symbols[0].opcode_location(location)) } + fn get_opcodes_sizes(&self) -> Vec { + self.get_opcodes() + .iter() + .map(|opcode| match opcode { + Opcode::Brillig(brillig_block) => brillig_block.bytecode.len(), + _ => 1, + }) + .collect() + } + + /// Offsets the given location by the given number of opcodes (including + /// Brillig opcodes). If the offset would move the location outside of a + /// valid circuit location, returns None and the number of remaining + /// opcodes/instructions left which span outside the valid range in the + /// second element of the returned tuple. 
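+ /// + /// For example (see the unit tests below): on a circuit whose first opcode is a Brillig block, offsetting `OpcodeLocation::Acir(0)` by 1 yields `(Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }), 0)`.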
+ pub(super) fn offset_opcode_location( + &self, + location: &Option, + mut offset: i64, + ) -> (Option, i64) { + if offset == 0 { + return (*location, 0); + } + let Some(location) = location else { + return (None, offset); + }; + + let (mut acir_index, mut brillig_index) = match location { + OpcodeLocation::Acir(acir_index) => (*acir_index, 0), + OpcodeLocation::Brillig { acir_index, brillig_index } => (*acir_index, *brillig_index), + }; + let opcode_sizes = self.get_opcodes_sizes(); + if offset > 0 { + while offset > 0 { + let opcode_size = opcode_sizes[acir_index] as i64 - brillig_index as i64; + if offset >= opcode_size { + acir_index += 1; + offset -= opcode_size; + brillig_index = 0; + } else { + brillig_index += offset as usize; + offset = 0; + } + if acir_index >= opcode_sizes.len() { + return (None, offset); + } + } + } else { + while offset < 0 { + if brillig_index > 0 { + if brillig_index > (-offset) as usize { + brillig_index -= (-offset) as usize; + offset = 0; + } else { + offset += brillig_index as i64; + brillig_index = 0; + } + } else { + if acir_index == 0 { + return (None, offset); + } + acir_index -= 1; + let opcode_size = opcode_sizes[acir_index] as i64; + if opcode_size <= -offset { + offset += opcode_size; + } else { + brillig_index = (opcode_size + offset) as usize; + offset = 0; + } + } + } + } + if brillig_index > 0 { + (Some(OpcodeLocation::Brillig { acir_index, brillig_index }), 0) + } else { + (Some(OpcodeLocation::Acir(acir_index)), 0) + } + } + + pub(super) fn render_opcode_at_location(&self, location: &Option) -> String { + let opcodes = self.get_opcodes(); + match location { + None => String::from("invalid"), + Some(OpcodeLocation::Acir(acir_index)) => { + let opcode = &opcodes[*acir_index]; + if let Opcode::Brillig(ref brillig) = opcode { + let first_opcode = &brillig.bytecode[0]; + format!("BRILLIG {first_opcode:?}") + } else { + format!("{opcode:?}") + } + } + Some(OpcodeLocation::Brillig { acir_index, brillig_index }) => { + if let Opcode::Brillig(ref brillig) = opcodes[*acir_index] { + let opcode = &brillig.bytecode[*brillig_index]; + format!(" | {opcode:?}") + } else { + String::from(" | invalid") + } + } + } + } + fn step_brillig_opcode(&mut self) -> DebugCommandResult { let Some(mut solver) = self.brillig_solver.take() else { unreachable!("Missing Brillig solver"); @@ -311,6 +414,10 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { self.breakpoints.iter() } + pub(super) fn clear_breakpoints(&mut self) { + self.breakpoints.clear(); + } + pub(super) fn is_solved(&self) -> bool { matches!(self.acvm.get_status(), ACVMStatus::Solved) } @@ -327,7 +434,10 @@ mod tests { use acvm::{ acir::{ - circuit::brillig::{Brillig, BrilligInputs, BrilligOutputs}, + circuit::{ + brillig::{Brillig, BrilligInputs, BrilligOutputs}, + opcodes::BlockId, + }, native_types::Expression, }, brillig_vm::brillig::{ @@ -419,7 +529,7 @@ mod tests { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); assert_eq!(context.get_current_opcode_location(), Some(OpcodeLocation::Acir(0))); @@ -492,7 +602,7 @@ mod tests { // z = x + y Opcode::Brillig(brillig_opcodes), // x + y - z = 0 - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, @@ -513,7 +623,7 @@ mod tests { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + 
Box::new(DefaultForeignCallExecutor::new(true, None)), ); // set breakpoint @@ -535,4 +645,116 @@ mod tests { assert!(matches!(result, DebugCommandResult::Done)); assert_eq!(context.get_current_opcode_location(), None); } + + #[test] + fn test_offset_opcode_location() { + let blackbox_solver = &StubbedSolver; + let opcodes = vec![ + Opcode::Brillig(Brillig { + inputs: vec![], + outputs: vec![], + bytecode: vec![BrilligOpcode::Stop, BrilligOpcode::Stop, BrilligOpcode::Stop], + predicate: None, + }), + Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, + Opcode::Brillig(Brillig { + inputs: vec![], + outputs: vec![], + bytecode: vec![BrilligOpcode::Stop, BrilligOpcode::Stop, BrilligOpcode::Stop], + predicate: None, + }), + Opcode::AssertZero(Expression::default()), + ]; + let circuit = Circuit { opcodes, ..Circuit::default() }; + let debug_artifact = + DebugArtifact { debug_symbols: vec![], file_map: BTreeMap::new(), warnings: vec![] }; + let context = DebugContext::new( + blackbox_solver, + &circuit, + &debug_artifact, + WitnessMap::new(), + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); + + assert_eq!(context.offset_opcode_location(&None, 0), (None, 0)); + assert_eq!(context.offset_opcode_location(&None, 2), (None, 2)); + assert_eq!(context.offset_opcode_location(&None, -2), (None, -2)); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 0), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 1), + (Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 2), + (Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 3), + (Some(OpcodeLocation::Acir(1)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 4), + (Some(OpcodeLocation::Acir(2)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 5), + (Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 1 }), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 7), + (Some(OpcodeLocation::Acir(3)), 0) + ); + assert_eq!(context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 8), (None, 0)); + assert_eq!(context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), 20), (None, 12)); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(1)), 2), + (Some(OpcodeLocation::Brillig { acir_index: 2, brillig_index: 1 }), 0) + ); + assert_eq!(context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), -1), (None, -1)); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(0)), -10), + (None, -10) + ); + + assert_eq!( + context.offset_opcode_location( + &Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 1 }), + -1 + ), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location( + &Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), + -2 + ), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(1)), -3), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(2)), -4), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location( + &Some(OpcodeLocation::Brillig { acir_index: 2, 
brillig_index: 1 }), + -5 + ), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(3)), -7), + (Some(OpcodeLocation::Acir(0)), 0) + ); + assert_eq!( + context.offset_opcode_location(&Some(OpcodeLocation::Acir(2)), -2), + (Some(OpcodeLocation::Brillig { acir_index: 0, brillig_index: 2 }), 0) + ); + } } diff --git a/noir/tooling/debugger/src/dap.rs b/noir/tooling/debugger/src/dap.rs new file mode 100644 index 00000000000..803f9f108db --- /dev/null +++ b/noir/tooling/debugger/src/dap.rs @@ -0,0 +1,568 @@ +use std::collections::BTreeMap; +use std::io::{Read, Write}; +use std::str::FromStr; + +use acvm::acir::circuit::{Circuit, OpcodeLocation}; +use acvm::acir::native_types::WitnessMap; +use acvm::BlackBoxFunctionSolver; +use codespan_reporting::files::{Files, SimpleFile}; + +use crate::context::DebugCommandResult; +use crate::context::DebugContext; + +use dap::errors::ServerError; +use dap::events::StoppedEventBody; +use dap::prelude::Event; +use dap::requests::{Command, Request, SetBreakpointsArguments}; +use dap::responses::{ + ContinueResponse, DisassembleResponse, ResponseBody, ScopesResponse, SetBreakpointsResponse, + SetExceptionBreakpointsResponse, SetInstructionBreakpointsResponse, StackTraceResponse, + ThreadsResponse, +}; +use dap::server::Server; +use dap::types::{ + Breakpoint, DisassembledInstruction, Source, StackFrame, SteppingGranularity, + StoppedEventReason, Thread, +}; +use nargo::artifacts::debug::DebugArtifact; +use nargo::ops::DefaultForeignCallExecutor; + +use fm::FileId; +use noirc_driver::CompiledProgram; + +pub struct DapSession<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> { + server: Server, + context: DebugContext<'a, B>, + debug_artifact: &'a DebugArtifact, + running: bool, + source_to_opcodes: BTreeMap>, + next_breakpoint_id: i64, + instruction_breakpoints: Vec<(OpcodeLocation, i64)>, + source_breakpoints: BTreeMap>, +} + +// BTreeMap + +impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { + pub fn new( + server: Server, + solver: &'a B, + circuit: &'a Circuit, + debug_artifact: &'a DebugArtifact, + initial_witness: WitnessMap, + ) -> Self { + let source_to_opcodes = Self::build_source_to_opcode_debug_mappings(debug_artifact); + let context = DebugContext::new( + solver, + circuit, + debug_artifact, + initial_witness, + Box::new(DefaultForeignCallExecutor::new(true, None)), + ); + Self { + server, + context, + debug_artifact, + source_to_opcodes, + running: false, + next_breakpoint_id: 1, + instruction_breakpoints: vec![], + source_breakpoints: BTreeMap::new(), + } + } + + /// Builds a map from FileId to an ordered vector of tuples with line + /// numbers and opcode locations corresponding to those line numbers + fn build_source_to_opcode_debug_mappings( + debug_artifact: &'a DebugArtifact, + ) -> BTreeMap> { + if debug_artifact.debug_symbols.is_empty() { + return BTreeMap::new(); + } + let locations = &debug_artifact.debug_symbols[0].locations; + let simple_files: BTreeMap<_, _> = debug_artifact + .file_map + .iter() + .map(|(file_id, debug_file)| { + ( + file_id, + SimpleFile::new(debug_file.path.to_str().unwrap(), debug_file.source.as_str()), + ) + }) + .collect(); + + let mut result: BTreeMap> = BTreeMap::new(); + locations.iter().for_each(|(opcode_location, source_locations)| { + if source_locations.is_empty() { + return; + } + let source_location = source_locations[0]; + let span = source_location.span; + let file_id = source_location.file; + let Ok(line_index) = 
&simple_files[&file_id].line_index((), span.start() as usize) else { + return; + }; + let line_number = line_index + 1; + + result.entry(file_id).or_default().push((line_number, *opcode_location)); + }); + result.iter_mut().for_each(|(_, file_locations)| file_locations.sort_by_key(|x| x.0)); + result + } + + fn send_stopped_event(&mut self, reason: StoppedEventReason) -> Result<(), ServerError> { + let description = format!("{:?}", &reason); + self.server.send_event(Event::Stopped(StoppedEventBody { + reason, + description: Some(description), + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: None, + }))?; + Ok(()) + } + + pub fn run_loop(&mut self) -> Result<(), ServerError> { + self.running = true; + + if matches!(self.context.get_current_source_location(), None) { + // TODO: remove this? This is to ensure that the tool has a proper + // source location to show when first starting the debugger, but + // maybe the default behavior should be to start executing until the + // first breakpoint set. + _ = self.context.next(); + } + + self.server.send_event(Event::Initialized)?; + self.send_stopped_event(StoppedEventReason::Entry)?; + + while self.running { + let req = match self.server.poll_request()? { + Some(req) => req, + None => break, + }; + match req.command { + Command::Disconnect(_) => { + eprintln!("INFO: ending debugging session"); + self.server.respond(req.ack()?)?; + break; + } + Command::SetBreakpoints(_) => { + self.handle_set_source_breakpoints(req)?; + } + Command::SetExceptionBreakpoints(_) => { + self.server.respond(req.success(ResponseBody::SetExceptionBreakpoints( + SetExceptionBreakpointsResponse { breakpoints: None }, + )))?; + } + Command::SetInstructionBreakpoints(_) => { + self.handle_set_instruction_breakpoints(req)?; + } + Command::Threads => { + self.server.respond(req.success(ResponseBody::Threads(ThreadsResponse { + threads: vec![Thread { id: 0, name: "main".to_string() }], + })))?; + } + Command::StackTrace(_) => { + self.handle_stack_trace(req)?; + } + Command::Disassemble(_) => { + self.handle_disassemble(req)?; + } + Command::StepIn(ref args) => { + let granularity = + args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); + match granularity { + SteppingGranularity::Instruction => self.handle_step(req)?, + _ => self.handle_next(req)?, + } + } + Command::StepOut(ref args) => { + let granularity = + args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); + match granularity { + SteppingGranularity::Instruction => self.handle_step(req)?, + _ => self.handle_next(req)?, + } + } + Command::Next(ref args) => { + let granularity = + args.granularity.as_ref().unwrap_or(&SteppingGranularity::Statement); + match granularity { + SteppingGranularity::Instruction => self.handle_step(req)?, + _ => self.handle_next(req)?, + } + } + Command::Continue(_) => { + self.handle_continue(req)?; + } + Command::Scopes(_) => { + // FIXME: this needs a proper implementation when we can + // show the parameters and variables + self.server.respond( + req.success(ResponseBody::Scopes(ScopesResponse { scopes: vec![] })), + )?; + } + _ => { + eprintln!("ERROR: unhandled command: {:?}", req.command); + } + } + } + Ok(()) + } + + fn handle_stack_trace(&mut self, req: Request) -> Result<(), ServerError> { + let opcode_location = self.context.get_current_opcode_location(); + let source_location = self.context.get_current_source_location(); + let frames = match source_location { + None => 
vec![], + Some(locations) => locations + .iter() + .enumerate() + .map(|(index, location)| { + let line_number = self.debug_artifact.location_line_number(*location).unwrap(); + let column_number = + self.debug_artifact.location_column_number(*location).unwrap(); + let ip_reference = opcode_location.map(|location| location.to_string()); + StackFrame { + id: index as i64, + name: format!("frame #{index}"), + source: Some(Source { + path: self.debug_artifact.file_map[&location.file] + .path + .to_str() + .map(String::from), + ..Source::default() + }), + line: line_number as i64, + column: column_number as i64, + instruction_pointer_reference: ip_reference, + ..StackFrame::default() + } + }) + .collect(), + }; + let total_frames = Some(frames.len() as i64); + self.server.respond(req.success(ResponseBody::StackTrace(StackTraceResponse { + stack_frames: frames, + total_frames, + })))?; + Ok(()) + } + + fn handle_disassemble(&mut self, req: Request) -> Result<(), ServerError> { + let Command::Disassemble(ref args) = req.command else { + unreachable!("handle_disassemble called on a non disassemble request"); + }; + let starting_ip = OpcodeLocation::from_str(args.memory_reference.as_str()).ok(); + let instruction_offset = args.instruction_offset.unwrap_or(0); + let (mut opcode_location, mut invalid_count) = + self.context.offset_opcode_location(&starting_ip, instruction_offset); + let mut count = args.instruction_count; + + let mut instructions: Vec = vec![]; + + // leading invalid locations (when the request goes back + // beyond the start of the program) + if invalid_count < 0 { + while invalid_count < 0 { + instructions.push(DisassembledInstruction { + address: String::from("---"), + instruction: String::from("---"), + ..DisassembledInstruction::default() + }); + invalid_count += 1; + count -= 1; + } + if count > 0 { + opcode_location = Some(OpcodeLocation::Acir(0)); + } + } + // the actual opcodes + while count > 0 && !matches!(opcode_location, None) { + instructions.push(DisassembledInstruction { + address: format!("{}", opcode_location.unwrap()), + instruction: self.context.render_opcode_at_location(&opcode_location), + ..DisassembledInstruction::default() + }); + (opcode_location, _) = self.context.offset_opcode_location(&opcode_location, 1); + count -= 1; + } + // any remaining instruction count is beyond the valid opcode + // vector so return invalid placeholders + while count > 0 { + instructions.push(DisassembledInstruction { + address: String::from("---"), + instruction: String::from("---"), + ..DisassembledInstruction::default() + }); + invalid_count -= 1; + count -= 1; + } + + self.server.respond( + req.success(ResponseBody::Disassemble(DisassembleResponse { instructions })), + )?; + Ok(()) + } + + fn handle_step(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.step_into_opcode(); + eprintln!("INFO: stepped by instruction with result {result:?}"); + self.server.respond(req.ack()?)?; + self.handle_execution_result(result) + } + + fn handle_next(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.next(); + eprintln!("INFO: stepped by statement with result {result:?}"); + self.server.respond(req.ack()?)?; + self.handle_execution_result(result) + } + + fn handle_continue(&mut self, req: Request) -> Result<(), ServerError> { + let result = self.context.cont(); + eprintln!("INFO: continue with result {result:?}"); + self.server.respond(req.success(ResponseBody::Continue(ContinueResponse { + all_threads_continued: Some(true), + 
})))?; + self.handle_execution_result(result) + } + + fn find_breakpoints_at_location(&self, opcode_location: &OpcodeLocation) -> Vec { + let mut result = vec![]; + for (location, id) in &self.instruction_breakpoints { + if opcode_location == location { + result.push(*id); + } + } + for breakpoints in self.source_breakpoints.values() { + for (location, id) in breakpoints { + if opcode_location == location { + result.push(*id); + } + } + } + result + } + + fn handle_execution_result(&mut self, result: DebugCommandResult) -> Result<(), ServerError> { + match result { + DebugCommandResult::Done => { + self.running = false; + } + DebugCommandResult::Ok => { + self.server.send_event(Event::Stopped(StoppedEventBody { + reason: StoppedEventReason::Pause, + description: None, + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: None, + }))?; + } + DebugCommandResult::BreakpointReached(location) => { + let breakpoint_ids = self.find_breakpoints_at_location(&location); + self.server.send_event(Event::Stopped(StoppedEventBody { + reason: StoppedEventReason::Breakpoint, + description: Some(String::from("Paused at breakpoint")), + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: Some(breakpoint_ids), + }))?; + } + DebugCommandResult::Error(err) => { + self.server.send_event(Event::Stopped(StoppedEventBody { + reason: StoppedEventReason::Exception, + description: Some(format!("{err:?}")), + thread_id: Some(0), + preserve_focus_hint: Some(false), + text: None, + all_threads_stopped: Some(false), + hit_breakpoint_ids: None, + }))?; + } + } + Ok(()) + } + + fn get_next_breakpoint_id(&mut self) -> i64 { + let id = self.next_breakpoint_id; + self.next_breakpoint_id += 1; + id + } + + fn reinstall_breakpoints(&mut self) { + self.context.clear_breakpoints(); + for (location, _) in &self.instruction_breakpoints { + self.context.add_breakpoint(*location); + } + for breakpoints in self.source_breakpoints.values() { + for (location, _) in breakpoints { + self.context.add_breakpoint(*location); + } + } + } + + fn handle_set_instruction_breakpoints(&mut self, req: Request) -> Result<(), ServerError> { + let Command::SetInstructionBreakpoints(ref args) = req.command else { + unreachable!("handle_set_instruction_breakpoints called on a different request"); + }; + + // compute breakpoints to set and return + let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; + let breakpoints: Vec = args.breakpoints.iter().map(|breakpoint| { + let Ok(location) = OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) else { + return Breakpoint { + verified: false, + message: Some(String::from("Missing instruction reference")), + ..Breakpoint::default() + }; + }; + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let id = self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, id)); + Breakpoint { + id: Some(id), + verified: true, + ..Breakpoint::default() + } + }).collect(); + + // actually set the computed breakpoints + self.instruction_breakpoints = breakpoints_to_set; + self.reinstall_breakpoints(); + + // response to request + self.server.respond(req.success(ResponseBody::SetInstructionBreakpoints( + SetInstructionBreakpointsResponse { breakpoints }, + )))?; + Ok(()) + } + + fn find_file_id(&self, 
source_path: &str) -> Option { + let file_map = &self.debug_artifact.file_map; + let found = file_map.iter().find(|(_, debug_file)| match debug_file.path.to_str() { + Some(debug_file_path) => debug_file_path == source_path, + None => false, + }); + found.map(|iter| *iter.0) + } + + // TODO: there are four possibilities for the return value of this function: + // 1. the source location is not found -> None + // 2. an exact unique location is found -> Some(opcode_location) + // 3. an exact but not unique location is found (ie. a source location may + // be mapped to multiple opcodes, and those may be disjoint, for example for + // functions called multiple times throughout the program) + // 4. exact location is not found, so an opcode for a nearby source location + // is returned (this again could actually be more than one opcodes) + // Case 3 is not supported yet, and 4 is not correctly handled. + fn find_opcode_for_source_location( + &self, + file_id: &FileId, + line: i64, + ) -> Option { + let line = line as usize; + let Some(line_to_opcodes) = self.source_to_opcodes.get(file_id) else { + return None; + }; + let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { + Ok(index) => line_to_opcodes[index].1, + Err(index) => line_to_opcodes[index].1, + }; + Some(found_index) + } + + fn map_source_breakpoints(&mut self, args: &SetBreakpointsArguments) -> Vec { + let Some(ref source) = &args.source.path else { + return vec![]; + }; + let Some(file_id) = self.find_file_id(source) else { + eprintln!("WARN: file ID for source {source} not found"); + return vec![]; + }; + let Some(ref breakpoints) = &args.breakpoints else { + return vec![]; + }; + let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; + let breakpoints = breakpoints + .iter() + .map(|breakpoint| { + let line = breakpoint.line; + let Some(location) = self.find_opcode_for_source_location(&file_id, line) else { + return Breakpoint { + verified: false, + message: Some(String::from("Source location cannot be matched to opcode location")), + ..Breakpoint::default() + }; + }; + // TODO: line will not necessarily be the one requested; we + // should do the reverse mapping and retrieve the actual source + // code line number + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let instruction_reference = format!("{}", location); + let breakpoint_id = self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, breakpoint_id)); + Breakpoint { + id: Some(breakpoint_id), + verified: true, + source: Some(args.source.clone()), + instruction_reference: Some(instruction_reference), + line: Some(line), + ..Breakpoint::default() + } + }) + .collect(); + + self.source_breakpoints.insert(file_id, breakpoints_to_set); + + breakpoints + } + + fn handle_set_source_breakpoints(&mut self, req: Request) -> Result<(), ServerError> { + let Command::SetBreakpoints(ref args) = req.command else { + unreachable!("handle_set_source_breakpoints called on a different request"); + }; + let breakpoints = self.map_source_breakpoints(args); + self.reinstall_breakpoints(); + self.server.respond( + req.success(ResponseBody::SetBreakpoints(SetBreakpointsResponse { breakpoints })), + )?; + Ok(()) + } +} + +pub fn run_session( + server: Server, + solver: &B, + program: CompiledProgram, + initial_witness: WitnessMap, +) -> Result<(), ServerError> { + let debug_artifact = DebugArtifact { + 
debug_symbols: vec![program.debug], + file_map: program.file_map, + warnings: program.warnings, + }; + let mut session = + DapSession::new(server, solver, &program.circuit, &debug_artifact, initial_witness); + + session.run_loop() +} diff --git a/noir/tooling/debugger/src/lib.rs b/noir/tooling/debugger/src/lib.rs index 7c6a9e9f618..21834e44f93 100644 --- a/noir/tooling/debugger/src/lib.rs +++ b/noir/tooling/debugger/src/lib.rs @@ -1,12 +1,19 @@ mod context; +mod dap; mod repl; +mod source_code_printer; +use std::io::{Read, Write}; + +use ::dap::errors::ServerError; +use ::dap::server::Server; use acvm::BlackBoxFunctionSolver; use acvm::{acir::circuit::Circuit, acir::native_types::WitnessMap}; use nargo::artifacts::debug::DebugArtifact; use nargo::NargoError; +use noirc_driver::CompiledProgram; pub fn debug_circuit( blackbox_solver: &B, @@ -16,3 +23,12 @@ pub fn debug_circuit( ) -> Result, NargoError> { repl::run(blackbox_solver, circuit, &debug_artifact, initial_witness) } + +pub fn run_dap_loop( + server: Server, + solver: &B, + program: CompiledProgram, + initial_witness: WitnessMap, +) -> Result<(), ServerError> { + dap::run_session(server, solver, program, initial_witness) +} diff --git a/noir/tooling/debugger/src/repl.rs b/noir/tooling/debugger/src/repl.rs index cb6539cbdb1..b1af2bc2686 100644 --- a/noir/tooling/debugger/src/repl.rs +++ b/noir/tooling/debugger/src/repl.rs @@ -9,12 +9,7 @@ use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor, Na use easy_repl::{command, CommandStatus, Repl}; use std::cell::RefCell; -use codespan_reporting::files::Files; -use noirc_errors::Location; - -use owo_colors::OwoColorize; - -use std::ops::Range; +use crate::source_code_printer::print_source_code_location; pub struct ReplDebugger<'a, B: BlackBoxFunctionSolver> { context: DebugContext<'a, B>, @@ -37,7 +32,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { circuit, debug_artifact, initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); Self { context, @@ -70,73 +65,8 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ); } } - self.show_source_code_location(&location); - } - } - } - - fn print_location_path(&self, loc: Location) { - let line_number = self.debug_artifact.location_line_number(loc).unwrap(); - let column_number = self.debug_artifact.location_column_number(loc).unwrap(); - - println!( - "At {}:{line_number}:{column_number}", - self.debug_artifact.name(loc.file).unwrap() - ); - } - - fn show_source_code_location(&self, location: &OpcodeLocation) { - let locations = self.debug_artifact.debug_symbols[0].opcode_location(location); - let Some(locations) = locations else { return }; - for loc in locations { - self.print_location_path(loc); - - let loc_line_index = self.debug_artifact.location_line_index(loc).unwrap(); - - // How many lines before or after the location's line we - // print - let context_lines = 5; - - let first_line_to_print = - if loc_line_index < context_lines { 0 } else { loc_line_index - context_lines }; - - let last_line_index = self.debug_artifact.last_line_index(loc).unwrap(); - let last_line_to_print = std::cmp::min(loc_line_index + context_lines, last_line_index); - - let source = self.debug_artifact.location_source_code(loc).unwrap(); - for (current_line_index, line) in source.lines().enumerate() { - let current_line_number = current_line_index + 1; - - if current_line_index < first_line_to_print { - // Ignore lines before range starts - 
continue; - } else if current_line_index == first_line_to_print && current_line_index > 0 { - // Denote that there's more lines before but we're not showing them - print_line_of_ellipsis(current_line_index); - } - if current_line_index > last_line_to_print { - // Denote that there's more lines after but we're not showing them, - // and stop printing - print_line_of_ellipsis(current_line_number); - break; - } - - if current_line_index == loc_line_index { - // Highlight current location - let Range { start: loc_start, end: loc_end } = - self.debug_artifact.location_in_line(loc).unwrap(); - println!( - "{:>3} {:2} {}{}{}", - current_line_number, - "->", - &line[0..loc_start].to_string().dimmed(), - &line[loc_start..loc_end], - &line[loc_end..].to_string().dimmed() - ); - } else { - print_dimmed_line(current_line_number, line); - } + print_source_code_location(self.debug_artifact, &location); } } } @@ -278,7 +208,7 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { self.circuit, self.debug_artifact, self.initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultForeignCallExecutor::new(true, None)), ); for opcode_location in breakpoints { self.context.add_breakpoint(opcode_location); @@ -384,14 +314,6 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } } -fn print_line_of_ellipsis(line_number: usize) { - println!("{}", format!("{:>3} {}", line_number, "...").dimmed()); -} - -fn print_dimmed_line(line_number: usize, line: &str) { - println!("{}", format!("{:>3} {:2} {}", line_number, "", line).dimmed()); -} - pub fn run( blackbox_solver: &B, circuit: &Circuit, diff --git a/noir/tooling/debugger/src/source_code_printer.rs b/noir/tooling/debugger/src/source_code_printer.rs new file mode 100644 index 00000000000..1707f9066b7 --- /dev/null +++ b/noir/tooling/debugger/src/source_code_printer.rs @@ -0,0 +1,317 @@ +use acvm::acir::circuit::OpcodeLocation; +use codespan_reporting::files::Files; +use nargo::artifacts::debug::DebugArtifact; +use noirc_errors::Location; +use owo_colors::OwoColorize; +use std::ops::Range; + +#[derive(Debug, PartialEq)] +enum PrintedLine<'a> { + Skip, + Ellipsis { + line_number: usize, + }, + Content { + line_number: usize, + cursor: &'a str, + content: &'a str, + highlight: Option>, + }, +} + +#[derive(Clone, Debug)] +struct LocationPrintContext { + file_lines: Range, + printed_lines: Range, + location_lines: Range, + location_offset_in_first_line: Range, + location_offset_in_last_line: Range, +} + +// Given a DebugArtifact and an OpcodeLocation, prints all the source code +// locations the OpcodeLocation maps to, with some surrounding context and +// visual aids to highlight the location itself. 
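+//
+// A minimal usage sketch, assuming the REPL debugger's fields of the same names
+// (illustrative only; only the call shape is asserted here):
+//
+//     if let Some(location) = context.get_current_opcode_location() {
+//         print_source_code_location(debug_artifact, &location);
+//     }
+//
+// where `context` is a `DebugContext` and `debug_artifact` is a `&DebugArtifact`.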
+pub(crate) fn print_source_code_location( + debug_artifact: &DebugArtifact, + location: &OpcodeLocation, +) { + let locations = debug_artifact.debug_symbols[0].opcode_location(location); + let Some(locations) = locations else { return; }; + + let locations = locations.iter(); + + for loc in locations { + print_location_path(debug_artifact, *loc); + + let lines = render_location(debug_artifact, loc); + + for line in lines { + match line { + PrintedLine::Skip => {} + PrintedLine::Ellipsis { line_number } => print_ellipsis(line_number), + PrintedLine::Content { line_number, cursor, content, highlight } => { + print_content(line_number, cursor, content, highlight) + } + } + } + } +} + +fn print_location_path(debug_artifact: &DebugArtifact, loc: Location) { + let line_number = debug_artifact.location_line_number(loc).unwrap(); + let column_number = debug_artifact.location_column_number(loc).unwrap(); + + println!("At {}:{line_number}:{column_number}", debug_artifact.name(loc.file).unwrap()); +} + +fn print_ellipsis(line_number: usize) { + println!("{:>3} {:2} {}", line_number.dimmed(), "", "...".dimmed()); +} + +fn print_content(line_number: usize, cursor: &str, content: &str, highlight: Option>) { + match highlight { + Some(highlight) => { + println!( + "{:>3} {:2} {}{}{}", + line_number, + cursor, + content[0..highlight.start].to_string().dimmed(), + &content[highlight.start..highlight.end], + content[highlight.end..].to_string().dimmed(), + ); + } + None => { + println!( + "{:>3} {:2} {}", + line_number.dimmed(), + cursor.dimmed(), + content.to_string().dimmed(), + ); + } + } +} + +fn render_line( + current: usize, + content: &str, + loc_context: LocationPrintContext, +) -> PrintedLine<'_> { + let file_lines = loc_context.file_lines; + let printed_lines = loc_context.printed_lines; + let location_lines = loc_context.location_lines; + let line_number = current + 1; + + if current < printed_lines.start { + // Ignore lines before the context window we choose to show + PrintedLine::Skip + } else if 0 < current && current == printed_lines.start && current < location_lines.start { + // Denote that there's more lines before but we're not showing them + PrintedLine::Ellipsis { line_number } + } else if current < location_lines.start { + // Print lines before the location start without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == location_lines.start { + // Highlight current location from where it starts to the end of the current line + PrintedLine::Content { + line_number, + cursor: "->", + content, + highlight: Some(loc_context.location_offset_in_first_line), + } + } else if current < location_lines.end { + // Highlight current line if it's contained by the current location + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(Range { start: 0, end: content.len() }), + } + } else if current == location_lines.end { + // Highlight current location from the beginning of the line until the location's own end + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(loc_context.location_offset_in_last_line), + } + } else if current < printed_lines.end || printed_lines.end == file_lines.end { + // Print lines after the location end without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == printed_lines.end && printed_lines.end < file_lines.end { + // Denote that there's more lines after but we're not showing them + 
PrintedLine::Ellipsis { line_number } + } else { + PrintedLine::Skip + } +} + +// Given a Location in a DebugArtifact, returns a line iterator that specifies how to +// print the location's file. +// +// Consider for example the file (line numbers added to facilitate this doc): +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 +// ``` +// +// If the location to render is `poseidon::bn254::hash_2(x1)`, we'll render the file as: +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 -> assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 ... +// ``` +// +// This is the result of: +// 1. Limiting the amount of printed lines to 5 before and 5 after the location. +// 2. Using ellipsis (...) to denote when some file lines have been left out of the render. +// 3. Using an arrow cursor (->) to denote where the rendered location starts. +// 4. Highlighting the location (here expressed as a block for the sake of the explanation). +// +// Note that locations may span multiple lines, so this function deals with that too. +fn render_location<'a>( + debug_artifact: &'a DebugArtifact, + loc: &'a Location, +) -> impl Iterator> { + let loc = *loc; + + let file_lines = Range { start: 0, end: debug_artifact.last_line_index(loc).unwrap() }; + + // Sub-range of file lines that this location spans + let location_lines = Range { + start: debug_artifact.location_line_index(loc).unwrap(), + end: debug_artifact.location_end_line_index(loc).unwrap(), + }; + + // How many lines before or after the location's lines we print + let context_lines = 5; + + // Sub-range of lines that we'll print, which includes location + context lines + let first_line_to_print = + if location_lines.start < context_lines { 0 } else { location_lines.start - context_lines }; + let last_line_to_print = std::cmp::min(location_lines.end + context_lines, file_lines.end); + let printed_lines = Range { start: first_line_to_print, end: last_line_to_print }; + + // Range of the location relative to its starting and ending lines + let location_offset_in_first_line = debug_artifact.location_in_line(loc).unwrap(); + let location_offset_in_last_line = debug_artifact.location_in_end_line(loc).unwrap(); + + let context = LocationPrintContext { + file_lines, + printed_lines, + location_lines, + location_offset_in_first_line, + location_offset_in_last_line, + }; + + let source = debug_artifact.location_source_code(loc).unwrap(); + source + .lines() + .enumerate() + .map(move |(index, content)| render_line(index, content, context.clone())) +} + +#[cfg(test)] +mod tests { + use crate::source_code_printer::render_location; + use crate::source_code_printer::PrintedLine::Content; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use nargo::artifacts::debug::DebugArtifact; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = 
dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + #[test] + fn render_multiple_line_location() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_rendered: Vec<_> = render_location(&debug_artifact, &loc).collect(); + + assert_eq!( + location_rendered, + vec![ + Content { + line_number: 1, + cursor: "", + content: "pub fn main(mut state: [Field; 2]) -> [Field; 2] {", + highlight: None, + }, + Content { + line_number: 2, + cursor: "->", + content: " state = permute(", + highlight: Some(Range { start: 12, end: 20 }), + }, + Content { + line_number: 3, + cursor: "", + content: " consts::x5_2_config(),", + highlight: Some(Range { start: 0, end: 30 }), + }, + Content { + line_number: 4, + cursor: "", + content: " state);", + highlight: Some(Range { start: 0, end: 14 }), + }, + Content { line_number: 5, cursor: "", content: "", highlight: None }, + Content { line_number: 6, cursor: "", content: " state", highlight: None }, + Content { line_number: 7, cursor: "", content: "}", highlight: None }, + ] + ); + } +} diff --git a/noir/tooling/debugger/tests/debug.rs b/noir/tooling/debugger/tests/debug.rs new file mode 100644 index 00000000000..b2f441f5606 --- /dev/null +++ b/noir/tooling/debugger/tests/debug.rs @@ -0,0 +1,55 @@ +#[cfg(test)] +mod tests { + // Some of these imports are consumed by the injected tests + use assert_cmd::cargo::cargo_bin; + + use rexpect::spawn_bash; + + test_binary::build_test_binary_once!(mock_backend, "../backend_interface/test-binaries"); + + // include tests generated by `build.rs` + include!(concat!(env!("OUT_DIR"), "/debug.rs")); + + pub fn debugger_execution_success(test_program_dir: &str) { + let nargo_bin = + cargo_bin("nargo").into_os_string().into_string().expect("Cannot parse nargo path"); + + let mock_backend_path = + path_to_mock_backend().into_string().expect("Cannot parse mock_backend path"); + + let mut dbg_session = spawn_bash(Some(10000)).expect("Could not start bash session"); + + dbg_session + .send_line(&format!("export NARGO_BACKEND_PATH={}", mock_backend_path)) + .expect("Could not export NARGO_BACKEND_PATH."); + dbg_session.wait_for_prompt().expect("Could not export NARGO_BACKEND_PATH."); + + // Start debugger and test that it loads for the given program. + dbg_session + .execute( + &format!("{} debug --program-dir {}", nargo_bin, test_program_dir), + &format!(".*\\Starting debugger.*"), + ) + .expect("Could not start debugger"); + + // While running the debugger, issue a "continue" cmd, + // which should run to the program to end given + // we haven't set any breakpoints. 
+ // ">" is the debugger's prompt, so finding one + // after running "continue" indicates that the + // debugger has not panicked until the end of the program. + dbg_session + .send_line("c") + .expect("Debugger panicked while attempting to step through program."); + dbg_session + .exp_string(">") + .expect("Failed while waiting for debugger to step through program."); + + // Run the "quit" command, then check that the debugger confirms + // having successfully solved the circuit witness. + dbg_session.send_line("quit").expect("Failed to quit debugger"); + dbg_session + .exp_regex(&format!(".*Circuit witness successfully solved.*")) + .expect("Expected circuit witness to be successfully solved."); + } +} diff --git a/noir/tooling/lsp/Cargo.toml b/noir/tooling/lsp/Cargo.toml index 5f5e701da67..6371bcbac19 100644 --- a/noir/tooling/lsp/Cargo.toml +++ b/noir/tooling/lsp/Cargo.toml @@ -23,6 +23,7 @@ serde_json.workspace = true tower.workspace = true async-lsp = { workspace = true, features = ["omni-trait"] } serde_with = "3.2.0" +thiserror.workspace = true fm.workspace = true [target.'cfg(all(target_arch = "wasm32", not(target_os = "wasi")))'.dependencies] diff --git a/noir/tooling/lsp/src/lib.rs b/noir/tooling/lsp/src/lib.rs index 2ad8096a13f..271e1e40df3 100644 --- a/noir/tooling/lsp/src/lib.rs +++ b/noir/tooling/lsp/src/lib.rs @@ -7,7 +7,7 @@ use std::{ collections::HashMap, future::Future, ops::{self, ControlFlow}, - path::PathBuf, + path::{Path, PathBuf}, pin::Pin, task::{self, Poll}, }; @@ -18,19 +18,26 @@ use async_lsp::{ ResponseError, }; use fm::codespan_files as files; +use lsp_types::CodeLens; +use nargo::workspace::Workspace; +use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{ graph::{CrateId, CrateName}, hir::{Context, FunctionNameMatch}, + node_interner::NodeInterner, }; + use notifications::{ on_did_change_configuration, on_did_change_text_document, on_did_close_text_document, on_did_open_text_document, on_did_save_text_document, on_exit, on_initialized, }; use requests::{ - on_formatting, on_goto_definition_request, on_initialize, on_profile_run_request, on_shutdown, - on_test_run_request, on_tests_request, + on_code_lens_request, on_formatting, on_goto_definition_request, on_initialize, + on_profile_run_request, on_shutdown, on_test_run_request, on_tests_request, }; use serde_json::Value as JsonValue; +use thiserror::Error; use tower::Service; mod notifications; @@ -41,12 +48,22 @@ mod types; use solver::WrapperSolver; use types::{notification, request, NargoTest, NargoTestId, Position, Range, Url}; +#[derive(Debug, Error)] +pub enum LspError { + /// Error while Resolving Workspace. 
+ #[error("Failed to Resolve Workspace - {0}")] + WorkspaceResolutionError(String), +} + // State for the LSP gets implemented on this struct and is internal to the implementation pub struct LspState { root_path: Option, client: ClientSocket, solver: WrapperSolver, + open_documents_count: usize, input_files: HashMap, + cached_lenses: HashMap>, + cached_definitions: HashMap, } impl LspState { @@ -56,6 +73,9 @@ impl LspState { root_path: None, solver: WrapperSolver(Box::new(solver)), input_files: HashMap::new(), + cached_lenses: HashMap::new(), + cached_definitions: HashMap::new(), + open_documents_count: 0, } } } @@ -72,6 +92,7 @@ impl NargoLspService { .request::(on_initialize) .request::(on_formatting) .request::(on_shutdown) + .request::(on_code_lens_request) .request::(on_tests_request) .request::(on_test_run_request) .request::(on_profile_run_request) @@ -175,3 +196,60 @@ fn byte_span_to_range<'a, F: files::Files<'a> + ?Sized>( None } } + +pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result { + let package_root = find_file_manifest(file_path); + + let toml_path = package_root.ok_or_else(|| { + LspError::WorkspaceResolutionError(format!( + "Nargo.toml not found for file: {:?}", + file_path + )) + })?; + + let workspace = resolve_workspace_from_toml( + &toml_path, + PackageSelection::All, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) + .map_err(|err| LspError::WorkspaceResolutionError(err.to_string()))?; + + Ok(workspace) +} + +/// Prepares a package from a source string +/// This is useful for situations when we don't need dependencies +/// and just need to operate on single file. +/// +/// Use case for this is the LSP server and code lenses +/// which operate on single file and need to understand this file +/// in order to offer code lenses to the user +fn prepare_source(source: String) -> (Context<'static>, CrateId) { + let root = Path::new(""); + let file_name = Path::new("main.nr"); + let mut file_manager = file_manager_with_stdlib(root); + file_manager.add_file_with_source(file_name, source).expect( + "Adding source buffer to file manager should never fail when file manager is empty", + ); + + let mut context = Context::new(file_manager); + let root_crate_id = prepare_crate(&mut context, file_name); + + (context, root_crate_id) +} + +#[test] +fn prepare_package_from_source_string() { + let source = r#" + fn main() { + let x = 1; + let y = 2; + let z = x + y; + } + "#; + + let (mut context, crate_id) = crate::prepare_source(source.to_string()); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false); + let main_func_id = context.get_main_function(&crate_id); + assert!(main_func_id.is_some()); +} diff --git a/noir/tooling/lsp/src/notifications/mod.rs b/noir/tooling/lsp/src/notifications/mod.rs index 61f0d231738..aec3bf61f4e 100644 --- a/noir/tooling/lsp/src/notifications/mod.rs +++ b/noir/tooling/lsp/src/notifications/mod.rs @@ -1,19 +1,21 @@ use std::ops::ControlFlow; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use nargo::prepare_package; -use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; +use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; +use crate::requests::collect_lenses_for_package; use crate::types::{ notification, Diagnostic, DiagnosticSeverity, 
DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, - DidSaveTextDocumentParams, InitializedParams, LogMessageParams, MessageType, NargoPackageTests, - PublishDiagnosticsParams, + DidSaveTextDocumentParams, InitializedParams, NargoPackageTests, PublishDiagnosticsParams, }; -use crate::{byte_span_to_range, get_package_tests_in_crate, LspState}; +use crate::{ + byte_span_to_range, get_package_tests_in_crate, prepare_source, + resolve_workspace_for_source_path, LspState, +}; pub(super) fn on_initialized( _state: &mut LspState, @@ -34,7 +36,16 @@ pub(super) fn on_did_open_text_document( params: DidOpenTextDocumentParams, ) -> ControlFlow> { state.input_files.insert(params.text_document.uri.to_string(), params.text_document.text); - ControlFlow::Continue(()) + + let document_uri = params.text_document.uri; + + match process_noir_document(document_uri, state) { + Ok(_) => { + state.open_documents_count += 1; + ControlFlow::Continue(()) + } + Err(err) => ControlFlow::Break(Err(err)), + } } pub(super) fn on_did_change_text_document( @@ -42,7 +53,38 @@ pub(super) fn on_did_change_text_document( params: DidChangeTextDocumentParams, ) -> ControlFlow> { let text = params.content_changes.into_iter().next().unwrap().text; - state.input_files.insert(params.text_document.uri.to_string(), text); + state.input_files.insert(params.text_document.uri.to_string(), text.clone()); + + let (mut context, crate_id) = prepare_source(text); + let _ = check_crate(&mut context, crate_id, false, false); + + let workspace = match resolve_workspace_for_source_path( + params.text_document.uri.to_file_path().unwrap().as_path(), + ) { + Ok(workspace) => workspace, + Err(lsp_error) => { + return ControlFlow::Break(Err(ResponseError::new( + ErrorCode::REQUEST_FAILED, + lsp_error.to_string(), + ) + .into())) + } + }; + let package = match workspace.members.first() { + Some(package) => package, + None => { + return ControlFlow::Break(Err(ResponseError::new( + ErrorCode::REQUEST_FAILED, + "Selected workspace has no members", + ) + .into())) + } + }; + + let lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); + + state.cached_lenses.insert(params.text_document.uri.to_string(), lenses); + ControlFlow::Continue(()) } @@ -51,6 +93,14 @@ pub(super) fn on_did_close_text_document( params: DidCloseTextDocumentParams, ) -> ControlFlow> { state.input_files.remove(¶ms.text_document.uri.to_string()); + state.cached_lenses.remove(¶ms.text_document.uri.to_string()); + + state.open_documents_count -= 1; + + if state.open_documents_count == 0 { + state.cached_definitions.clear(); + } + ControlFlow::Continue(()) } @@ -58,58 +108,41 @@ pub(super) fn on_did_save_text_document( state: &mut LspState, params: DidSaveTextDocumentParams, ) -> ControlFlow> { - let file_path = match params.text_document.uri.to_file_path() { - Ok(file_path) => file_path, - Err(()) => { - return ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - "URI is not a valid file path", - ) - .into())) - } - }; + let document_uri = params.text_document.uri; - let package_root = find_file_manifest(file_path.as_path()); + match process_noir_document(document_uri, state) { + Ok(_) => ControlFlow::Continue(()), + Err(err) => ControlFlow::Break(Err(err)), + } +} - let toml_path = match package_root { - Some(toml_path) => toml_path, - None => { - // If we cannot find a manifest, we log a warning but return no diagnostics - // We can reconsider this when we can build a file 
without the need for a Nargo.toml file to resolve deps - let _ = state.client.log_message(LogMessageParams { - typ: MessageType::WARNING, - message: format!("Nargo.toml not found for file: {:}", file_path.display()), - }); - return ControlFlow::Continue(()); - } - }; +fn process_noir_document( + document_uri: lsp_types::Url, + state: &mut LspState, +) -> Result<(), async_lsp::Error> { + let file_path = document_uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; - let workspace = match resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) { - Ok(workspace) => workspace, - Err(err) => { - // If we found a manifest, but the workspace is invalid, we raise an error about it - return ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - format!("{err}"), - ) - .into())); - } - }; + let workspace = resolve_workspace_for_source_path(&file_path).map_err(|lsp_error| { + ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) + })?; + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); let diagnostics: Vec<_> = workspace .into_iter() .flat_map(|package| -> Vec { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); let file_diagnostics = match check_crate(&mut context, crate_id, false, false) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; + let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); + // We don't add test headings for a package if it contains no `#[test]` functions if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { let _ = state.client.notify::(NargoPackageTests { @@ -118,6 +151,17 @@ pub(super) fn on_did_save_text_document( }); } + let collected_lenses = crate::requests::collect_lenses_for_package( + &context, + crate_id, + &workspace, + package, + Some(&file_path), + ); + state.cached_lenses.insert(document_uri.to_string(), collected_lenses); + + state.cached_definitions.insert(package_root_dir, context.def_interner); + let fm = &context.file_manager; let files = fm.as_file_map(); @@ -152,14 +196,13 @@ pub(super) fn on_did_save_text_document( .collect() }) .collect(); - let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { - uri: params.text_document.uri, + uri: document_uri, version: None, diagnostics, }); - ControlFlow::Continue(()) + Ok(()) } pub(super) fn on_exit( diff --git a/noir/tooling/lsp/src/requests/code_lens_request.rs b/noir/tooling/lsp/src/requests/code_lens_request.rs new file mode 100644 index 00000000000..4b1d38a137e --- /dev/null +++ b/noir/tooling/lsp/src/requests/code_lens_request.rs @@ -0,0 +1,236 @@ +use std::future::{self, Future}; + +use async_lsp::{ErrorCode, ResponseError}; + +use nargo::{package::Package, workspace::Workspace}; +use noirc_driver::check_crate; +use noirc_frontend::hir::FunctionNameMatch; + +use crate::{ + byte_span_to_range, prepare_source, resolve_workspace_for_source_path, + types::{CodeLens, CodeLensParams, CodeLensResult, Command}, + LspState, +}; + +const ARROW: &str = "▶\u{fe0e}"; +const TEST_COMMAND: &str = "nargo.test"; +const TEST_CODELENS_TITLE: &str = "Run Test"; +const COMPILE_COMMAND: &str = "nargo.compile"; +const COMPILE_CODELENS_TITLE: 
&str = "Compile"; +const INFO_COMMAND: &str = "nargo.info"; +const INFO_CODELENS_TITLE: &str = "Info"; +const EXECUTE_COMMAND: &str = "nargo.execute"; +const EXECUTE_CODELENS_TITLE: &str = "Execute"; + +const PROFILE_COMMAND: &str = "nargo.profile"; +const PROFILE_CODELENS_TITLE: &str = "Profile"; + +fn with_arrow(title: &str) -> String { + format!("{ARROW} {title}") +} + +fn package_selection_args(workspace: &Workspace, package: &Package) -> Vec { + vec![ + "--program-dir".into(), + workspace.root_dir.display().to_string().into(), + "--package".into(), + package.name.to_string().into(), + ] +} + +pub(crate) fn on_code_lens_request( + state: &mut LspState, + params: CodeLensParams, +) -> impl Future> { + future::ready(on_code_lens_request_inner(state, params)) +} + +fn on_code_lens_request_inner( + state: &mut LspState, + params: CodeLensParams, +) -> Result { + let file_path = params.text_document.uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; + + if let Some(collected_lenses) = state.cached_lenses.get(¶ms.text_document.uri.to_string()) { + return Ok(Some(collected_lenses.clone())); + } + + let source_string = std::fs::read_to_string(&file_path).map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not read file from disk") + })?; + + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); + + let (mut context, crate_id) = prepare_source(source_string); + // We ignore the warnings and errors produced by compilation for producing code lenses + // because we can still get the test functions even if compilation fails + let _ = check_crate(&mut context, crate_id, false, false); + + let collected_lenses = + collect_lenses_for_package(&context, crate_id, &workspace, package, None); + + if collected_lenses.is_empty() { + state.cached_lenses.remove(¶ms.text_document.uri.to_string()); + Ok(None) + } else { + state + .cached_lenses + .insert(params.text_document.uri.to_string().clone(), collected_lenses.clone()); + Ok(Some(collected_lenses)) + } +} + +pub(crate) fn collect_lenses_for_package( + context: &noirc_frontend::macros_api::HirContext, + crate_id: noirc_frontend::macros_api::CrateId, + workspace: &Workspace, + package: &Package, + file_path: Option<&std::path::PathBuf>, +) -> Vec { + let mut lenses: Vec = vec![]; + let fm = &context.file_manager; + let files = fm.as_file_map(); + let tests = + context.get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Anything); + for (func_name, test_function) in tests { + let location = context.function_meta(&test_function.get_id()).name.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id) != *file_path { + continue; + } + } + + let range = byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + + let test_command = Command { + title: with_arrow(TEST_CODELENS_TITLE), + command: TEST_COMMAND.into(), + arguments: Some( + [ + package_selection_args(workspace, package), + vec!["--exact".into(), func_name.into()], + ] + .concat(), + ), + }; + + let test_lens = CodeLens { range, command: Some(test_command), data: None }; + + lenses.push(test_lens); + } + if package.is_binary() { + if let Some(main_func_id) = context.get_main_function(&crate_id) { + let location 
= context.function_meta(&main_func_id).name.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id) != *file_path { + return lenses; + } + } + + let range = + byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + + let compile_command = Command { + title: with_arrow(COMPILE_CODELENS_TITLE), + command: COMPILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; + + lenses.push(compile_lens); + + let info_command = Command { + title: INFO_CODELENS_TITLE.to_string(), + command: INFO_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let info_lens = CodeLens { range, command: Some(info_command), data: None }; + + lenses.push(info_lens); + + let execute_command = Command { + title: EXECUTE_CODELENS_TITLE.to_string(), + command: EXECUTE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let execute_lens = CodeLens { range, command: Some(execute_command), data: None }; + + lenses.push(execute_lens); + + let profile_command = Command { + title: PROFILE_CODELENS_TITLE.to_string(), + command: PROFILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; + + lenses.push(profile_lens); + } + } + + if package.is_contract() { + // Currently not looking to deduplicate this since we don't have a clear decision on if the Contract stuff is staying + for contract in context.get_all_contracts(&crate_id) { + let location = contract.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id) != *file_path { + continue; + } + } + + let range = + byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + + let compile_command = Command { + title: with_arrow(COMPILE_CODELENS_TITLE), + command: COMPILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; + + lenses.push(compile_lens); + + let info_command = Command { + title: INFO_CODELENS_TITLE.to_string(), + command: INFO_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let info_lens = CodeLens { range, command: Some(info_command), data: None }; + + lenses.push(info_lens); + + let profile_command = Command { + title: PROFILE_CODELENS_TITLE.to_string(), + command: PROFILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; + + lenses.push(profile_lens); + } + } + + lenses +} diff --git a/noir/tooling/lsp/src/requests/goto_definition.rs b/noir/tooling/lsp/src/requests/goto_definition.rs index 558851d4ecf..2ff5901ff9c 100644 --- a/noir/tooling/lsp/src/requests/goto_definition.rs +++ b/noir/tooling/lsp/src/requests/goto_definition.rs @@ -1,12 +1,13 @@ use std::future::{self, Future}; +use crate::resolve_workspace_for_source_path; use 
crate::{types::GotoDefinitionResult, LspState}; -use async_lsp::{ErrorCode, LanguageClient, ResponseError}; +use async_lsp::{ErrorCode, ResponseError}; use fm::codespan_files::Error; use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Location}; use lsp_types::{Position, Url}; -use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use nargo::insert_all_files_for_workspace_into_file_manager; +use noirc_driver::file_manager_with_stdlib; pub(crate) fn on_goto_definition_request( state: &mut LspState, @@ -17,78 +18,62 @@ pub(crate) fn on_goto_definition_request( } fn on_goto_definition_inner( - state: &mut LspState, + _state: &mut LspState, params: GotoDefinitionParams, ) -> Result { - let root_path = state.root_path.as_deref().ok_or_else(|| { - ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find project root") - })?; - let file_path = params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let toml_path = match find_package_manifest(root_path, &file_path) { - Ok(toml_path) => toml_path, - Err(err) => { - let _ = state.client.log_message(lsp_types::LogMessageParams { - typ: lsp_types::MessageType::WARNING, - message: err.to_string(), - }); - return Ok(None); - } - }; - let workspace = resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| { - // If we found a manifest, but the workspace is invalid, we raise an error about it - ResponseError::new(ErrorCode::REQUEST_FAILED, err) - })?; + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); - let mut definition_position = None; + let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); - for package in &workspace { - let (mut context, crate_id) = nargo::prepare_package(package); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let (mut context, crate_id) = nargo::prepare_package(&workspace_file_manager, package); + + let interner; + if let Some(def_interner) = _state.cached_definitions.get(&package_root_path) { + interner = def_interner; + } else { // We ignore the warnings and errors produced by compilation while resolving the definition let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); - - let files = context.file_manager.as_file_map(); - let file_id = context.file_manager.name_to_id(file_path.clone()); - - if let Some(file_id) = file_id { - let byte_index = position_to_byte_index( - files, - file_id, - ¶ms.text_document_position_params.position, - ); - - if let Ok(byte_index) = byte_index { - let search_for_location = noirc_errors::Location { - file: file_id, - span: noirc_errors::Span::single_char(byte_index as u32), - }; - let found_location = context.get_definition_location_from(search_for_location); - - if let Some(found_location) = found_location { - let file_id = found_location.file; - definition_position = to_lsp_location(files, file_id, found_location.span); - } - } - } + interner = &context.def_interner; } - if let Some(definition_position) = definition_position { - let response: GotoDefinitionResponse = - GotoDefinitionResponse::from(definition_position).to_owned(); - 
Ok(Some(response)) - } else { - Ok(None) - } + let files = context.file_manager.as_file_map(); + let file_id = context.file_manager.name_to_id(file_path.clone()).ok_or(ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not find file in file manager. File path: {:?}", file_path), + ))?; + let byte_index = + position_to_byte_index(files, file_id, &params.text_document_position_params.position) + .map_err(|err| { + ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not convert position to byte index. Error: {:?}", err), + ) + })?; + + let search_for_location = noirc_errors::Location { + file: file_id, + span: noirc_errors::Span::single_char(byte_index as u32), + }; + + let goto_definition_response = + interner.get_definition_location_from(search_for_location).and_then(|found_location| { + let file_id = found_location.file; + let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let response: GotoDefinitionResponse = + GotoDefinitionResponse::from(definition_position).to_owned(); + Some(response) + }); + + Ok(goto_definition_response) } fn to_lsp_location<'a, F>( diff --git a/noir/tooling/lsp/src/requests/mod.rs b/noir/tooling/lsp/src/requests/mod.rs index e2fdcdf08da..2711c597bcf 100644 --- a/noir/tooling/lsp/src/requests/mod.rs +++ b/noir/tooling/lsp/src/requests/mod.rs @@ -1,9 +1,10 @@ use std::future::Future; -use crate::types::InitializeParams; +use crate::types::{CodeLensOptions, InitializeParams}; use async_lsp::ResponseError; use lsp_types::{Position, TextDocumentSyncCapability, TextDocumentSyncKind}; use nargo_fmt::Config; +use serde::{Deserialize, Serialize}; use crate::{ types::{InitializeResult, NargoCapability, NargoTestsOptions, ServerCapabilities}, @@ -20,25 +21,58 @@ use crate::{ // They are not attached to the `NargoLspService` struct so they can be unit tested with only `LspState` // and params passed in. +mod code_lens_request; mod goto_definition; mod profile_run; mod test_run; mod tests; pub(crate) use { + code_lens_request::collect_lenses_for_package, + code_lens_request::on_code_lens_request, goto_definition::on_goto_definition_request, profile_run::on_profile_run_request, test_run::on_test_run_request, tests::on_tests_request, }; +/// The LSP client sends an initialization request after the server has started. +/// [InitializeParams].`initialization_options` will contain the options sent from the client. +#[derive(Debug, Deserialize, Serialize)] +struct LspInitializationOptions { + /// Controls whether code lenses are enabled by the server. + /// By default this is set to true (enabled).
+ #[serde(rename = "enableCodeLens", default = "default_enable_code_lens")] + enable_code_lens: bool, +} + +fn default_enable_code_lens() -> bool { + true +} + +impl Default for LspInitializationOptions { + fn default() -> Self { + Self { enable_code_lens: default_enable_code_lens() } + } +} + pub(crate) fn on_initialize( state: &mut LspState, params: InitializeParams, ) -> impl Future> { state.root_path = params.root_uri.and_then(|root_uri| root_uri.to_file_path().ok()); - async { + let initialization_options: LspInitializationOptions = params + .initialization_options + .and_then(|value| serde_json::from_value(value).ok()) + .unwrap_or_default(); + + async move { let text_document_sync = TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL); + let code_lens = if initialization_options.enable_code_lens { + Some(CodeLensOptions { resolve_provider: Some(false) }) + } else { + None + }; + let nargo = NargoCapability { tests: Some(NargoTestsOptions { fetch: Some(true), @@ -50,6 +84,7 @@ pub(crate) fn on_initialize( Ok(InitializeResult { capabilities: ServerCapabilities { text_document_sync: Some(text_document_sync), + code_lens_provider: code_lens, document_formatting_provider: true, nargo: Some(nargo), definition_provider: Some(lsp_types::OneOf::Left(true)), @@ -105,7 +140,9 @@ pub(crate) fn on_shutdown( #[cfg(test)] mod initialization { use async_lsp::ClientSocket; - use lsp_types::{InitializeParams, TextDocumentSyncCapability, TextDocumentSyncKind}; + use lsp_types::{ + CodeLensOptions, InitializeParams, TextDocumentSyncCapability, TextDocumentSyncKind, + }; use tokio::test; use crate::{ @@ -125,6 +162,7 @@ mod initialization { text_document_sync: Some(TextDocumentSyncCapability::Kind( TextDocumentSyncKind::FULL )), + code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(false) }), document_formatting_provider: true, .. 
} diff --git a/noir/tooling/lsp/src/requests/profile_run.rs b/noir/tooling/lsp/src/requests/profile_run.rs index 4c4d7f11fde..6664475a68c 100644 --- a/noir/tooling/lsp/src/requests/profile_run.rs +++ b/noir/tooling/lsp/src/requests/profile_run.rs @@ -5,9 +5,11 @@ use std::{ use acvm::ExpressionWidth; use async_lsp::{ErrorCode, ResponseError}; -use nargo::artifacts::debug::DebugArtifact; +use nargo::{artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_errors::{debug_info::OpCodesCount, Location}; use crate::{ @@ -48,6 +50,9 @@ fn on_profile_run_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(_package) => { @@ -60,6 +65,7 @@ fn on_profile_run_request_inner( let expression_width = ExpressionWidth::Bounded { width: 3 }; let (compiled_programs, compiled_contracts) = nargo::ops::compile_workspace( + &workspace_file_manager, &workspace, &binary_packages, &contract_packages, diff --git a/noir/tooling/lsp/src/requests/test_run.rs b/noir/tooling/lsp/src/requests/test_run.rs index e5245de426f..c2181d7839d 100644 --- a/noir/tooling/lsp/src/requests/test_run.rs +++ b/noir/tooling/lsp/src/requests/test_run.rs @@ -2,11 +2,14 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, ResponseError}; use nargo::{ + insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, prepare_package, }; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::hir::FunctionNameMatch; use crate::{ @@ -47,10 +50,13 @@ fn on_test_run_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(package) => { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); if check_crate(&mut context, crate_id, false, false).is_err() { let result = NargoTestRunResult { id: params.id.clone(), @@ -72,8 +78,14 @@ fn on_test_run_request_inner( ) })?; - let test_result = - run_test(&state.solver, &context, test_function, false, &CompileOptions::default()); + let test_result = run_test( + &state.solver, + &context, + test_function, + false, + None, + &CompileOptions::default(), + ); let result = match test_result { TestStatus::Pass => NargoTestRunResult { id: params.id.clone(), diff --git a/noir/tooling/lsp/src/requests/tests.rs b/noir/tooling/lsp/src/requests/tests.rs index 9a67eaae6db..0f717b9fb9e 100644 --- a/noir/tooling/lsp/src/requests/tests.rs +++ 
b/noir/tooling/lsp/src/requests/tests.rs @@ -2,9 +2,9 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; use lsp_types::{LogMessageParams, MessageType}; -use nargo::prepare_package; +use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{check_crate, file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; use crate::{ get_package_tests_in_crate, @@ -50,10 +50,13 @@ fn on_tests_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let package_tests: Vec<_> = workspace .into_iter() .filter_map(|package| { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = prepare_package(&workspace_file_manager, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails let _ = check_crate(&mut context, crate_id, false, false); diff --git a/noir/tooling/lsp/src/types.rs b/noir/tooling/lsp/src/types.rs index 48c412eb5ad..b2960964e7c 100644 --- a/noir/tooling/lsp/src/types.rs +++ b/noir/tooling/lsp/src/types.rs @@ -9,10 +9,10 @@ use std::collections::{BTreeMap, HashMap}; // Re-providing lsp_types that we don't need to override pub(crate) use lsp_types::{ - Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, - InitializeParams, InitializedParams, LogMessageParams, MessageType, Position, - PublishDiagnosticsParams, Range, ServerInfo, TextDocumentSyncCapability, Url, + CodeLens, CodeLensOptions, CodeLensParams, Command, Diagnostic, DiagnosticSeverity, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, + DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializedParams, + Position, PublishDiagnosticsParams, Range, ServerInfo, TextDocumentSyncCapability, Url, }; pub(crate) mod request { @@ -24,7 +24,9 @@ pub(crate) mod request { }; // Re-providing lsp_types that we don't need to override - pub(crate) use lsp_types::request::{Formatting, GotoDefinition, Shutdown}; + pub(crate) use lsp_types::request::{ + CodeLensRequest as CodeLens, Formatting, GotoDefinition, Shutdown, + }; #[derive(Debug)] pub(crate) struct Initialize; @@ -112,6 +114,10 @@ pub(crate) struct ServerCapabilities { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) definition_provider: Option>, + /// The server provides code lens. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) code_lens_provider: Option, + /// The server provides document formatting. 
pub(crate) document_formatting_provider: bool, @@ -214,4 +220,5 @@ pub(crate) struct NargoProfileRunResult { pub(crate) opcodes_counts: HashMap<Location, OpCodesCount>, } +pub(crate) type CodeLensResult = Option<Vec<CodeLens>>; pub(crate) type GotoDefinitionResult = Option<GotoDefinitionResponse>; diff --git a/noir/tooling/nargo/Cargo.toml b/noir/tooling/nargo/Cargo.toml index f0733d7ad44..cd97980b9e0 100644 --- a/noir/tooling/nargo/Cargo.toml +++ b/noir/tooling/nargo/Cargo.toml @@ -24,10 +24,16 @@ iter-extended.workspace = true serde.workspace = true thiserror.workspace = true codespan-reporting.workspace = true -log.workspace = true +tracing.workspace = true rayon = "1.8.0" +jsonrpc.workspace = true [dev-dependencies] # TODO: This dependency is used to generate unit tests for `get_all_paths_in_dir` # TODO: once that method is moved to nargo_cli, we can move this dependency to nargo_cli -tempfile.workspace = true \ No newline at end of file +tempfile.workspace = true +jsonrpc-http-server = "18.0" +jsonrpc-core-client = "18.0" +jsonrpc-derive = "18.0" +jsonrpc-core = "18.0" +serial_test = "2.0" diff --git a/noir/tooling/nargo/src/artifacts/debug.rs b/noir/tooling/nargo/src/artifacts/debug.rs index 324c476d13d..633fc7a8ded 100644 --- a/noir/tooling/nargo/src/artifacts/debug.rs +++ b/noir/tooling/nargo/src/artifacts/debug.rs @@ -59,6 +59,12 @@ impl DebugArtifact { self.line_index(location.file, location_start) } + /// Given a location, returns the index of the line it ends at + pub fn location_end_line_index(&self, location: Location) -> Result { + let location_end = location.span.end() as usize; + self.line_index(location.file, location_end) + } + /// Given a location, returns the line number it starts at pub fn location_line_number(&self, location: Location) -> Result { let location_start = location.span.start() as usize; @@ -82,12 +88,28 @@ impl DebugArtifact { let line_index = self.line_index(location.file, location_start)?; let line_span = self.line_range(location.file, line_index)?; + let line_length = line_span.end - (line_span.start + 1); let start_in_line = location_start - line_span.start; + + // The location might continue beyond the line, + // so we need a bounds check let end_in_line = location_end - line_span.start; + let end_in_line = std::cmp::min(end_in_line, line_length); Ok(Range { start: start_in_line, end: end_in_line }) } + /// Given a location, returns a Span relative to its last line's + /// position in the file. This is useful when processing a file's + /// contents on a per-line basis.
+ pub fn location_in_end_line(&self, location: Location) -> Result, Error> { + let end_line_index = self.location_end_line_index(location)?; + let line_span = self.line_range(location.file, end_line_index)?; + let location_end = location.span.end() as usize; + let end_in_line = location_end - line_span.start; + Ok(Range { start: 0, end: end_in_line }) + } + /// Given a location, returns the last line index /// of its file pub fn last_line_index(&self, location: Location) -> Result { @@ -149,3 +171,70 @@ impl<'a> Files<'a> for DebugArtifact { }) } } + +#[cfg(test)] +mod tests { + use crate::artifacts::debug::DebugArtifact; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + // Tests that location_in_line correctly handles + // locations spanning multiple lines. + // For example, given the snippet: + // ``` + // permute( + // consts::x5_2_config(), + // state); + // ``` + // We want location_in_line to return the range + // containing `permute(` + #[test] + fn location_in_line_stops_at_end_of_line() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations)]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_in_line = debug_artifact.location_in_line(loc).expect("Expected a range"); + assert_eq!(location_in_line, Range { start: 12, end: 20 }); + } +} diff --git a/noir/tooling/nargo/src/lib.rs b/noir/tooling/nargo/src/lib.rs index f0c7277060f..db54fd3d574 100644 --- a/noir/tooling/nargo/src/lib.rs +++ b/noir/tooling/nargo/src/lib.rs @@ -42,13 +42,21 @@ pub fn prepare_dependencies( } } +pub fn insert_all_files_for_workspace_into_file_manager( + workspace: &workspace::Workspace, + file_manager: &mut FileManager, +) { + for package in workspace.clone().into_iter() { + insert_all_files_for_package_into_file_manager(package, file_manager); + } +} // We will pre-populate the file manager with all the files in the package // This is so that we can avoid having to read from disk when we are compiling // // This does not require parsing because we are interested in the files under the src directory // it may turn out that we do not need to include some Noir files that we add to the file // manager -pub fn insert_all_files_for_package_into_file_manager( +fn insert_all_files_for_package_into_file_manager( package: &Package, file_manager: &mut FileManager, ) { @@ -87,11 +95,11 
@@ fn insert_all_files_for_packages_dependencies_into_file_manager( } } -pub fn prepare_package(package: &Package) -> (Context, CrateId) { - let mut fm = FileManager::new(&package.root_dir); - insert_all_files_for_package_into_file_manager(package, &mut fm); - - let mut context = Context::new(fm); +pub fn prepare_package<'file_manager>( + file_manager: &'file_manager FileManager, + package: &Package, +) -> (Context<'file_manager>, CrateId) { + let mut context = Context::from_ref_file_manager(file_manager); let crate_id = prepare_crate(&mut context, &package.entry_path); diff --git a/noir/tooling/nargo/src/ops/compile.rs b/noir/tooling/nargo/src/ops/compile.rs index 1a9e0a6c115..bd395d03f67 100644 --- a/noir/tooling/nargo/src/ops/compile.rs +++ b/noir/tooling/nargo/src/ops/compile.rs @@ -14,6 +14,7 @@ use rayon::prelude::*; /// /// This function will return an error if there are any compilations errors reported. pub fn compile_workspace( + file_manager: &FileManager, workspace: &Workspace, binary_packages: &[Package], contract_packages: &[Package], @@ -21,23 +22,24 @@ pub fn compile_workspace( compile_options: &CompileOptions, ) -> Result<(Vec, Vec), CompileError> { // Compile all of the packages in parallel. - let program_results: Vec<(FileManager, CompilationResult)> = binary_packages + let program_results: Vec> = binary_packages + .par_iter() + .map(|package| { + compile_program(file_manager, workspace, package, compile_options, expression_width) + }) + .collect(); + let contract_results: Vec> = contract_packages .par_iter() - .map(|package| compile_program(workspace, package, compile_options, expression_width)) + .map(|package| compile_contract(file_manager, package, compile_options, expression_width)) .collect(); - let contract_results: Vec<(FileManager, CompilationResult)> = - contract_packages - .par_iter() - .map(|package| compile_contract(package, compile_options, expression_width)) - .collect(); // Report any warnings/errors which were encountered during compilation. let compiled_programs: Vec = program_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -45,10 +47,10 @@ pub fn compile_workspace( .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -59,12 +61,13 @@ pub fn compile_workspace( } pub fn compile_program( + file_manager: &FileManager, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let program_artifact_path = workspace.package_build_path(package); let mut debug_artifact_path = program_artifact_path.clone(); @@ -74,33 +77,34 @@ pub fn compile_program( match noirc_driver::compile_main(&mut context, crate_id, compile_options, None, true) { Ok(program_and_warnings) => program_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; // Apply backend specific optimizations. 
let optimized_program = crate::ops::optimize_program(program, expression_width); - (context.file_manager, Ok((optimized_program, warnings))) + Ok((optimized_program, warnings)) } fn compile_contract( + file_manager: &FileManager, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let (contract, warnings) = match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { Ok(contracts_and_warnings) => contracts_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; let optimized_contract = crate::ops::optimize_contract(contract, expression_width); - (context.file_manager, Ok((optimized_contract, warnings))) + Ok((optimized_contract, warnings)) } pub(crate) fn report_errors( diff --git a/noir/tooling/nargo/src/ops/execute.rs b/noir/tooling/nargo/src/ops/execute.rs index 2ac85781410..4fc7f7b599f 100644 --- a/noir/tooling/nargo/src/ops/execute.rs +++ b/noir/tooling/nargo/src/ops/execute.rs @@ -7,13 +7,13 @@ use crate::NargoError; use super::foreign_calls::ForeignCallExecutor; +#[tracing::instrument(level = "trace", skip_all)] pub fn execute_circuit( circuit: &Circuit, initial_witness: WitnessMap, blackbox_solver: &B, foreign_call_executor: &mut F, ) -> Result { - log::trace!("Start circuit execution"); let mut acvm = ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness); loop { @@ -55,8 +55,5 @@ pub fn execute_circuit( } } - let solved_witness = acvm.finalize(); - - log::trace!("Finish circuit execution"); - Ok(solved_witness) + Ok(acvm.finalize()) } diff --git a/noir/tooling/nargo/src/ops/foreign_calls.rs b/noir/tooling/nargo/src/ops/foreign_calls.rs index bc1e19cdcf4..cbe40c92b4e 100644 --- a/noir/tooling/nargo/src/ops/foreign_calls.rs +++ b/noir/tooling/nargo/src/ops/foreign_calls.rs @@ -2,6 +2,7 @@ use acvm::{ acir::brillig::{ForeignCallParam, ForeignCallResult, Value}, pwg::ForeignCallWaitInfo, }; +use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay}; pub trait ForeignCallExecutor { @@ -94,11 +95,22 @@ pub struct DefaultForeignCallExecutor { mocked_responses: Vec, /// Whether to print [`ForeignCall::Print`] output. 
show_output: bool, + /// JSON RPC client to resolve foreign calls + external_resolver: Option, } impl DefaultForeignCallExecutor { - pub fn new(show_output: bool) -> Self { - DefaultForeignCallExecutor { show_output, ..DefaultForeignCallExecutor::default() } + pub fn new(show_output: bool, resolver_url: Option<&str>) -> Self { + let oracle_resolver = resolver_url.map(|resolver_url| { + let transport_builder = + Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + Client::with_transport(transport_builder.build()) + }); + DefaultForeignCallExecutor { + show_output, + external_resolver: oracle_resolver, + ..DefaultForeignCallExecutor::default() + } } } @@ -190,27 +202,136 @@ impl ForeignCallExecutor for DefaultForeignCallExecutor { Ok(ForeignCallResult { values: vec![] }) } None => { - let response_position = self + let mock_response_position = self .mocked_responses .iter() - .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)) - .unwrap_or_else(|| panic!("Unknown foreign call {}", foreign_call_name)); + .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); - let mock = self - .mocked_responses - .get_mut(response_position) - .expect("Invalid position of mocked response"); - let result = mock.result.values.clone(); - - if let Some(times_left) = &mut mock.times_left { - *times_left -= 1; - if *times_left == 0 { - self.mocked_responses.remove(response_position); + match (mock_response_position, &self.external_resolver) { + (Some(response_position), _) => { + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + let result = mock.result.values.clone(); + + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } + } + + Ok(ForeignCallResult { values: result }) + } + (None, Some(external_resolver)) => { + let encoded_params: Vec<_> = + foreign_call.inputs.iter().map(build_json_rpc_arg).collect(); + + let req = + external_resolver.build_request(foreign_call_name, &encoded_params); + + let response = external_resolver.send_request(req)?; + + let parsed_response: ForeignCallResult = response.result()?; + + Ok(parsed_response) } + (None, None) => panic!("Unknown foreign call {}", foreign_call_name), } + } + } + } +} + +#[cfg(test)] +mod tests { + use acvm::{ + acir::brillig::ForeignCallParam, + brillig_vm::brillig::{ForeignCallResult, Value}, + pwg::ForeignCallWaitInfo, + FieldElement, + }; + use jsonrpc_core::Result as RpcResult; + use jsonrpc_derive::rpc; + use jsonrpc_http_server::{Server, ServerBuilder}; + use serial_test::serial; + + use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; - Ok(ForeignCallResult { values: result }) + #[allow(unreachable_pub)] + #[rpc] + pub trait OracleResolver { + #[rpc(name = "echo")] + fn echo(&self, param: ForeignCallParam) -> RpcResult; + + #[rpc(name = "sum")] + fn sum(&self, array: ForeignCallParam) -> RpcResult; + } + + struct OracleResolverImpl; + + impl OracleResolver for OracleResolverImpl { + fn echo(&self, param: ForeignCallParam) -> RpcResult { + Ok(vec![param].into()) + } + + fn sum(&self, array: ForeignCallParam) -> RpcResult { + let mut res: FieldElement = 0_usize.into(); + + for value in array.values() { + res += value.to_field(); } + + Ok(Value::from(res).into()) } } + + fn build_oracle_server() -> (Server, String) { + let mut io = jsonrpc_core::IoHandler::new(); + 
io.extend_with(OracleResolverImpl.to_delegate()); + + let server = ServerBuilder::new(io) + .start_http(&"127.0.0.1:5555".parse().expect("Invalid address")) + .expect("Could not start server"); + + let url = format!("http://{}", server.address()); + (server, url) + } + + #[serial] + #[test] + fn test_oracle_resolver_echo() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "echo".to_string(), + inputs: vec![ForeignCallParam::Single(1_u128.into())], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); + + server.close(); + } + + #[serial] + #[test] + fn test_oracle_resolver_sum() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "sum".to_string(), + inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), Value::from(3_usize).into()); + + server.close(); + } } diff --git a/noir/tooling/nargo/src/ops/test.rs b/noir/tooling/nargo/src/ops/test.rs index 5bfdd6d15d0..f38dcad0c2f 100644 --- a/noir/tooling/nargo/src/ops/test.rs +++ b/noir/tooling/nargo/src/ops/test.rs @@ -19,6 +19,7 @@ pub fn run_test( context: &Context, test_function: TestFunction, show_output: bool, + foreign_call_resolver_url: Option<&str>, config: &CompileOptions, ) -> TestStatus { let program = compile_no_check(context, config, test_function.get_id(), None, false); @@ -30,7 +31,7 @@ pub fn run_test( &program.circuit, WitnessMap::new(), blackbox_solver, - &mut DefaultForeignCallExecutor::new(show_output), + &mut DefaultForeignCallExecutor::new(show_output, foreign_call_resolver_url), ); test_status_program_compile_pass(test_function, program.debug, circuit_execution) } diff --git a/noir/tooling/nargo_cli/Cargo.toml b/noir/tooling/nargo_cli/Cargo.toml index 2f99fefb778..f280682e15c 100644 --- a/noir/tooling/nargo_cli/Cargo.toml +++ b/noir/tooling/nargo_cli/Cargo.toml @@ -32,7 +32,7 @@ noirc_frontend.workspace = true noirc_abi.workspace = true noirc_errors.workspace = true acvm.workspace = true -barretenberg_blackbox_solver.workspace = true +bn254_blackbox_solver.workspace = true toml.workspace = true serde.workspace = true serde_json.workspace = true @@ -46,16 +46,15 @@ hex.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" -env_logger = "0.9.0" tokio = { version = "1.0", features = ["io-std"] } +dap.workspace = true # Backends backend-interface = { path = "../backend_interface" } bb_abstraction_leaks.workspace = true # Logs -tracing.workspace = true -tracing-subscriber = "0.3.18" +tracing-subscriber.workspace = true tracing-appender = "0.2.3" [target.'cfg(not(unix))'.dependencies] diff --git a/noir/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/tooling/nargo_cli/src/cli/check_cmd.rs index 0ea8186a237..e2db492fe9c 100644 --- a/noir/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/check_cmd.rs @@ -2,12 +2,17 @@ use crate::backends::Backend; use crate::errors::CliError; use clap::Args; +use fm::FileManager; use iter_extended::btree_map; -use nargo::{errors::CompileError, package::Package, prepare_package}; +use nargo::{ + errors::CompileError, insert_all_files_for_workspace_into_file_manager, package::Package, + 
prepare_package, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{ - check_crate, compute_function_abi, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, + check_crate, compute_function_abi, file_manager_with_stdlib, CompileOptions, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -47,15 +52,22 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + for package in &workspace { - check_package(package, &args.compile_options)?; + check_package(&workspace_file_manager, package, &args.compile_options)?; println!("[{}] Constraint system successfully built!", package.name); } Ok(()) } -fn check_package(package: &Package, compile_options: &CompileOptions) -> Result<(), CompileError> { - let (mut context, crate_id) = prepare_package(package); +fn check_package( + file_manager: &FileManager, + package: &Package, + compile_options: &CompileOptions, +) -> Result<(), CompileError> { + let (mut context, crate_id) = prepare_package(file_manager, package); check_crate_and_report_errors( &mut context, crate_id, diff --git a/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index b72ce01e1a9..fe79c0b8c23 100644 --- a/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -9,10 +9,12 @@ use crate::errors::CliError; use acvm::ExpressionWidth; use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; use clap::Args; +use fm::FileManager; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program @@ -45,9 +47,13 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let expression_width = backend.get_backend_info()?; for package in &workspace { let smart_contract_string = smart_contract_for_package( + &workspace_file_manager, &workspace, backend, package, @@ -67,13 +73,15 @@ pub(crate) fn run( } fn smart_contract_for_package( + file_manager: &FileManager, workspace: &Workspace, backend: &Backend, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, ) -> Result { - let program = compile_bin_package(workspace, package, compile_options, expression_width)?; + let program = + compile_bin_package(file_manager, workspace, package, compile_options, expression_width)?; let mut smart_contract_string = backend.eth_contract(&program.circuit)?; diff --git a/noir/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/tooling/nargo_cli/src/cli/compile_cmd.rs index 5ee053c5088..661081778c3 100644 --- a/noir/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -8,10 +8,12 @@ use 
nargo::artifacts::contract::PreprocessedContractFunction; use nargo::artifacts::debug::DebugArtifact; use nargo::artifacts::program::PreprocessedProgram; use nargo::errors::CompileError; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::prepare_package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::file_manager_with_stdlib; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; use noirc_frontend::graph::CrateName; @@ -61,6 +63,9 @@ pub(crate) fn run( )?; let circuit_dir = workspace.target_directory_path(); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace .into_iter() .filter(|package| !package.is_library()) @@ -69,6 +74,7 @@ pub(crate) fn run( let expression_width = backend.get_backend_info_or_default(); let (_, compiled_contracts) = compile_workspace( + &workspace_file_manager, &workspace, &binary_packages, &contract_packages, @@ -85,6 +91,7 @@ pub(crate) fn run( } pub(super) fn compile_workspace( + file_manager: &FileManager, workspace: &Workspace, binary_packages: &[Package], contract_packages: &[Package], @@ -92,23 +99,24 @@ pub(super) fn compile_workspace( compile_options: &CompileOptions, ) -> Result<(Vec, Vec), CliError> { // Compile all of the packages in parallel. - let program_results: Vec<(FileManager, CompilationResult)> = binary_packages + let program_results: Vec> = binary_packages + .par_iter() + .map(|package| { + compile_program(file_manager, workspace, package, compile_options, expression_width) + }) + .collect(); + let contract_results: Vec> = contract_packages .par_iter() - .map(|package| compile_program(workspace, package, compile_options, expression_width)) + .map(|package| compile_contract(file_manager, package, compile_options, expression_width)) .collect(); - let contract_results: Vec<(FileManager, CompilationResult)> = - contract_packages - .par_iter() - .map(|package| compile_contract(package, compile_options, expression_width)) - .collect(); // Report any warnings/errors which were encountered during compilation. 
let compiled_programs: Vec = program_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -116,10 +124,10 @@ pub(super) fn compile_workspace( .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -130,6 +138,7 @@ pub(super) fn compile_workspace( } pub(crate) fn compile_bin_package( + file_manager: &FileManager, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, @@ -139,12 +148,12 @@ pub(crate) fn compile_bin_package( return Err(CompileError::LibraryCrate(package.name.clone()).into()); } - let (file_manager, compilation_result) = - compile_program(workspace, package, compile_options, expression_width); + let compilation_result = + compile_program(file_manager, workspace, package, compile_options, expression_width); let program = report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, )?; @@ -153,12 +162,13 @@ pub(crate) fn compile_bin_package( } fn compile_program( + file_manager: &FileManager, workspace: &Workspace, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let program_artifact_path = workspace.package_build_path(package); let mut debug_artifact_path = program_artifact_path.clone(); @@ -191,7 +201,7 @@ fn compile_program( ) { Ok(program_and_warnings) => program_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; @@ -200,26 +210,27 @@ fn compile_program( let only_acir = compile_options.only_acir; save_program(optimized_program.clone(), package, &workspace.target_directory_path(), only_acir); - (context.file_manager, Ok((optimized_program, warnings))) + Ok((optimized_program, warnings)) } fn compile_contract( + file_manager: &FileManager, package: &Package, compile_options: &CompileOptions, expression_width: ExpressionWidth, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, package); let (contract, warnings) = match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { Ok(contracts_and_warnings) => contracts_and_warnings, Err(errors) => { - return (context.file_manager, Err(errors)); + return Err(errors); } }; let optimized_contract = nargo::ops::optimize_contract(contract, expression_width); - (context.file_manager, Ok((optimized_contract, warnings))) + Ok((optimized_contract, warnings)) } fn save_program( diff --git a/noir/tooling/nargo_cli/src/cli/dap_cmd.rs b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs new file mode 100644 index 00000000000..29e696ea608 --- /dev/null +++ b/noir/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -0,0 +1,180 @@ +use acvm::acir::native_types::WitnessMap; +use backend_interface::Backend; +use clap::Args; +use nargo::constants::PROVER_INPUT_FILE; +use nargo::insert_all_files_for_workspace_into_file_manager; +use 
nargo::workspace::Workspace; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_abi::input_parser::Format; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; +use noirc_frontend::graph::CrateName; + +use std::io::{BufReader, BufWriter, Read, Write}; +use std::path::Path; + +use dap::errors::ServerError; +use dap::requests::Command; +use dap::responses::ResponseBody; +use dap::server::Server; +use dap::types::Capabilities; +use serde_json::Value; + +use super::compile_cmd::compile_bin_package; +use super::fs::inputs::read_inputs_from_file; +use crate::errors::CliError; + +use super::NargoConfig; + +#[derive(Debug, Clone, Args)] +pub(crate) struct DapCommand; + +struct LoadError(&'static str); + +fn find_workspace(project_folder: &str, package: Option<&str>) -> Option { + let Ok(toml_path) = get_package_manifest(Path::new(project_folder)) else { + eprintln!("ERROR: Failed to get package manifest"); + return None; + }; + let package = package.and_then(|p| serde_json::from_str::(p).ok()); + let selection = package.map_or(PackageSelection::DefaultOrAll, PackageSelection::Selected); + match resolve_workspace_from_toml( + &toml_path, + selection, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) { + Ok(workspace) => Some(workspace), + Err(err) => { + eprintln!("ERROR: Failed to resolve workspace: {err}"); + None + } + } +} + +fn load_and_compile_project( + backend: &Backend, + project_folder: &str, + package: Option<&str>, + prover_name: &str, +) -> Result<(CompiledProgram, WitnessMap), LoadError> { + let workspace = + find_workspace(project_folder, package).ok_or(LoadError("Cannot open workspace"))?; + + let expression_width = + backend.get_backend_info().map_err(|_| LoadError("Failed to get backend info"))?; + let package = workspace + .into_iter() + .find(|p| p.is_binary()) + .ok_or(LoadError("No matching binary packages found in workspace"))?; + + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let compiled_program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &CompileOptions::default(), + expression_width, + ) + .map_err(|_| LoadError("Failed to compile project"))?; + + let (inputs_map, _) = + read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi) + .map_err(|_| LoadError("Failed to read program inputs"))?; + let initial_witness = compiled_program + .abi + .encode(&inputs_map, None) + .map_err(|_| LoadError("Failed to encode inputs"))?; + + Ok((compiled_program, initial_witness)) +} + +fn loop_uninitialized_dap( + mut server: Server, + backend: &Backend, +) -> Result<(), ServerError> { + loop { + let req = match server.poll_request()? 
{ + Some(req) => req, + None => break, + }; + + match req.command { + Command::Initialize(_) => { + let rsp = req.success(ResponseBody::Initialize(Capabilities { + supports_disassemble_request: Some(true), + supports_instruction_breakpoints: Some(true), + supports_stepping_granularity: Some(true), + ..Default::default() + })); + server.respond(rsp)?; + } + + Command::Launch(ref arguments) => { + let Some(Value::Object(ref additional_data)) = arguments.additional_data else { + server.respond(req.error("Missing launch arguments"))?; + continue; + }; + let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") else { + server.respond(req.error("Missing project folder argument"))?; + continue; + }; + + let project_folder = project_folder.as_str(); + let package = additional_data.get("package").and_then(|v| v.as_str()); + let prover_name = additional_data + .get("proverName") + .and_then(|v| v.as_str()) + .unwrap_or(PROVER_INPUT_FILE); + + eprintln!("Project folder: {}", project_folder); + eprintln!("Package: {}", package.unwrap_or("(default)")); + eprintln!("Prover name: {}", prover_name); + + match load_and_compile_project(backend, project_folder, package, prover_name) { + Ok((compiled_program, initial_witness)) => { + server.respond(req.ack()?)?; + + let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver::new(); + + noir_debugger::run_dap_loop( + server, + &blackbox_solver, + compiled_program, + initial_witness, + )?; + break; + } + Err(LoadError(message)) => { + server.respond(req.error(message))?; + } + } + } + + Command::Disconnect(_) => { + server.respond(req.ack()?)?; + break; + } + + _ => { + let command = req.command; + eprintln!("ERROR: unhandled command: {command:?}"); + } + } + } + Ok(()) +} + +pub(crate) fn run( + backend: &Backend, + _args: DapCommand, + _config: NargoConfig, +) -> Result<(), CliError> { + let output = BufWriter::new(std::io::stdout()); + let input = BufReader::new(std::io::stdin()); + let server = Server::new(input, output); + + loop_uninitialized_dap(server, backend).map_err(CliError::DapError) +} diff --git a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs index 6eab626a08d..f78a683aa8f 100644 --- a/noir/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -1,15 +1,19 @@ use std::path::PathBuf; use acvm::acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_bin_package; @@ -51,6 +55,9 @@ pub(crate) fn run( let target_dir = &workspace.target_directory_path(); let expression_width = backend.get_backend_info()?; + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( "No matching binary packages found in 
workspace. Only binary packages can be debugged." @@ -58,8 +65,13 @@ pub(crate) fn run( return Ok(()); }; - let compiled_program = - compile_bin_package(&workspace, package, &args.compile_options, expression_width)?; + let compiled_program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &args.compile_options, + expression_width, + )?; run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } @@ -123,8 +135,7 @@ pub(crate) fn debug_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, ) -> Result, CliError> { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let initial_witness = compiled_program.abi.encode(inputs_map, None)?; diff --git a/noir/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/tooling/nargo_cli/src/cli/execute_cmd.rs index 10760f43a45..7f695c42fa4 100644 --- a/noir/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -1,15 +1,19 @@ use acvm::acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_bin_package; @@ -38,6 +42,10 @@ pub(crate) struct ExecuteCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -56,13 +64,25 @@ pub(crate) fn run( )?; let target_dir = &workspace.target_directory_path(); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let expression_width = backend.get_backend_info_or_default(); for package in &workspace { - let compiled_program = - compile_bin_package(&workspace, package, &args.compile_options, expression_width)?; - - let (return_value, solved_witness) = - execute_program_and_decode(compiled_program, package, &args.prover_name)?; + let compiled_program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &args.compile_options, + expression_width, + )?; + + let (return_value, solved_witness) = execute_program_and_decode( + compiled_program, + package, + &args.prover_name, + args.oracle_resolver.as_deref(), + )?; println!("[{}] Circuit witness successfully solved", package.name); if let Some(return_value) = return_value { @@ -81,11 +101,12 @@ fn execute_program_and_decode( program: CompiledProgram, package: &Package, prover_name: &str, + foreign_call_resolver_url: Option<&str>, ) -> Result<(Option, WitnessMap), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; - let solved_witness = execute_program(&program, 
&inputs_map)?; + let solved_witness = execute_program(&program, &inputs_map, foreign_call_resolver_url)?; let public_abi = program.abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -95,9 +116,9 @@ fn execute_program_and_decode( pub(crate) fn execute_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, + foreign_call_resolver_url: Option<&str>, ) -> Result { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let initial_witness = compiled_program.abi.encode(inputs_map, None)?; @@ -105,7 +126,7 @@ pub(crate) fn execute_program( &compiled_program.circuit, initial_witness, &blackbox_solver, - &mut DefaultForeignCallExecutor::new(true), + &mut DefaultForeignCallExecutor::new(true, foreign_call_resolver_url), ); match solved_witness_err { Ok(solved_witness) => Ok(solved_witness), diff --git a/noir/tooling/nargo_cli/src/cli/fmt_cmd.rs b/noir/tooling/nargo_cli/src/cli/fmt_cmd.rs index e62fc560217..78678559547 100644 --- a/noir/tooling/nargo_cli/src/cli/fmt_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/fmt_cmd.rs @@ -1,10 +1,9 @@ use std::{fs::DirEntry, path::Path}; use clap::Args; -use fm::FileManager; -use nargo::insert_all_files_for_package_into_file_manager; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use noirc_driver::{file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::CustomDiagnostic; use noirc_frontend::{hir::def_map::parse_file, parser::ParserError}; @@ -30,18 +29,18 @@ pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliErr Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let config = nargo_fmt::Config::read(&config.program_dir) .map_err(|err| CliError::Generic(err.to_string()))?; let mut check_exit_code_one = false; for package in &workspace { - let mut file_manager = FileManager::new(&package.root_dir); - insert_all_files_for_package_into_file_manager(package, &mut file_manager); - visit_noir_files(&package.root_dir.join("src"), &mut |entry| { - let file_id = file_manager.name_to_id(entry.path().to_path_buf()).expect("The file should exist since we added all files in the package into the file manager"); - let (parsed_module, errors) = parse_file(&file_manager, file_id); + let file_id = workspace_file_manager.name_to_id(entry.path().to_path_buf()).expect("The file should exist since we added all files in the package into the file manager"); + let (parsed_module, errors) = parse_file(&workspace_file_manager, file_id); let is_all_warnings = errors.iter().all(ParserError::is_warning); if !is_all_warnings { @@ -55,14 +54,14 @@ pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliErr let _ = super::compile_cmd::report_errors::<()>( Err(errors), - &file_manager, + &workspace_file_manager, false, false, ); return Ok(()); } - let original = file_manager.fetch_file(file_id); + let original = workspace_file_manager.fetch_file(file_id); let formatted = nargo_fmt::format(original, parsed_module, &config); if check_mode { diff --git a/noir/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/tooling/nargo_cli/src/cli/info_cmd.rs index e25051c1df7..f983a19c0fd 
100644 --- a/noir/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/info_cmd.rs @@ -4,10 +4,14 @@ use acvm::ExpressionWidth; use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; -use nargo::{artifacts::debug::DebugArtifact, package::Package}; +use nargo::{ + artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, + package::Package, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - CompileOptions, CompiledContract, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, + file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; @@ -61,6 +65,9 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace .into_iter() .filter(|package| !package.is_library()) @@ -69,6 +76,7 @@ pub(crate) fn run( let expression_width = backend.get_backend_info_or_default(); let (compiled_programs, compiled_contracts) = compile_workspace( + &workspace_file_manager, &workspace, &binary_packages, &contract_packages, diff --git a/noir/tooling/nargo_cli/src/cli/lsp_cmd.rs b/noir/tooling/nargo_cli/src/cli/lsp_cmd.rs index e1f0a9dd8b9..1428b8070c8 100644 --- a/noir/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -1,7 +1,8 @@ use async_lsp::{ - concurrency::ConcurrencyLayer, - panic::CatchUnwindLayer, server::LifecycleLayer, tracing::TracingLayer, + concurrency::ConcurrencyLayer, panic::CatchUnwindLayer, server::LifecycleLayer, + tracing::TracingLayer, }; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use noir_lsp::NargoLspService; use tower::ServiceBuilder; @@ -30,8 +31,7 @@ pub(crate) fn run( runtime.block_on(async { let (server, _) = async_lsp::MainLoop::new_server(|client| { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let router = NargoLspService::new(&client, blackbox_solver); ServiceBuilder::new() diff --git a/noir/tooling/nargo_cli/src/cli/mod.rs b/noir/tooling/nargo_cli/src/cli/mod.rs index 448e28fb6a7..cbed65593a1 100644 --- a/noir/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/tooling/nargo_cli/src/cli/mod.rs @@ -14,6 +14,7 @@ mod backend_cmd; mod check_cmd; mod codegen_verifier_cmd; mod compile_cmd; +mod dap_cmd; mod debug_cmd; mod execute_cmd; mod fmt_cmd; @@ -74,6 +75,8 @@ enum NargoCommand { Test(test_cmd::TestCommand), Info(info_cmd::InfoCommand), Lsp(lsp_cmd::LspCommand), + #[command(hide = true)] + Dap(dap_cmd::DapCommand), } pub(crate) fn start_cli() -> eyre::Result<()> { @@ -91,6 +94,7 @@ pub(crate) fn start_cli() -> eyre::Result<()> { | NargoCommand::Init(_) | NargoCommand::Lsp(_) | NargoCommand::Backend(_) + | NargoCommand::Dap(_) ) { config.program_dir = find_package_root(&config.program_dir)?; } @@ -112,6 +116,7 @@ pub(crate) fn start_cli() -> eyre::Result<()> { NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), NargoCommand::Backend(args) => backend_cmd::run(args), NargoCommand::Lsp(args) => lsp_cmd::run(&backend, args, config), + NargoCommand::Dap(args) => 
dap_cmd::run(&backend, args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), }?; diff --git a/noir/tooling/nargo_cli/src/cli/prove_cmd.rs b/noir/tooling/nargo_cli/src/cli/prove_cmd.rs index cb1751e7cef..167ab541bc5 100644 --- a/noir/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,10 +1,13 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use super::compile_cmd::compile_bin_package; @@ -40,6 +43,10 @@ pub(crate) struct ProveCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -57,10 +64,18 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let expression_width = backend.get_backend_info()?; for package in &workspace { - let program = - compile_bin_package(&workspace, package, &args.compile_options, expression_width)?; + let program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &args.compile_options, + expression_width, + )?; prove_package( backend, @@ -70,12 +85,14 @@ pub(crate) fn run( &args.prover_name, &args.verifier_name, args.verify, + args.oracle_resolver.as_deref(), )?; } Ok(()) } +#[allow(clippy::too_many_arguments)] pub(crate) fn prove_package( backend: &Backend, workspace: &Workspace, @@ -84,12 +101,14 @@ pub(crate) fn prove_package( prover_name: &str, verifier_name: &str, check_proof: bool, + foreign_call_resolver_url: Option<&str>, ) -> Result<(), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - let solved_witness = execute_program(&compiled_program, &inputs_map)?; + let solved_witness = + execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; // Write public inputs into Verifier.toml let public_abi = compiled_program.abi.public_abi(); diff --git a/noir/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/tooling/nargo_cli/src/cli/test_cmd.rs index fcad4d4ee9f..32893baa157 100644 --- a/noir/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/test_cmd.rs @@ -1,14 +1,17 @@ use std::io::Write; use acvm::BlackBoxFunctionSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; +use fm::FileManager; use nargo::{ + insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, package::Package, prepare_package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{graph::CrateName, hir::FunctionNameMatch}; use termcolor::{Color, ColorChoice, ColorSpec, 
StandardStream, WriteColor}; @@ -40,6 +43,10 @@ pub(crate) struct TestCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -57,6 +64,9 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let pattern = match &args.test_name { Some(name) => { if args.exact { @@ -68,25 +78,34 @@ pub(crate) fn run( None => FunctionNameMatch::Anything, }; - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); for package in &workspace { // By unwrapping here with `?`, we stop the test runner upon a package failing // TODO: We should run the whole suite even if there are failures in a package - run_tests(&blackbox_solver, package, pattern, args.show_output, &args.compile_options)?; + run_tests( + &workspace_file_manager, + &blackbox_solver, + package, + pattern, + args.show_output, + args.oracle_resolver.as_deref(), + &args.compile_options, + )?; } Ok(()) } fn run_tests( + file_manager: &FileManager, blackbox_solver: &S, package: &Package, fn_name: FunctionNameMatch, show_output: bool, + foreign_call_resolver_url: Option<&str>, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = prepare_package(file_manager, package); check_crate_and_report_errors( &mut context, crate_id, @@ -128,7 +147,14 @@ fn run_tests( .expect("Failed to write to stdout"); writer.flush().expect("Failed to flush writer"); - match run_test(blackbox_solver, &context, test_function, show_output, compile_options) { + match run_test( + blackbox_solver, + &context, + test_function, + show_output, + foreign_call_resolver_url, + compile_options, + ) { TestStatus::Pass { .. 
} => { writer .set_color(ColorSpec::new().set_fg(Some(Color::Green))) diff --git a/noir/tooling/nargo_cli/src/cli/verify_cmd.rs b/noir/tooling/nargo_cli/src/cli/verify_cmd.rs index 9659286b5ab..86d5e774cbe 100644 --- a/noir/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/noir/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -7,11 +7,14 @@ use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo::package::Package; use nargo::workspace::Workspace; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; /// Given a proof and a program, verify whether the proof is valid @@ -48,10 +51,18 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let expression_width = backend.get_backend_info()?; for package in &workspace { - let program = - compile_bin_package(&workspace, package, &args.compile_options, expression_width)?; + let program = compile_bin_package( + &workspace_file_manager, + &workspace, + package, + &args.compile_options, + expression_width, + )?; verify_package(backend, &workspace, package, program, &args.verifier_name)?; } diff --git a/noir/tooling/nargo_cli/src/errors.rs b/noir/tooling/nargo_cli/src/errors.rs index 92da74c71d4..4636772231b 100644 --- a/noir/tooling/nargo_cli/src/errors.rs +++ b/noir/tooling/nargo_cli/src/errors.rs @@ -53,6 +53,9 @@ pub(crate) enum CliError { #[error(transparent)] LspError(#[from] async_lsp::Error), + #[error(transparent)] + DapError(#[from] dap::errors::ServerError), + /// Error from Nargo #[error(transparent)] NargoError(#[from] NargoError), diff --git a/noir/tooling/nargo_cli/src/main.rs b/noir/tooling/nargo_cli/src/main.rs index 7eeca2ab2b0..3f797b0bf0c 100644 --- a/noir/tooling/nargo_cli/src/main.rs +++ b/noir/tooling/nargo_cli/src/main.rs @@ -14,22 +14,27 @@ mod errors; use std::env; use color_eyre::config::HookBuilder; -use env_logger::{Builder, Env}; + use tracing_appender::rolling; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; const PANIC_MESSAGE: &str = "This is a bug. 
We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml"; fn main() { - let env = Env::default().filter_or("NOIR_LOG", "error"); // Default to 'error' if NOIR_LOG is not set - Builder::from_env(env).init(); - // Setup tracing if let Ok(log_dir) = env::var("NARGO_LOG_DIR") { let debug_file = rolling::daily(log_dir, "nargo-log"); tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) .with_writer(debug_file) .with_ansi(false) - .with_max_level(tracing::Level::TRACE) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_ansi(true) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) .init(); } diff --git a/noir/tooling/nargo_toml/src/errors.rs b/noir/tooling/nargo_toml/src/errors.rs index da976e1b185..440895056c3 100644 --- a/noir/tooling/nargo_toml/src/errors.rs +++ b/noir/tooling/nargo_toml/src/errors.rs @@ -69,6 +69,9 @@ pub enum ManifestError { #[error(transparent)] SemverError(SemverError), + + #[error("Cyclic package dependency found when processing {cycle}")] + CyclicDependency { cycle: String }, } #[allow(clippy::enum_variant_names)] diff --git a/noir/tooling/nargo_toml/src/lib.rs b/noir/tooling/nargo_toml/src/lib.rs index 6c77fe85f2b..cecc3f7e26a 100644 --- a/noir/tooling/nargo_toml/src/lib.rs +++ b/noir/tooling/nargo_toml/src/lib.rs @@ -120,7 +120,11 @@ struct PackageConfig { } impl PackageConfig { - fn resolve_to_package(&self, root_dir: &Path) -> Result { + fn resolve_to_package( + &self, + root_dir: &Path, + processed: &mut Vec, + ) -> Result { let name: CrateName = if let Some(name) = &self.package.name { name.parse().map_err(|_| ManifestError::InvalidPackageName { toml: root_dir.join("Nargo.toml"), @@ -136,7 +140,7 @@ impl PackageConfig { toml: root_dir.join("Nargo.toml"), name: name.into(), })?; - let resolved_dep = dep_config.resolve_to_dependency(root_dir)?; + let resolved_dep = dep_config.resolve_to_dependency(root_dir, processed)?; dependencies.insert(name, resolved_dep); } @@ -283,7 +287,11 @@ enum DependencyConfig { } impl DependencyConfig { - fn resolve_to_dependency(&self, pkg_root: &Path) -> Result { + fn resolve_to_dependency( + &self, + pkg_root: &Path, + processed: &mut Vec, + ) -> Result { let dep = match self { Self::Github { git, tag, directory } => { let dir_path = clone_git_repo(git, tag).map_err(ManifestError::GitError)?; @@ -300,13 +308,13 @@ impl DependencyConfig { dir_path }; let toml_path = project_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Remote { package } } Self::Path { path } => { let dir_path = pkg_root.join(path); let toml_path = dir_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Local { package } } }; @@ -325,9 +333,10 @@ fn toml_to_workspace( nargo_toml: NargoToml, package_selection: PackageSelection, ) -> Result { + let mut resolved = Vec::new(); let workspace = match nargo_toml.config { Config::Package { package_config } => { - let member = package_config.resolve_to_package(&nargo_toml.root_dir)?; + let member = package_config.resolve_to_package(&nargo_toml.root_dir, &mut resolved)?; match 
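
The `main.rs` hunk above drops `env_logger` in favour of `tracing_subscriber`: when `NARGO_LOG_DIR` is set, output goes to a daily-rolling file with ANSI disabled and the filter taken from `EnvFilter::from_default_env()` (i.e. the conventional `RUST_LOG` variable); otherwise a stderr subscriber is installed whose filter is read from `NOIR_LOG`, and `FmtSpan::ACTIVE` additionally logs span enter/exit. A small self-contained sketch of that stderr branch, assuming `tracing` and `tracing-subscriber` with the `env-filter` feature; the `compile` function and the `"info"` fallback directive are illustrative, not part of the patch:

```rust
use tracing::{info, instrument};
use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter};

// With FmtSpan::ACTIVE, entering and exiting this span is logged too.
#[instrument]
fn compile(package: &str) {
    info!("compiling {}", package);
}

fn main() {
    // NOIR_LOG accepts EnvFilter directives, e.g. `NOIR_LOG=debug` or
    // `NOIR_LOG=nargo=trace`; the "info" fallback here is purely for the demo.
    let filter = EnvFilter::try_from_env("NOIR_LOG").unwrap_or_else(|_| EnvFilter::new("info"));

    tracing_subscriber::fmt()
        .with_span_events(FmtSpan::ACTIVE)
        .with_env_filter(filter)
        .init();

    compile("hello_world");
}
```
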
&package_selection { PackageSelection::Selected(selected_name) if selected_name != &member.name => { return Err(ManifestError::MissingSelectedPackage(member.name)) @@ -345,7 +354,7 @@ fn toml_to_workspace( for (index, member_path) in workspace_config.members.into_iter().enumerate() { let package_root_dir = nargo_toml.root_dir.join(&member_path); let package_toml_path = package_root_dir.join("Nargo.toml"); - let member = resolve_package_from_toml(&package_toml_path)?; + let member = resolve_package_from_toml(&package_toml_path, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) => { @@ -402,17 +411,43 @@ fn read_toml(toml_path: &Path) -> Result { } /// Resolves a Nargo.toml file into a `Package` struct as defined by our `nargo` core. -fn resolve_package_from_toml(toml_path: &Path) -> Result { +fn resolve_package_from_toml( + toml_path: &Path, + processed: &mut Vec, +) -> Result { + // Checks for cyclic dependencies + let str_path = toml_path.to_str().expect("ICE - path is empty"); + if processed.contains(&str_path.to_string()) { + let mut cycle = false; + let mut message = String::new(); + for toml in processed { + cycle = cycle || toml == str_path; + if cycle { + message += &format!("{} referencing ", toml); + } + } + message += str_path; + return Err(ManifestError::CyclicDependency { cycle: message }); + } + // Adds the package to the set of resolved packages + if let Some(str) = toml_path.to_str() { + processed.push(str.to_string()); + } + let nargo_toml = read_toml(toml_path)?; - match nargo_toml.config { + let result = match nargo_toml.config { Config::Package { package_config } => { - package_config.resolve_to_package(&nargo_toml.root_dir) + package_config.resolve_to_package(&nargo_toml.root_dir, processed) } Config::Workspace { .. } => { Err(ManifestError::UnexpectedWorkspace(toml_path.to_path_buf())) } - } + }; + let pos = + processed.iter().position(|toml| toml == str_path).expect("added package must be here"); + processed.remove(pos); + result } #[derive(Debug, PartialEq, Eq)] diff --git a/noir/tooling/noir_codegen/package.json b/noir/tooling/noir_codegen/package.json index 73f96f8d720..97bab815764 100644 --- a/noir/tooling/noir_codegen/package.json +++ b/noir/tooling/noir_codegen/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/noir_codegen", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/types": "workspace:*", "glob": "^10.3.10", diff --git a/noir/tooling/noir_codegen/src/index.ts b/noir/tooling/noir_codegen/src/index.ts index 8d45b76bd7d..19829cd06ff 100644 --- a/noir/tooling/noir_codegen/src/index.ts +++ b/noir/tooling/noir_codegen/src/index.ts @@ -1,5 +1,6 @@ +import { AbiType } from '@noir-lang/noirc_abi'; import { CompiledCircuit } from '@noir-lang/types'; -import { PrimitiveTypesUsed, generateTsInterface } from './noir_types.js'; +import { PrimitiveTypesUsed, generateTsInterface, codegenStructDefinitions } from './noir_types.js'; // TODO: reenable this. See `abiTypeToTs` for reasoning. // export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }; @@ -8,7 +9,9 @@ const codegenPrelude = `/* Autogenerated file, do not edit! 
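
The manifest-resolution changes above add cycle detection by threading a `processed` list of the Nargo.toml paths currently being resolved: a manifest is pushed before its dependencies are resolved, popped once resolution returns, and if it is met again while still on the list the new `ManifestError::CyclicDependency` is returned with an "a referencing b referencing a" chain. A standalone sketch of just that check (the function name and plain `String` error are illustrative; the real code builds the message for `CyclicDependency`):

```rust
// Sketch of the cycle check over the stack of manifests being resolved.
fn check_for_cycle(processed: &[String], toml_path: &str) -> Result<(), String> {
    if !processed.iter().any(|p| p == toml_path) {
        return Ok(());
    }
    // Report the chain starting at the first occurrence of the repeated
    // manifest, e.g. "a/Nargo.toml referencing b/Nargo.toml referencing a/Nargo.toml".
    let mut message = String::new();
    let mut in_cycle = false;
    for entry in processed {
        in_cycle = in_cycle || entry == toml_path;
        if in_cycle {
            message.push_str(&format!("{entry} referencing "));
        }
    }
    message.push_str(toml_path);
    Err(message)
}
```

Because the resolver pushes the manifest path before descending into its dependencies and removes it again once resolution returns, shared "diamond" dependencies do not trip the check; only genuine cycles do.
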
*/ /* eslint-disable */ -import { Noir, InputMap, CompiledCircuit } from "@noir-lang/noir_js" +import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" + +export { ForeignCallHandler } from "@noir-lang/noir_js" `; const codegenFunction = ( @@ -19,26 +22,27 @@ const codegenFunction = ( const args = function_signature.inputs.map(([name]) => `${name}`).join(', '); const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', '); - return ` -export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; + return `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; -export async function ${name}(${args_with_types}): Promise<${function_signature.returnValue}> { +export async function ${name}(${args_with_types}, foreignCallHandler?: ForeignCallHandler): Promise<${ + function_signature.returnValue + }> { const program = new Noir(${name}_circuit); const args: InputMap = { ${args} }; - const { returnValue } = await program.execute(args); + const { returnValue } = await program.execute(args, foreignCallHandler); return returnValue as ${function_signature.returnValue}; -}`; +} +`; }; export const codegen = (programs: [string, CompiledCircuit][]): string => { let results = [codegenPrelude]; const primitiveTypeMap = new Map(); + const structTypeMap = new Map(); const functions: string[] = []; for (const [name, program] of programs) { - const [types_string, function_sig] = generateTsInterface(program.abi, primitiveTypeMap); - functions.push(types_string); - functions.push('\n'); + const function_sig = generateTsInterface(program.abi, structTypeMap, primitiveTypeMap); functions.push(codegenFunction(name, stripUnwantedFields(program), function_sig)); } @@ -48,9 +52,11 @@ export const codegen = (programs: [string, CompiledCircuit][]): string => { primitiveTypeAliases.push(`export type ${value.aliasName} = ${value.tsType};`); } - results = results.concat(...primitiveTypeAliases, ...functions); + const structTypeDefinitions: string = codegenStructDefinitions(structTypeMap, primitiveTypeMap); + + results = results.concat(...primitiveTypeAliases, '', structTypeDefinitions, ...functions); - return results.filter((val) => val !== '').join('\n'); + return results.join('\n'); }; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/noir/tooling/noir_codegen/src/noir_types.ts b/noir/tooling/noir_codegen/src/noir_types.ts index ba4f8650b3b..0c0e2b7c60f 100644 --- a/noir/tooling/noir_codegen/src/noir_types.ts +++ b/noir/tooling/noir_codegen/src/noir_types.ts @@ -112,43 +112,26 @@ function getLastComponentOfPath(str: string): string { */ function generateStructInterfaces( type: AbiType, - output: Set, + structsEncountered: Map, primitiveTypeMap: Map, -): string { - let result = ''; - +) { // Edge case to handle the array of structs case. - if (type.kind === 'array' && type.type.kind === 'struct' && !output.has(getLastComponentOfPath(type.type.path))) { - result += generateStructInterfaces(type.type, output, primitiveTypeMap); + if ( + type.kind === 'array' && + type.type.kind === 'struct' && + !structsEncountered.has(getLastComponentOfPath(type.type.path)) + ) { + generateStructInterfaces(type.type, structsEncountered, primitiveTypeMap); } - if (type.kind !== 'struct') return result; - - // List of structs encountered while viewing this type that we need to generate - // bindings for. 
- const typesEncountered = new Set(); - - // Codegen the struct and then its fields, so that the structs fields - // are defined before the struct itself. - let codeGeneratedStruct = ''; - let codeGeneratedStructFields = ''; + if (type.kind !== 'struct') return; const structName = getLastComponentOfPath(type.path); - if (!output.has(structName)) { - codeGeneratedStruct += `export type ${structName} = {\n`; + if (!structsEncountered.has(structName)) { for (const field of type.fields) { - codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; - typesEncountered.add(field.type); - } - codeGeneratedStruct += `};`; - output.add(structName); - - // Generate code for the encountered structs in the field above - for (const type of typesEncountered) { - codeGeneratedStructFields += generateStructInterfaces(type, output, primitiveTypeMap); + generateStructInterfaces(field.type, structsEncountered, primitiveTypeMap); } + structsEncountered.set(structName, type.fields); } - - return codeGeneratedStructFields + '\n' + codeGeneratedStruct; } /** @@ -158,22 +141,37 @@ function generateStructInterfaces( */ export function generateTsInterface( abiObj: Abi, + structsEncountered: Map, primitiveTypeMap: Map, -): [string, { inputs: [string, string][]; returnValue: string | null }] { - let result = ``; - const outputStructs = new Set(); - +): { inputs: [string, string][]; returnValue: string | null } { // Define structs for composite types for (const param of abiObj.parameters) { - result += generateStructInterfaces(param.type, outputStructs, primitiveTypeMap); + generateStructInterfaces(param.type, structsEncountered, primitiveTypeMap); } // Generating Return type, if it exists if (abiObj.return_type != null) { - result += generateStructInterfaces(abiObj.return_type.abi_type, outputStructs, primitiveTypeMap); + generateStructInterfaces(abiObj.return_type.abi_type, structsEncountered, primitiveTypeMap); + } + + return getTsFunctionSignature(abiObj, primitiveTypeMap); +} + +export function codegenStructDefinitions( + structsEncountered: Map, + primitiveTypeMap: Map, +): string { + let codeGeneratedStruct = ''; + + for (const [structName, structFields] of structsEncountered) { + codeGeneratedStruct += `export type ${structName} = {\n`; + for (const field of structFields) { + codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; + } + codeGeneratedStruct += `};\n\n`; } - return [result, getTsFunctionSignature(abiObj, primitiveTypeMap)]; + return codeGeneratedStruct; } function getTsFunctionSignature( diff --git a/noir/tooling/noir_codegen/test/assert_lt/src/main.nr b/noir/tooling/noir_codegen/test/assert_lt/src/main.nr index 3b3e04ddece..ed370bd87c0 100644 --- a/noir/tooling/noir_codegen/test/assert_lt/src/main.nr +++ b/noir/tooling/noir_codegen/test/assert_lt/src/main.nr @@ -3,17 +3,24 @@ struct MyStruct { bar: [str<5>; 3], } +struct NestedStruct { + foo: MyStruct, + bar: [MyStruct; 3], + baz: u64 +} + fn main( x: u64, y: pub u64, array: [u8; 5], - my_struct: MyStruct, + my_struct: NestedStruct, string: str<5> ) -> pub (u64, u64, MyStruct) { assert(array.len() == 5); - assert(my_struct.foo); + assert(my_struct.foo.foo); assert(string == "12345"); + print(x); assert(x < y); - (x + y, 3, my_struct) + (x + y, 3, my_struct.foo) } diff --git a/noir/tooling/noir_codegen/test/assert_lt/target/assert_lt.json b/noir/tooling/noir_codegen/test/assert_lt/target/assert_lt.json index a1ab87a99fe..be1b134d642 100644 --- 
a/noir/tooling/noir_codegen/test/assert_lt/target/assert_lt.json +++ b/noir/tooling/noir_codegen/test/assert_lt/target/assert_lt.json @@ -1 +1 @@ -{"noir_version":"0.19.4+55670ff82c270534a4bdb999ab0de5cea7017093","hash":11505576107297330043,"backend":"acvm-backend-barretenberg","abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},{"name":"array","type":{"kind":"array","length":5,"type":{"kind":"integer","sign":"unsigned","width":8}},"visibility":"private"},{"name":"my_struct","type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]},"visibility":"private"},{"name":"string","type":{"kind":"string","length":5},"visibility":"private"}],"param_witnesses":{"array":[{"start":3,"end":8}],"my_struct":[{"start":8,"end":24}],"string":[{"start":24,"end":29}],"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"tuple","fields":[{"kind":"integer","sign":"unsigned","width":64},{"kind":"integer","sign":"unsigned","width":64},{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}]},"visibility":"public"},"return_witnesses":[31,32,33,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]},"bytecode":"H4sIAAAAAAAA/81XbU/CMBDu5hv4gopvvGw49JOJH1q2wfaN+E+AddFEgzGL/H250Go5dInumnhJ0z2jXJ9er7s+t4yxe7YyZ9lc1Y8N7CK8tWw1A28jvIPwLsJ7Cus5mfIPxquZqBlzmX5DPowiORpIEYoJH6TTJOZRPB0mIhFxEmeDJAxlEiWjdJqOeCqiUIo8TsNcOa7RceQ6DnUUl32EDxA+RPgI4QbCxwifIHyKcBPhM4TPEb5A+BLhK4RbCLcR7iDcRdhjX3mjzUb+jIlyxibPFgFPmYNlVnm2yXjOcps8O3Q8pU2eXTqemU2eHh3PGdQbl22aS8zZYXRn3/07L4FffLN0Mt9mXH3V99iqhuu80GOgzj+wzZxxjGdXjXFLxjg/+Kkb7/T/G8bvVRe/EQxzciqfvgok9QXEp+P4eQHpGT61bRHHw9ahqurrhjCeZfH7JU+OeAqfcM09wn2tEL/SD9x/Pjdl+8yr2do54dVMUJ6Ta0b/3TF92tr3gI53aJNnn3DfuwZHyE8o2FDIQYBr0Q1FFoQmiEsQlCAiociCWASBCKIQhCCIPxB8IPJA2IGYA9EBF3q4LMNcHlsv/E31XGUOyI1g2fpsvfDfqd5T/aQo5MtrERTzYJJlweKpeAzm7/Itf54vPgBYg2KL1RAAAA=="} \ No newline at end of file 
+{"noir_version":"0.22.0+6f69b3f511c8b4c51404ad4c18131bdf6b7f6a94","hash":3763979860977920209,"abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},{"name":"array","type":{"kind":"array","length":5,"type":{"kind":"integer","sign":"unsigned","width":8}},"visibility":"private"},{"name":"my_struct","type":{"kind":"struct","path":"NestedStruct","fields":[{"name":"foo","type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}}},{"name":"baz","type":{"kind":"integer","sign":"unsigned","width":64}}]},"visibility":"private"},{"name":"string","type":{"kind":"string","length":5},"visibility":"private"}],"param_witnesses":{"array":[{"start":3,"end":8}],"my_struct":[{"start":8,"end":73}],"string":[{"start":73,"end":78}],"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"tuple","fields":[{"kind":"integer","sign":"unsigned","width":64},{"kind":"integer","sign":"unsigned","width":64},{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}]},"visibility":"public"},"return_witnesses":[98,99,100,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]},"bytecode":"H4sIAAAAAAAA/+1cC3PTRhBex8SJ40cezsNxaOtSoGnpQ7Jsy+6LkFJaSFIoKVCgQGzsTJlpCWU85Zf1/9Ur6drz9pAF3jtZM9FMRvliZff7bqXV6bTeYwD4E/wtNfyZCfY7Ep4hOD38mZfwGYJnCc4QPBdg4RMC+7hZk232vORLtutYzXq979b6tmN3rFq722pY9Ua32bJbdqPV6NVajtNv1Vtuu9t2rbZdd/r2caPtHAeG5/k4WmIcsmRcFgjOEZwnuEBwkeBFgpcIXiZ4heASwasErxG8TvCGxjiXhzbKGuJcBv44b5JxqRC8RfBZgt8h+F2C3yO4SvD7BJ8j+AOCzxN8geCLBH+oMc7bQxvbGuK8Dfxx/oiMy8cEXyL4E4I/Jfgzgj8n2CLYJrhGsENwneAGwU2CXY1xbg1ttDTEuQX8cW6TcfmC4C8J/orgrwn+huDLBO8QfIXgXYK/Jfgqwd8RfI3g7wn+AUbv/9fJ5zcI3iN4n+AD+O88EpuOecAO0zkk85whPCe1fZ3R1o9s4+fYaSkmOuPEx9m/PsV2E/x5qRwz3NLBPqNBCxA/dPyKir+xOtcRnJsa7N4C3otTh+5b/DHSmkhuMNr6iU2zUzOVSPg4jyaS23CaSFiCc1uD3UOY7kSCug/5Y6Q1kewx2vqZTbPjmEokfJxHE8kdOE0kLMG5o8HuXZjuRIK67/LHSAtXnDkdAv+j/D1IRgLdZ7T1C5tmp24qgfJxHk2g9+E0gbIE574Guw9guhMo6n7AHyMtXDHR3wP+BPoQkpFADxht/cqm2WmYSqB8nEcT6CM4TaAswXmkwe5jmO4Eirof88dIC1dM9A+BP4E+mXLdGJ8nihhNqttU4ksB/w3q72CPxRYzhH9GMVbWhJuGJGrJ3MV+VuJfDH5Pa9CSYbVZa6CWM6A+j2gs5BhlAs2M+my0Ocdr0xLFTnw23S7ayEpjIsZPcM9Jn89L45VlHq+U5FPYFTirzW8N6xi84qgw/QsKHgsG9S9o89vsCo1h+nMKHjmD+nPa/Dax3sIrfgvTn1fwyBvUn9fmt9lHG4Ux+gsKHgWD+gva/DaxTsm7x4XpLyp4FA3ql/lF5ZqNmSu/X6eDNhbH6F9U8Fg0qF/mF5VrNmau/H5drPfzioLD9C8peCwZ1C/zi8o1HzNXfr8u1m56Bdth+pcVPJYN6pf5ReWai5krv98m1sl6xfRh+lcUPFYM6pf5ReWaj5krv9+mlwNLY/SXFDxKBvXL/KJyLSSIay5BXPMxc+X363rz69Ux+lcVPFYN6pf5ReW6kiCuywniGve48vt1vTWmtTH61xQ81gzql/lF5ZqNmSu/39pTtLE+Rv+6gse6Qf0yv6hcszFz5ffrenPhjTH6NxQ8Ngzql/lF5ZpLENdCgriWYubK77eJ30H0vqwcpr+s4FE2qF/mF5VrNkFcizFz5ffr4PdzvS+Xh+nfVPDYNKh/U5tfx3tuqYzRX1HwqBjUX9Hm1+2hja0x+rcUPLYM6he+8L20eH//4uWz5wNKGre09LsodEhJx1xQHDsHGivI0vD/qhGZHJcfufJl0mqdmbfnZdM/KKSz2ZbH9SjY4+ujWSnw4hg8AXal41PSPiXFZBdGTxx6TOo1duTqCvH/iStNPAoGkrvs7YiP4789FDqSTbExly9pu6gmtdVnHM+w8XtDnhbhaR8xau4wxnWC8QtNcNN83YTF2Zps4yy7tDmvky7w5x3Zpq64P+Xj7ejk2WOM+77EEc9PvGGLElXR7w1vslieJsq1cFkCb7JYWoCvrHEJHJeWcQkAl1hx6Q6XhPCRGB9fcAqP01icUmKPLOyLhb2wsP9VFfw+
V+fA72d1HvzZ4kXw+1Nhvyfsz4Q9mS6B33sJ+y1hjyXsq4QXGN6UcR0YBx2fLfDdID5j4RoWPsNjHRr2+sH+PtjTB/v4YO+ey8E4XgF/coH9d66C32fnGvj9dLCHDvZ6wTYNe+D3xDmA0cmLWFafZJzw/Mbzrwejk5fnwf5ssO8MBv0/Xgyqg5Nqp9ervno2+K168lf/5fHvJ6/+ASeieLQUUAAA"} \ No newline at end of file diff --git a/noir/tooling/noir_codegen/test/index.test.ts b/noir/tooling/noir_codegen/test/index.test.ts index 48199c13a67..822993b2f1e 100644 --- a/noir/tooling/noir_codegen/test/index.test.ts +++ b/noir/tooling/noir_codegen/test/index.test.ts @@ -1,12 +1,18 @@ import { expect } from 'chai'; -import { assert_lt, MyStruct, u64 } from './codegen/index.js'; +import { assert_lt, MyStruct, u64, ForeignCallHandler } from './codegen/index.js'; it('codegens a callable function', async () => { + const my_struct = { foo: true, bar: ['12345', '12345', '12345'] }; + const [sum, constant, struct]: [u64, u64, MyStruct] = await assert_lt( '2', '3', [0, 0, 0, 0, 0], - { foo: true, bar: ['12345', '12345', '12345'] }, + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, '12345', ); @@ -14,3 +20,85 @@ it('codegens a callable function', async () => { expect(constant).to.be.eq('0x03'); expect(struct).to.be.deep.eq({ foo: true, bar: ['12345', '12345', '12345'] }); }); + +it('allows passing a custom foreign call handler', async () => { + let observedName = ''; + let observedInputs: string[][] = []; + const foreignCallHandler: ForeignCallHandler = async (name: string, inputs: string[][]) => { + // Throwing inside the oracle callback causes a timeout so we log the observed values + // and defer the check against expected values until after the execution is complete. + observedName = name; + observedInputs = inputs; + + return []; + }; + + const my_struct = { foo: true, bar: ['12345', '12345', '12345'] }; + + const [sum, constant, struct]: [u64, u64, MyStruct] = await assert_lt( + '2', + '3', + [0, 0, 0, 0, 0], + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, + '12345', + foreignCallHandler, + ); + + expect(observedName).to.be.eq('print'); + expect(observedInputs).to.be.deep.eq([ + // add newline? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + // x + ['0x0000000000000000000000000000000000000000000000000000000000000002'], + // Type metadata + [ + '0x000000000000000000000000000000000000000000000000000000000000007b', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000006b', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000075', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000073', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000072', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000002c', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000077', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000068', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000036', + '0x0000000000000000000000000000000000000000000000000000000000000034', + '0x000000000000000000000000000000000000000000000000000000000000007d', + ], + // format string? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + ]); + + expect(sum).to.be.eq('0x05'); + expect(constant).to.be.eq('0x03'); + expect(struct).to.be.deep.eq({ foo: true, bar: ['12345', '12345', '12345'] }); +}); diff --git a/noir/tooling/noir_js/package.json b/noir/tooling/noir_js/package.json index d5ea4b4ad5d..ed2fd225810 100644 --- a/noir/tooling/noir_js/package.json +++ b/noir/tooling/noir_js/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/noir_js", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/acvm_js": "workspace:*", "@noir-lang/noirc_abi": "workspace:*", diff --git a/noir/tooling/noir_js_backend_barretenberg/package.json b/noir/tooling/noir_js_backend_barretenberg/package.json index c34b8dfc825..93fdc856338 100644 --- a/noir/tooling/noir_js_backend_barretenberg/package.json +++ b/noir/tooling/noir_js_backend_barretenberg/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/backend_barretenberg", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_backend_barretenberg", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "source": "src/index.ts", "main": "lib/cjs/index.js", "module": "lib/esm/index.js", diff --git a/noir/tooling/noir_js_types/package.json b/noir/tooling/noir_js_types/package.json index 51856cfe465..0276b8d087c 100644 --- a/noir/tooling/noir_js_types/package.json +++ b/noir/tooling/noir_js_types/package.json @@ -1,11 +1,20 @@ { "name": "@noir-lang/types", - "collaborators": [ + "contributors": [ "The Noir Team " ], "packageManager": "yarn@3.5.1", "version": "0.22.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_types", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "lib", "package.json" diff --git a/noir/tooling/noirc_abi_wasm/package.json b/noir/tooling/noirc_abi_wasm/package.json index 5b2cd344eab..d023e1e4391 100644 --- a/noir/tooling/noirc_abi_wasm/package.json +++ b/noir/tooling/noirc_abi_wasm/package.json @@ -1,10 +1,19 @@ { "name": "@noir-lang/noirc_abi", - "collaborators": [ + "contributors": [ "The Noir Team " ], "version": "0.22.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noirc_abi_wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "nodejs", "web", @@ -17,10 +26,6 @@ "types": "./web/noirc_abi_wasm.d.ts", "module": "./web/noirc_abi_wasm.js", "sideEffects": false, - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" - }, "scripts": { "build": "bash ./build.sh", "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha", diff --git 
a/noir/tooling/noirc_abi_wasm/src/lib.rs b/noir/tooling/noirc_abi_wasm/src/lib.rs index fb4c295b8c8..5557cc917bf 100644 --- a/noir/tooling/noirc_abi_wasm/src/lib.rs +++ b/noir/tooling/noirc_abi_wasm/src/lib.rs @@ -27,7 +27,7 @@ use js_witness_map::JsWitnessMap; #[wasm_bindgen(typescript_custom_section)] const INPUT_MAP: &'static str = r#" export type Field = string | number | boolean; -export type InputValue = Field | Field[] | InputMap; +export type InputValue = Field | InputMap | (Field | InputMap)[]; export type InputMap = { [key: string]: InputValue }; "#; diff --git a/noir/tooling/noirc_abi_wasm/test/browser/structs.test.ts b/noir/tooling/noirc_abi_wasm/test/browser/structs.test.ts new file mode 100644 index 00000000000..84352d986ab --- /dev/null +++ b/noir/tooling/noirc_abi_wasm/test/browser/structs.test.ts @@ -0,0 +1,26 @@ +import { expect } from '@esm-bundle/chai'; +import initNoirAbi, { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import { MyNestedStruct, MyStruct } from '../shared/structs'; +import { DecodedInputs } from '../types'; + +beforeEach(async () => { + await initNoirAbi(); +}); + +it('correctly handles struct inputs', async () => { + const { abi, inputs } = await import('../shared/structs'); + + const initial_witness: WitnessMap = abiEncode(abi, inputs); + const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); + + const struct_arg: MyStruct = inputs.struct_arg as MyStruct; + const struct_array_arg: MyStruct[] = inputs.struct_array_arg as MyStruct[]; + const nested_struct_arg: MyNestedStruct = inputs.nested_struct_arg as MyNestedStruct; + + expect(BigInt(decoded_inputs.inputs.struct_arg.foo)).to.be.equal(BigInt(struct_arg.foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[0].foo)).to.be.equal(BigInt(struct_array_arg[0].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[1].foo)).to.be.equal(BigInt(struct_array_arg[1].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[2].foo)).to.be.equal(BigInt(struct_array_arg[2].foo)); + expect(BigInt(decoded_inputs.inputs.nested_struct_arg.foo.foo)).to.be.equal(BigInt(nested_struct_arg.foo.foo)); + expect(decoded_inputs.return_value).to.be.null; +}); diff --git a/noir/tooling/noirc_abi_wasm/test/node/structs.test.ts b/noir/tooling/noirc_abi_wasm/test/node/structs.test.ts new file mode 100644 index 00000000000..a7d104b46d3 --- /dev/null +++ b/noir/tooling/noirc_abi_wasm/test/node/structs.test.ts @@ -0,0 +1,22 @@ +import { expect } from 'chai'; +import { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import { MyNestedStruct, MyStruct } from '../shared/structs'; +import { DecodedInputs } from '../types'; + +it('correctly handles struct inputs', async () => { + const { abi, inputs } = await import('../shared/structs'); + + const initial_witness: WitnessMap = abiEncode(abi, inputs); + const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); + + const struct_arg: MyStruct = inputs.struct_arg as MyStruct; + const struct_array_arg: MyStruct[] = inputs.struct_array_arg as MyStruct[]; + const nested_struct_arg: MyNestedStruct = inputs.nested_struct_arg as MyNestedStruct; + + expect(BigInt(decoded_inputs.inputs.struct_arg.foo)).to.be.equal(BigInt(struct_arg.foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[0].foo)).to.be.equal(BigInt(struct_array_arg[0].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[1].foo)).to.be.equal(BigInt(struct_array_arg[1].foo)); + 
expect(BigInt(decoded_inputs.inputs.struct_array_arg[2].foo)).to.be.equal(BigInt(struct_array_arg[2].foo)); + expect(BigInt(decoded_inputs.inputs.nested_struct_arg.foo.foo)).to.be.equal(BigInt(nested_struct_arg.foo.foo)); + expect(decoded_inputs.return_value).to.be.null; +}); diff --git a/noir/tooling/noirc_abi_wasm/test/shared/structs.ts b/noir/tooling/noirc_abi_wasm/test/shared/structs.ts new file mode 100644 index 00000000000..6614f8f278e --- /dev/null +++ b/noir/tooling/noirc_abi_wasm/test/shared/structs.ts @@ -0,0 +1,79 @@ +import { Abi, Field, InputMap } from '@noir-lang/noirc_abi'; + +export type MyStruct = { + foo: Field; +}; + +export type MyNestedStruct = { + foo: MyStruct; +}; + +export const abi: Abi = { + parameters: [ + { + name: 'struct_arg', + type: { kind: 'struct', path: 'MyStruct', fields: [{ name: 'foo', type: { kind: 'field' } }] }, + visibility: 'private', + }, + { + name: 'struct_array_arg', + type: { + kind: 'array', + type: { + kind: 'struct', + path: 'MyStruct', + fields: [{ name: 'foo', type: { kind: 'field' } }], + }, + length: 3, + }, + visibility: 'private', + }, + { + name: 'nested_struct_arg', + type: { + kind: 'struct', + path: 'MyNestedStruct', + fields: [ + { + name: 'foo', + type: { + kind: 'struct', + path: 'MyStruct', + fields: [{ name: 'foo', type: { kind: 'field' } }], + }, + }, + ], + }, + visibility: 'private', + }, + ], + param_witnesses: { + struct_arg: [{ start: 1, end: 2 }], + struct_array_arg: [{ start: 2, end: 5 }], + nested_struct_arg: [{ start: 5, end: 6 }], + }, + return_type: null, + return_witnesses: [], +}; + +export const inputs: InputMap = { + struct_arg: { + foo: '1', + }, + struct_array_arg: [ + { + foo: '2', + }, + { + foo: '3', + }, + { + foo: '4', + }, + ], + nested_struct_arg: { + foo: { + foo: '5', + }, + }, +}; diff --git a/yarn-project/aztec-nr/safe-math/src/safe_u120.nr b/yarn-project/aztec-nr/safe-math/src/safe_u120.nr index a7051457ce6..d1ce40a01a6 100644 --- a/yarn-project/aztec-nr/safe-math/src/safe_u120.nr +++ b/yarn-project/aztec-nr/safe-math/src/safe_u120.nr @@ -1,4 +1,4 @@ -use dep::std::ops::Eq; +use dep::std::cmp::Eq; struct SafeU120 { value: u120, diff --git a/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap b/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap index a68f82ec125..5cbc1a00bb4 100644 --- a/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap +++ b/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap @@ -24,7 +24,7 @@ exports[`noir-compiler using nargo compiles the test contract 1`] = ` "verificationKey": 
"0000000200000800000000740000000f00000003515f3109623eb3c25aa5b16a1a79fd558bac7a7ce62c4560a8c537c77ce80dd339128d1d37b6582ee9e6df9567efb64313471dfa18f520f9ce53161b50dbf7731bc5f900000003515f322bc4cce83a486a92c92fd59bd84e0f92595baa639fc2ed86b00ffa0dfded2a092a669a3bdb7a273a015eda494457cc7ed5236f26cee330c290d45a33b9daa94800000003515f332729426c008c085a81bd34d8ef12dd31e80130339ef99d50013a89e4558eee6d0fa4ffe2ee7b7b62eb92608b2251ac31396a718f9b34978888789042b790a30100000003515f342be6b6824a913eb7a57b03cb1ee7bfb4de02f2f65fe8a4e97baa7766ddb353a82a8a25c49dc63778cd9fe96173f12a2bc77f3682f4c4448f98f1df82c75234a100000003515f351f85760d6ab567465aadc2f180af9eae3800e6958fec96aef53fd8a7b195d7c000c6267a0dd5cfc22b3fe804f53e266069c0e36f51885baec1e7e67650c62e170000000c515f41524954484d455449430d9d0f8ece2aa12012fa21e6e5c859e97bd5704e5c122064a66051294bc5e04213f61f54a0ebdf6fee4d4a6ecf693478191de0c2899bcd8e86a636c8d3eff43400000003515f43224a99d02c86336737c8dd5b746c40d2be6aead8393889a76a18d664029096e90f7fe81adcc92a74350eada9622ac453f49ebac24a066a1f83b394df54dfa0130000000c515f46495845445f42415345060e8a013ed289c2f9fd7473b04f6594b138ddb4b4cf6b901622a14088f04b8d2c83ff74fce56e3d5573b99c7b26d85d5046ce0c6559506acb7a675e7713eb3a00000007515f4c4f4749430721a91cb8da4b917e054f72147e1760cfe0ef3d45090ac0f4961d84ec1996961a25e787b26bd8b50b1a99450f77a424a83513c2b33af268cd253b0587ff50c700000003515f4d05dbd8623b8652511e1eb38d38887a69eceb082f807514f09e127237c5213b401b9325b48c6c225968002318095f89d0ef9cf629b2b7f0172e03bc39aacf6ed800000007515f52414e474504b57a3805e41df328f5ca9aefa40fad5917391543b7b65c6476e60b8f72e9ad07c92f3b3e11c8feae96dedc4b14a6226ef3201244f37cfc1ee5b96781f48d2b000000075349474d415f3125001d1954a18571eaa007144c5a567bb0d2be4def08a8be918b8c05e3b27d312c59ed41e09e144eab5de77ca89a2fd783be702a47c951d3112e3de02ce6e47c000000075349474d415f3223994e6a23618e60fa01c449a7ab88378709197e186d48d604bfb6931ffb15ad11c5ec7a0700570f80088fd5198ab5d5c227f2ad2a455a6edeec024156bb7beb000000075349474d415f3300cda5845f23468a13275d18bddae27c6bb189cf9aa95b6a03a0cb6688c7e8d829639b45cf8607c525cc400b55ebf90205f2f378626dc3406cc59b2d1b474fba000000075349474d415f342d299e7928496ea2d37f10b43afd6a80c90a33b483090d18069ffa275eedb2fc2f82121e8de43dc036d99b478b6227ceef34248939987a19011f065d8b5cef5c0000000010000000000000000100000002000000030000000400000005000000060000000700000008000000090000000a0000000b0000000c0000000d0000000e0000000f", }, { - "bytecode": "H4sIAAAAAAAA/6WPuwnAMAxE5Y30taUuq9hE3n+EFEnAEFL5NQdXHPcKABT48nbHk4JVNRsnCXXkGG6oNqqTk7md7CLp6i1GNAxSSZoWMvGmLFu4Bfe/r6vHBVdaDl3YAAAA", + "bytecode": "H4sIAAAAAAAA/61QwQ2AQAxqa+5MP86iTuAq7r+EGkmO8L3ygfQBBTcztx+LDbjohO50v8D7JDrlVXm216PBK+nnAG/oEqW55520Y1B+yMarbFxZ/lBfF/7wACd6RJX5AQAA", "functionType": "open", "isInternal": false, "name": "openFunction", @@ -222,7 +222,7 @@ exports[`noir-compiler using wasm binary compiles the test contract 1`] = ` "verificationKey": 
"0000000200000800000000740000000f00000003515f3109623eb3c25aa5b16a1a79fd558bac7a7ce62c4560a8c537c77ce80dd339128d1d37b6582ee9e6df9567efb64313471dfa18f520f9ce53161b50dbf7731bc5f900000003515f322bc4cce83a486a92c92fd59bd84e0f92595baa639fc2ed86b00ffa0dfded2a092a669a3bdb7a273a015eda494457cc7ed5236f26cee330c290d45a33b9daa94800000003515f332729426c008c085a81bd34d8ef12dd31e80130339ef99d50013a89e4558eee6d0fa4ffe2ee7b7b62eb92608b2251ac31396a718f9b34978888789042b790a30100000003515f342be6b6824a913eb7a57b03cb1ee7bfb4de02f2f65fe8a4e97baa7766ddb353a82a8a25c49dc63778cd9fe96173f12a2bc77f3682f4c4448f98f1df82c75234a100000003515f351f85760d6ab567465aadc2f180af9eae3800e6958fec96aef53fd8a7b195d7c000c6267a0dd5cfc22b3fe804f53e266069c0e36f51885baec1e7e67650c62e170000000c515f41524954484d455449430d9d0f8ece2aa12012fa21e6e5c859e97bd5704e5c122064a66051294bc5e04213f61f54a0ebdf6fee4d4a6ecf693478191de0c2899bcd8e86a636c8d3eff43400000003515f43224a99d02c86336737c8dd5b746c40d2be6aead8393889a76a18d664029096e90f7fe81adcc92a74350eada9622ac453f49ebac24a066a1f83b394df54dfa0130000000c515f46495845445f42415345060e8a013ed289c2f9fd7473b04f6594b138ddb4b4cf6b901622a14088f04b8d2c83ff74fce56e3d5573b99c7b26d85d5046ce0c6559506acb7a675e7713eb3a00000007515f4c4f4749430721a91cb8da4b917e054f72147e1760cfe0ef3d45090ac0f4961d84ec1996961a25e787b26bd8b50b1a99450f77a424a83513c2b33af268cd253b0587ff50c700000003515f4d05dbd8623b8652511e1eb38d38887a69eceb082f807514f09e127237c5213b401b9325b48c6c225968002318095f89d0ef9cf629b2b7f0172e03bc39aacf6ed800000007515f52414e474504b57a3805e41df328f5ca9aefa40fad5917391543b7b65c6476e60b8f72e9ad07c92f3b3e11c8feae96dedc4b14a6226ef3201244f37cfc1ee5b96781f48d2b000000075349474d415f3125001d1954a18571eaa007144c5a567bb0d2be4def08a8be918b8c05e3b27d312c59ed41e09e144eab5de77ca89a2fd783be702a47c951d3112e3de02ce6e47c000000075349474d415f3223994e6a23618e60fa01c449a7ab88378709197e186d48d604bfb6931ffb15ad11c5ec7a0700570f80088fd5198ab5d5c227f2ad2a455a6edeec024156bb7beb000000075349474d415f3300cda5845f23468a13275d18bddae27c6bb189cf9aa95b6a03a0cb6688c7e8d829639b45cf8607c525cc400b55ebf90205f2f378626dc3406cc59b2d1b474fba000000075349474d415f342d299e7928496ea2d37f10b43afd6a80c90a33b483090d18069ffa275eedb2fc2f82121e8de43dc036d99b478b6227ceef34248939987a19011f065d8b5cef5c0000000010000000000000000100000002000000030000000400000005000000060000000700000008000000090000000a0000000b0000000c0000000d0000000e0000000f", }, { - "bytecode": "H4sIAAAAAAAA/6WPuwnAMAxE5Y30taUuq9hE3n+EFEnAEFL5NQdXHPcKABT48nbHk4JVNRsnCXXkGG6oNqqTk7md7CLp6i1GNAxSSZoWMvGmLFu4Bfe/r6vHBVdaDl3YAAAA", + "bytecode": "H4sIAAAAAAAA/61QwQ2AQAxqa+5MP86iTuAq7r+EGkmO8L3ygfQBBTcztx+LDbjohO50v8D7JDrlVXm216PBK+nnAG/oEqW55520Y1B+yMarbFxZ/lBfF/7wACd6RJX5AQAA", "functionType": "open", "isInternal": false, "name": "openFunction", diff --git a/yarn-project/noir-protocol-circuits/src/crates/bug-collecting-crate/should-compile-different-namespace.nr b/yarn-project/noir-protocol-circuits/src/crates/bug-collecting-crate/should-compile-different-namespace.nr index 39e0eed8254..94200d15a38 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/bug-collecting-crate/should-compile-different-namespace.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/bug-collecting-crate/should-compile-different-namespace.nr @@ -1,4 +1,3 @@ - struct Foo { x : N, } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/append_only_tree_snapshot.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/append_only_tree_snapshot.nr index 4856f446520..744a2062f78 100644 --- 
a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/append_only_tree_snapshot.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/append_only_tree_snapshot.nr @@ -1,4 +1,4 @@ -use dep::std::ops::Eq; +use dep::std::cmp::Eq; struct AppendOnlyTreeSnapshot { root : Field, diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr index 8df873f0a3b..e62bcd6ff73 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr @@ -1,6 +1,6 @@ use crate::abis::global_variables::GlobalVariables; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; struct ConstantRollupData { // The very latest roots as at the very beginning of the entire rollup: diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr index d3a92124c2f..daf88058596 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr @@ -1,5 +1,5 @@ use dep::types::constants::GENERATOR_INDEX__GLOBAL_VARIABLES; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use dep::types::traits::Hash; struct GlobalVariables { diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/public_data_tree_leaf.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/public_data_tree_leaf.nr index e820f172175..0bd347a6116 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/public_data_tree_leaf.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/public_data_tree_leaf.nr @@ -1,4 +1,4 @@ -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use dep::types::traits::{Empty, Hash}; struct PublicDataTreeLeafPreimage { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr index 668dd8fce90..b27d16a6a9b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr @@ -7,7 +7,7 @@ use crate::{ }, hash::pedersen_hash, }; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::Hash; // docs:start:call-context diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_request.nr index da97e8b6207..c1dd67a1bda 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_request.nr @@ -1,5 +1,5 @@ use crate::address::AztecAddress; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::Empty; struct CallerContext { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_selector.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_selector.nr index d076cc09bf8..c9ddf6da8be 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_selector.nr 
+++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_selector.nr @@ -1,5 +1,5 @@ use crate::utils::field::field_from_bytes; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; global SELECTOR_SIZE = 4; diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr index ece38501b6b..2644eed2fc7 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr @@ -1,6 +1,6 @@ use crate::address::{AztecAddress, EthAddress}; use crate::constants::GENERATOR_INDEX__CONTRACT_LEAF; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::{Empty, Hash}; struct NewContractData { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr index f009f6d4abc..096cc439c33 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr @@ -1,5 +1,5 @@ use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::{Empty,Hash}; struct PublicDataRead { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr index 67ff120f086..502915f47ef 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr @@ -1,5 +1,5 @@ use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::{Empty, Hash}; struct PublicDataUpdateRequest { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/side_effect.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/side_effect.nr index da13d853fe9..b74ecdb7bbf 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/side_effect.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/side_effect.nr @@ -1,5 +1,5 @@ use crate::constants::{GENERATOR_INDEX__SIDE_EFFECT}; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::{Empty, Hash}; struct SideEffect{ diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/address.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/address.nr index 5bea56809f4..7c5cb9959c2 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/address.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/address.nr @@ -7,7 +7,7 @@ use crate::{ utils, grumpkin_point::GrumpkinPoint, }; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::Empty; // Aztec address diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr index d9fab6779b2..0cfbc1cabb1 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr @@ -5,7 +5,7 @@ use crate::{ }, 
hash::pedersen_hash, }; -use dep::std::ops::Eq; +use dep::std::cmp::Eq; use crate::traits::{Hash, Empty}; struct StorageUpdateRequest{
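
The remaining hunks across the aztec-nr and protocol-circuits crates are one mechanical change: `Eq` is now imported from `dep::std::cmp` rather than `dep::std::ops`, presumably aligning Noir's stdlib layout with Rust, where the comparison traits live in `std::cmp`. A tiny Rust parallel for reference (the struct and field are illustrative, not the Noir originals):

```rust
// Illustrative Rust analogue of the updated imports: equality lives under
// std::cmp, the same module path the Noir code now uses (`dep::std::cmp::Eq`).
#[derive(Debug, PartialEq)]
struct Snapshot {
    root: [u8; 32], // stand-in for the Noir `Field` member
}

// Spelling out the full path to make the `std::cmp` location explicit.
impl std::cmp::Eq for Snapshot {}
```
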