From 89b11ae8e81660c74361e0ecc782550d9e7b59f2 Mon Sep 17 00:00:00 2001 From: AztecBot Date: Fri, 17 May 2024 18:34:33 +0000 Subject: [PATCH] chore: fix poor performance and long compile times in value_note.derement() (https://github.com/AztecProtocol/aztec-packages/pull/6523) https://github.com/AztecProtocol/aztec-packages/pull/6405 added a very complicated call (`get_npk_m_hash`) to `destroy_note`, not realizing that `destroy_note` is called in a loop by `decrement`. The loop unrolling caused multiple calls to this function, even though they all had the same arguments, ultimately resulting in humongous RAM usage during compilation (over 40GB just for this contract) and very compilation times (from 30s to multiple minutes). This PR fixes this simply inlining the code for `destroy_note` and fetching the key once at the beginning. It does worry me slightly however that such a large performance hit was not noticed - this likely affected test times as well. --- .aztec-sync-commit | 2 +- .github/workflows/gates_report.yml | 144 +- .github/workflows/test-js-packages.yml | 5 + .tokeignore | 12 + Cargo.lock | 284 -- Cargo.toml | 2 - acvm-repo/acir/benches/serialization.rs | 2 +- acvm-repo/acir/codegen/acir.cpp | 270 +- acvm-repo/acir/src/circuit/opcodes.rs | 16 +- .../opcodes/black_box_function_call.rs | 22 +- acvm-repo/acir/src/lib.rs | 3 +- .../acir/tests/test_program_serialization.rs | 41 +- .../optimizers/constant_backpropagation.rs | 2 +- .../compiler/optimizers/redundant_range.rs | 2 +- .../src/pwg/blackbox/embedded_curve_ops.rs | 41 +- acvm-repo/acvm/src/pwg/blackbox/mod.rs | 12 +- acvm-repo/acvm/src/pwg/mod.rs | 2 +- acvm-repo/acvm/tests/solver.rs | 8 +- acvm-repo/acvm_js/build.sh | 2 +- acvm-repo/acvm_js/test/shared/memory_op.ts | 8 +- .../acvm_js/test/shared/multi_scalar_mul.ts | 21 +- .../src/curve_specific_solver.rs | 18 +- .../src/embedded_curve_ops.rs | 104 +- acvm-repo/bn254_blackbox_solver/src/lib.rs | 16 +- acvm-repo/brillig/src/black_box.rs | 7 + 
acvm-repo/brillig_vm/src/black_box.rs | 78 +- aztec_macros/src/lib.rs | 28 +- .../src/transforms/contract_interface.rs | 3 +- aztec_macros/src/transforms/functions.rs | 15 + aztec_macros/src/utils/errors.rs | 6 + .../scripts/codegen-verifiers.sh | 36 +- compiler/noirc_driver/src/lib.rs | 31 +- .../noirc_driver/tests/stdlib_warnings.rs | 3 +- .../brillig/brillig_gen/brillig_black_box.rs | 8 +- .../src/brillig/brillig_gen/brillig_block.rs | 38 +- .../noirc_evaluator/src/brillig/brillig_ir.rs | 11 +- .../brillig/brillig_ir/codegen_intrinsic.rs | 62 +- .../src/brillig/brillig_ir/debug_show.rs | 13 +- .../src/brillig/brillig_ir/entry_point.rs | 2 +- .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 9 +- .../ssa/acir_gen/acir_ir/generated_acir.rs | 17 +- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 35 +- .../noirc_evaluator/src/ssa/opt/inlining.rs | 8 +- compiler/noirc_frontend/src/ast/function.rs | 9 + compiler/noirc_frontend/src/ast/statement.rs | 5 +- .../src/elaborator/expressions.rs | 604 ++++ compiler/noirc_frontend/src/elaborator/mod.rs | 782 ++++++ .../noirc_frontend/src/elaborator/patterns.rs | 465 +++ .../noirc_frontend/src/elaborator/scope.rs | 200 ++ .../src/elaborator/statements.rs | 409 +++ .../noirc_frontend/src/elaborator/types.rs | 1438 ++++++++++ .../src/hir/def_collector/dc_crate.rs | 104 +- .../src/hir/def_collector/dc_mod.rs | 16 +- .../noirc_frontend/src/hir/def_map/mod.rs | 10 +- .../src/hir/resolution/import.rs | 9 + .../src/hir/resolution/resolver.rs | 12 +- .../noirc_frontend/src/hir/type_check/expr.rs | 4 +- .../noirc_frontend/src/hir/type_check/mod.rs | 2 +- compiler/noirc_frontend/src/hir_def/expr.rs | 11 +- .../noirc_frontend/src/hir_def/function.rs | 5 +- compiler/noirc_frontend/src/hir_def/types.rs | 8 +- compiler/noirc_frontend/src/lib.rs | 1 + compiler/noirc_frontend/src/node_interner.rs | 6 +- compiler/noirc_frontend/src/tests.rs | 2482 ++++++++--------- .../src/tests/name_shadowing.rs | 419 +++ compiler/wasm/src/compile.rs | 21 +- 
compiler/wasm/src/compile_new.rs | 30 +- cspell.json | 6 + .../barretenberg/_category_.json | 6 + .../getting_started/barretenberg/index.md | 54 + .../hello_noir/_category_.json | 2 +- docs/docs/getting_started/hello_noir/index.md | 41 +- .../hello_noir/project_breakdown.md | 66 +- .../getting_started/tooling/noir_codegen.md | 2 +- docs/docs/how_to/how-to-oracles.md | 4 +- docs/docs/how_to/how-to-solidity-verifier.md | 44 +- .../docs/noir/concepts/data_types/booleans.md | 3 - .../docs/noir/concepts/data_types/integers.md | 2 +- docs/docs/noir/standard_library/traits.md | 9 + examples/codegen-verifier/.gitignore | 4 + examples/codegen-verifier/Nargo.toml | 7 + examples/codegen-verifier/Prover.toml | 2 + examples/codegen-verifier/codegen_verifier.sh | 38 + examples/codegen-verifier/foundry.toml | 6 + examples/codegen-verifier/src/main.nr | 3 + examples/codegen-verifier/test.sh | 15 + examples/prove_and_verify/Nargo.toml | 7 + examples/prove_and_verify/Prover.toml | 2 + examples/prove_and_verify/proofs/proof | Bin 0 -> 2176 bytes examples/prove_and_verify/prove_and_verify.sh | 14 + examples/prove_and_verify/src/main.nr | 3 + examples/prove_and_verify/test.sh | 10 + noir_stdlib/src/aes128.nr | 3 + noir_stdlib/src/embedded_curve_ops.nr | 40 +- noir_stdlib/src/field/bn254.nr | 57 +- noir_stdlib/src/ops.nr | 2 +- noir_stdlib/src/ops/arith.nr | 11 + noir_stdlib/src/ops/bit.nr | 31 + noir_stdlib/src/uint128.nr | 278 +- scripts/count_loc.sh | 33 + scripts/install_bb.sh | 9 + security/insectarium/noir_stdlib.md | 61 + .../intrinsic_die/src/main.nr | 4 +- .../brillig_embedded_curve/src/main.nr | 10 +- .../embedded_curve_ops/src/main.nr | 10 +- .../no_predicates_brillig/Nargo.toml | 7 + .../no_predicates_brillig/Prover.toml | 2 + .../no_predicates_brillig/src/main.nr | 16 + .../execution_success/unit_value/Nargo.toml | 4 +- .../embedded_curve_ops/Nargo.toml | 7 + .../embedded_curve_ops/src/main.nr | 37 + test_programs/rebuild.sh | 5 +- tooling/backend_interface/CHANGELOG.md | 
233 -- tooling/backend_interface/Cargo.toml | 35 - tooling/backend_interface/src/cli/contract.rs | 71 - tooling/backend_interface/src/cli/gates.rs | 64 - tooling/backend_interface/src/cli/mod.rs | 39 - .../src/cli/proof_as_fields.rs | 38 - tooling/backend_interface/src/cli/prove.rs | 66 - tooling/backend_interface/src/cli/verify.rs | 74 - tooling/backend_interface/src/cli/version.rs | 29 - .../backend_interface/src/cli/vk_as_fields.rs | 39 - tooling/backend_interface/src/cli/write_vk.rs | 58 - tooling/backend_interface/src/download.rs | 58 - tooling/backend_interface/src/lib.rs | 150 - tooling/backend_interface/src/proof_system.rs | 169 -- .../backend_interface/src/smart_contract.rs | 70 - .../test-binaries/mock_backend/Cargo.lock | 223 -- .../test-binaries/mock_backend/Cargo.toml | 11 - .../mock_backend/src/contract_cmd.rs | 21 - .../mock_backend/src/gates_cmd.rs | 18 - .../test-binaries/mock_backend/src/main.rs | 41 - .../mock_backend/src/prove_cmd.rs | 25 - .../mock_backend/src/verify_cmd.rs | 24 - .../mock_backend/src/write_vk_cmd.rs | 20 - tooling/bb_abstraction_leaks/Cargo.toml | 17 - tooling/bb_abstraction_leaks/build.rs | 58 - tooling/bb_abstraction_leaks/src/lib.rs | 26 - tooling/debugger/src/context.rs | 6 +- tooling/debugger/tests/debug.rs | 6 - tooling/lsp/src/lib.rs | 2 +- tooling/lsp/src/notifications/mod.rs | 4 +- tooling/lsp/src/requests/code_lens_request.rs | 2 +- tooling/lsp/src/requests/goto_declaration.rs | 2 +- tooling/lsp/src/requests/goto_definition.rs | 2 +- tooling/lsp/src/requests/test_run.rs | 2 +- tooling/lsp/src/requests/tests.rs | 2 +- tooling/lsp/src/solver.rs | 19 +- tooling/nargo/src/artifacts/contract.rs | 6 +- tooling/nargo/src/artifacts/debug.rs | 27 + tooling/nargo/src/artifacts/program.rs | 2 +- tooling/nargo/src/constants.rs | 2 - tooling/nargo/src/package.rs | 7 +- tooling/nargo_cli/Cargo.toml | 6 +- tooling/nargo_cli/build.rs | 7 - tooling/nargo_cli/src/backends.rs | 39 - .../src/cli/backend_cmd/current_cmd.rs | 13 - 
.../src/cli/backend_cmd/install_cmd.rs | 30 - .../nargo_cli/src/cli/backend_cmd/ls_cmd.rs | 34 - tooling/nargo_cli/src/cli/backend_cmd/mod.rs | 41 - .../src/cli/backend_cmd/uninstall_cmd.rs | 59 - .../nargo_cli/src/cli/backend_cmd/use_cmd.rs | 26 - tooling/nargo_cli/src/cli/check_cmd.rs | 22 +- .../nargo_cli/src/cli/codegen_verifier_cmd.rs | 83 - tooling/nargo_cli/src/cli/compile_cmd.rs | 21 +- tooling/nargo_cli/src/cli/dap_cmd.rs | 2 +- tooling/nargo_cli/src/cli/execute_cmd.rs | 35 +- tooling/nargo_cli/src/cli/export_cmd.rs | 1 + tooling/nargo_cli/src/cli/fs/inputs.rs | 98 - tooling/nargo_cli/src/cli/fs/mod.rs | 12 - tooling/nargo_cli/src/cli/fs/program.rs | 11 - tooling/nargo_cli/src/cli/fs/proof.rs | 20 - tooling/nargo_cli/src/cli/info_cmd.rs | 159 +- tooling/nargo_cli/src/cli/mod.rs | 25 +- tooling/nargo_cli/src/cli/prove_cmd.rs | 154 - tooling/nargo_cli/src/cli/test_cmd.rs | 2 + tooling/nargo_cli/src/cli/verify_cmd.rs | 109 - tooling/nargo_cli/src/errors.rs | 27 +- tooling/nargo_cli/src/main.rs | 1 - tooling/nargo_cli/tests/codegen-verifier.rs | 37 - tooling/nargo_cli/tests/stdlib-tests.rs | 17 +- tooling/nargo_fmt/src/config.rs | 2 +- tooling/noir_codegen/src/index.ts | 79 +- tooling/noir_codegen/src/main.ts | 2 +- tooling/noir_codegen/src/noir_types.ts | 187 -- tooling/noir_codegen/src/parseArgs.ts | 17 + .../src/utils/abi_type_with_generics.ts | 139 + .../noir_codegen/src/utils/demonomorphizer.ts | 284 ++ .../src/utils/typings_generator.ts | 324 +++ tooling/noir_codegen/test/index.test.ts | 18 +- tooling/noir_codegen/test/test_lib/src/lib.nr | 22 +- .../noir_js_backend_barretenberg/package.json | 2 +- tooling/noirc_abi/src/lib.rs | 10 +- yarn.lock | 13 +- 194 files changed, 8304 insertions(+), 5132 deletions(-) create mode 100644 .tokeignore create mode 100644 compiler/noirc_frontend/src/elaborator/expressions.rs create mode 100644 compiler/noirc_frontend/src/elaborator/mod.rs create mode 100644 compiler/noirc_frontend/src/elaborator/patterns.rs create mode 
100644 compiler/noirc_frontend/src/elaborator/scope.rs create mode 100644 compiler/noirc_frontend/src/elaborator/statements.rs create mode 100644 compiler/noirc_frontend/src/elaborator/types.rs create mode 100644 compiler/noirc_frontend/src/tests/name_shadowing.rs create mode 100644 docs/docs/getting_started/barretenberg/_category_.json create mode 100644 docs/docs/getting_started/barretenberg/index.md create mode 100644 examples/codegen-verifier/.gitignore create mode 100644 examples/codegen-verifier/Nargo.toml create mode 100644 examples/codegen-verifier/Prover.toml create mode 100755 examples/codegen-verifier/codegen_verifier.sh create mode 100644 examples/codegen-verifier/foundry.toml create mode 100644 examples/codegen-verifier/src/main.nr create mode 100755 examples/codegen-verifier/test.sh create mode 100644 examples/prove_and_verify/Nargo.toml create mode 100644 examples/prove_and_verify/Prover.toml create mode 100644 examples/prove_and_verify/proofs/proof create mode 100755 examples/prove_and_verify/prove_and_verify.sh create mode 100644 examples/prove_and_verify/src/main.nr create mode 100755 examples/prove_and_verify/test.sh create mode 100755 scripts/count_loc.sh create mode 100755 scripts/install_bb.sh create mode 100644 security/insectarium/noir_stdlib.md create mode 100644 test_programs/execution_success/no_predicates_brillig/Nargo.toml create mode 100644 test_programs/execution_success/no_predicates_brillig/Prover.toml create mode 100644 test_programs/execution_success/no_predicates_brillig/src/main.nr create mode 100644 test_programs/noir_test_success/embedded_curve_ops/Nargo.toml create mode 100644 test_programs/noir_test_success/embedded_curve_ops/src/main.nr delete mode 100644 tooling/backend_interface/CHANGELOG.md delete mode 100644 tooling/backend_interface/Cargo.toml delete mode 100644 tooling/backend_interface/src/cli/contract.rs delete mode 100644 tooling/backend_interface/src/cli/gates.rs delete mode 100644 
tooling/backend_interface/src/cli/mod.rs delete mode 100644 tooling/backend_interface/src/cli/proof_as_fields.rs delete mode 100644 tooling/backend_interface/src/cli/prove.rs delete mode 100644 tooling/backend_interface/src/cli/verify.rs delete mode 100644 tooling/backend_interface/src/cli/version.rs delete mode 100644 tooling/backend_interface/src/cli/vk_as_fields.rs delete mode 100644 tooling/backend_interface/src/cli/write_vk.rs delete mode 100644 tooling/backend_interface/src/download.rs delete mode 100644 tooling/backend_interface/src/lib.rs delete mode 100644 tooling/backend_interface/src/proof_system.rs delete mode 100644 tooling/backend_interface/src/smart_contract.rs delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/Cargo.lock delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/Cargo.toml delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/src/main.rs delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs delete mode 100644 tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs delete mode 100644 tooling/bb_abstraction_leaks/Cargo.toml delete mode 100644 tooling/bb_abstraction_leaks/build.rs delete mode 100644 tooling/bb_abstraction_leaks/src/lib.rs delete mode 100644 tooling/nargo_cli/src/backends.rs delete mode 100644 tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs delete mode 100644 tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs delete mode 100644 tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs delete mode 100644 tooling/nargo_cli/src/cli/backend_cmd/mod.rs delete mode 100644 tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs delete mode 100644 
tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs delete mode 100644 tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs delete mode 100644 tooling/nargo_cli/src/cli/fs/proof.rs delete mode 100644 tooling/nargo_cli/src/cli/prove_cmd.rs delete mode 100644 tooling/nargo_cli/src/cli/verify_cmd.rs delete mode 100644 tooling/nargo_cli/tests/codegen-verifier.rs delete mode 100644 tooling/noir_codegen/src/noir_types.ts create mode 100644 tooling/noir_codegen/src/utils/abi_type_with_generics.ts create mode 100644 tooling/noir_codegen/src/utils/demonomorphizer.ts create mode 100644 tooling/noir_codegen/src/utils/typings_generator.ts diff --git a/.aztec-sync-commit b/.aztec-sync-commit index b9e87cffed..e596d61219 100644 --- a/.aztec-sync-commit +++ b/.aztec-sync-commit @@ -1 +1 @@ -1c74387e56b49102043fc6701735325a891e6c65 +002b4aa556041aa1a12f0fd09bb5ad0b07f04daa diff --git a/.github/workflows/gates_report.yml b/.github/workflows/gates_report.yml index ba4cb600c5..3d4bef1940 100644 --- a/.github/workflows/gates_report.yml +++ b/.github/workflows/gates_report.yml @@ -1,88 +1,88 @@ -name: Report gates diff +# name: Report gates diff -on: - push: - branches: - - master - pull_request: +# on: +# push: +# branches: +# - master +# pull_request: -jobs: - build-nargo: - runs-on: ubuntu-latest - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] +# jobs: +# build-nargo: +# runs-on: ubuntu-latest +# strategy: +# matrix: +# target: [x86_64-unknown-linux-gnu] - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 +# steps: +# - name: Checkout Noir repo +# uses: actions/checkout@v4 - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.74.1 +# - name: Setup toolchain +# uses: dtolnay/rust-toolchain@1.74.1 - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} +# - uses: Swatinem/rust-cache@v2 +# with: +# key: ${{ matrix.target }} +# cache-on-failure: true +# save-if: ${{ 
github.event_name != 'merge_group' }} - - name: Build Nargo - run: cargo build --package nargo_cli --release +# - name: Build Nargo +# run: cargo build --package nargo_cli --release - - name: Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz +# - name: Package artifacts +# run: | +# mkdir dist +# cp ./target/release/nargo ./dist/nargo +# 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: nargo - path: ./dist/* - retention-days: 3 +# - name: Upload artifact +# uses: actions/upload-artifact@v4 +# with: +# name: nargo +# path: ./dist/* +# retention-days: 3 - compare_gas_reports: - needs: [build-nargo] - runs-on: ubuntu-latest - permissions: - pull-requests: write +# compare_gas_reports: +# needs: [build-nargo] +# runs-on: ubuntu-latest +# permissions: +# pull-requests: write - steps: - - uses: actions/checkout@v4 +# steps: +# - uses: actions/checkout@v4 - - name: Download nargo binary - uses: actions/download-artifact@v4 - with: - name: nargo - path: ./nargo +# - name: Download nargo binary +# uses: actions/download-artifact@v4 +# with: +# name: nargo +# path: ./nargo - - name: Set nargo on PATH - run: | - nargo_binary="${{ github.workspace }}/nargo/nargo" - chmod +x $nargo_binary - echo "$(dirname $nargo_binary)" >> $GITHUB_PATH - export PATH="$PATH:$(dirname $nargo_binary)" - nargo -V +# - name: Set nargo on PATH +# run: | +# nargo_binary="${{ github.workspace }}/nargo/nargo" +# chmod +x $nargo_binary +# echo "$(dirname $nargo_binary)" >> $GITHUB_PATH +# export PATH="$PATH:$(dirname $nargo_binary)" +# nargo -V - - name: Generate gates report - working-directory: ./test_programs - run: | - ./gates_report.sh - mv gates_report.json ../gates_report.json +# - name: Generate gates report +# working-directory: ./test_programs +# run: | +# ./gates_report.sh +# 
mv gates_report.json ../gates_report.json - - name: Compare gates reports - id: gates_diff - uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 - with: - report: gates_report.json - summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) +# - name: Compare gates reports +# id: gates_diff +# uses: vezenovm/noir-gates-diff@acf12797860f237117e15c0d6e08d64253af52b6 +# with: +# report: gates_report.json +# summaryQuantile: 0.9 # only display the 10% most significant circuit size diffs in the summary (defaults to 20%) - - name: Add gates diff to sticky comment - if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' - uses: marocchino/sticky-pull-request-comment@v2 - with: - # delete the comment in case changes no longer impact circuit sizes - delete: ${{ !steps.gates_diff.outputs.markdown }} - message: ${{ steps.gates_diff.outputs.markdown }} +# - name: Add gates diff to sticky comment +# if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' +# uses: marocchino/sticky-pull-request-comment@v2 +# with: +# # delete the comment in case changes no longer impact circuit sizes +# delete: ${{ !steps.gates_diff.outputs.markdown }} +# message: ${{ steps.gates_diff.outputs.markdown }} diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index e6098dd269..c8a8be998e 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -399,6 +399,11 @@ jobs: - name: Checkout uses: actions/checkout@v4 + - name: Download bb binary + run: | + # Adds `bb` to PATH + ./scripts/install_bb.sh + - name: Download nargo binary uses: actions/download-artifact@v4 with: diff --git a/.tokeignore b/.tokeignore new file mode 100644 index 0000000000..55f24e41db --- /dev/null +++ b/.tokeignore @@ -0,0 +1,12 @@ +docs +scripts + +# aztec_macros is explicitly considered OOS for Noir audit 
+aztec_macros + +# config files +*.toml +*.md +*.json +*.txt +*.config.mjs diff --git a/Cargo.lock b/Cargo.lock index a8c63c032a..63a40ee132 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -451,25 +451,6 @@ dependencies = [ "regex", ] -[[package]] -name = "backend-interface" -version = "0.29.0" -dependencies = [ - "acvm", - "bb_abstraction_leaks", - "build-target", - "const_format", - "dirs", - "flate2", - "reqwest", - "serde_json", - "tar", - "tempfile", - "test-binary", - "thiserror", - "tracing", -] - [[package]] name = "backtrace" version = "0.3.68" @@ -509,15 +490,6 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" -[[package]] -name = "bb_abstraction_leaks" -version = "0.11.0" -dependencies = [ - "acvm", - "build-target", - "const_format", -] - [[package]] name = "bincode" version = "1.3.3" @@ -680,12 +652,6 @@ dependencies = [ "safe-regex", ] -[[package]] -name = "build-target" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "832133bbabbbaa9fbdba793456a2827627a7d2b8fb96032fa1e7666d7895832b" - [[package]] name = "bumpalo" version = "3.13.0" @@ -1568,15 +1534,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" -[[package]] -name = "encoding_rs" -version = "0.8.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "endian-type" version = "0.1.2" @@ -2011,25 +1968,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "h2" -version = "0.3.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" -dependencies = [ - "bytes", - "fnv", - "futures-core", - 
"futures-sink", - "futures-util", - "http", - "indexmap 2.0.0", - "slab", - "tokio", - "tokio-util 0.7.10", - "tracing", -] - [[package]] name = "half" version = "1.8.2" @@ -2149,7 +2087,6 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2", "http", "http-body", "httparse", @@ -2163,20 +2100,6 @@ dependencies = [ "want", ] -[[package]] -name = "hyper-rustls" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" -dependencies = [ - "futures-util", - "http", - "hyper", - "rustls", - "tokio", - "tokio-rustls", -] - [[package]] name = "iai" version = "0.1.1" @@ -2364,12 +2287,6 @@ dependencies = [ "cfg-if 1.0.0", ] -[[package]] -name = "ipnet" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" - [[package]] name = "is-terminal" version = "0.4.9" @@ -2768,12 +2685,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -2846,7 +2757,6 @@ dependencies = [ "assert_cmd", "assert_fs", "async-lsp", - "backend-interface", "bn254_blackbox_solver", "build-data", "clap", @@ -2857,7 +2767,6 @@ dependencies = [ "dap", "dirs", "fm", - "hex", "iai", "iter-extended", "nargo", @@ -4018,45 +3927,6 @@ dependencies = [ "bytecheck", ] -[[package]] -name = "reqwest" -version = "0.11.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" -dependencies = [ - "base64 0.21.2", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-rustls", - "ipnet", - "js-sys", - "log", 
- "mime", - "once_cell", - "percent-encoding 2.3.0", - "pin-project-lite", - "rustls", - "rustls-pemfile", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-rustls", - "tower-service", - "url 2.4.0", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots", - "winreg", -] - [[package]] name = "rexpect" version = "0.5.0" @@ -4090,36 +3960,6 @@ dependencies = [ "bytemuck", ] -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - -[[package]] -name = "ring" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" -dependencies = [ - "cc", - "cfg-if 1.0.0", - "getrandom 0.2.10", - "libc", - "spin 0.9.8", - "untrusted 0.9.0", - "windows-sys 0.52.0", -] - [[package]] name = "rkyv" version = "0.7.42" @@ -4217,37 +4057,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "rustls" -version = "0.21.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fecbfb7b1444f477b345853b1fce097a2c6fb637b2bfb87e6bc5db0f043fae4" -dependencies = [ - "log", - "ring 0.17.8", - "rustls-webpki", - "sct", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" -dependencies = [ - "base64 0.21.2", -] - -[[package]] -name = "rustls-webpki" -version = "0.101.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" -dependencies = [ - "ring 0.17.8", - "untrusted 0.9.0", -] - [[package]] name = "rustversion" version = 
"1.0.14" @@ -4380,16 +4189,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "sct" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" -dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", -] - [[package]] name = "seahash" version = "4.1.0" @@ -4521,18 +4320,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - [[package]] name = "serde_with" version = "3.2.0" @@ -4715,18 +4502,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - [[package]] name = "spki" version = "0.6.0" @@ -4850,17 +4625,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" -[[package]] -name = "tar" -version = "0.4.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" -dependencies = [ - "filetime", - "libc", - "xattr", -] - [[package]] name = "target-lexicon" version = "0.12.11" @@ -5072,16 +4836,6 @@ dependencies = [ "syn 2.0.32", ] -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls", - "tokio", -] - [[package]] name = "tokio-stream" version = "0.1.15" @@ -5119,7 +4873,6 @@ dependencies = [ "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -5366,18 +5119,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - [[package]] name = "url" version = "1.7.2" @@ -5750,12 +5491,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki-roots" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" - [[package]] name = "winapi" version = "0.3.9" @@ -5980,16 +5715,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if 1.0.0", - "windows-sys 0.48.0", -] - [[package]] name = "wyz" version = "0.5.1" @@ -5999,15 +5724,6 @@ dependencies = [ "tap", ] -[[package]] -name = "xattr" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" -dependencies = [ - "libc", -] - [[package]] name = "zerocopy" version = "0.7.32" diff --git a/Cargo.toml b/Cargo.toml index 
f744d6d0cf..b5a5c68d73 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,8 +13,6 @@ members = [ "compiler/fm", "compiler/wasm", # Crates related to tooling built on top of the Noir compiler - "tooling/backend_interface", - "tooling/bb_abstraction_leaks", "tooling/lsp", "tooling/debugger", "tooling/nargo", diff --git a/acvm-repo/acir/benches/serialization.rs b/acvm-repo/acir/benches/serialization.rs index e51726e390..a7f32b4a4c 100644 --- a/acvm-repo/acir/benches/serialization.rs +++ b/acvm-repo/acir/benches/serialization.rs @@ -33,7 +33,7 @@ fn sample_program(num_opcodes: usize) -> Program { functions: vec![Circuit { current_witness_index: 4000, opcodes: assert_zero_opcodes.to_vec(), - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), public_parameters: PublicInputs(BTreeSet::from([Witness(5)])), return_values: PublicInputs(BTreeSet::from([Witness(6)])), diff --git a/acvm-repo/acir/codegen/acir.cpp b/acvm-repo/acir/codegen/acir.cpp index b7e75c4320..47e184a633 100644 --- a/acvm-repo/acir/codegen/acir.cpp +++ b/acvm-repo/acir/codegen/acir.cpp @@ -149,7 +149,7 @@ namespace Program { struct MultiScalarMul { std::vector points; std::vector scalars; - std::array outputs; + std::array outputs; friend bool operator==(const MultiScalarMul&, const MultiScalarMul&); std::vector bincodeSerialize() const; @@ -157,11 +157,9 @@ namespace Program { }; struct EmbeddedCurveAdd { - Program::FunctionInput input1_x; - Program::FunctionInput input1_y; - Program::FunctionInput input2_x; - Program::FunctionInput input2_y; - std::array outputs; + std::array input1; + std::array input2; + std::array outputs; friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); std::vector bincodeSerialize() const; @@ -292,6 +290,33 @@ namespace Program { static BlockId bincodeDeserialize(std::vector); }; + struct BlockType { + + struct 
Memory { + friend bool operator==(const Memory&, const Memory&); + std::vector bincodeSerialize() const; + static Memory bincodeDeserialize(std::vector); + }; + + struct CallData { + friend bool operator==(const CallData&, const CallData&); + std::vector bincodeSerialize() const; + static CallData bincodeDeserialize(std::vector); + }; + + struct ReturnData { + friend bool operator==(const ReturnData&, const ReturnData&); + std::vector bincodeSerialize() const; + static ReturnData bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const BlockType&, const BlockType&); + std::vector bincodeSerialize() const; + static BlockType bincodeDeserialize(std::vector); + }; + struct Expression { std::vector> mul_terms; std::vector> linear_combinations; @@ -428,6 +453,7 @@ namespace Program { struct MemoryInit { Program::BlockId block_id; std::vector init; + Program::BlockType block_type; friend bool operator==(const MemoryInit&, const MemoryInit&); std::vector bincodeSerialize() const; @@ -754,8 +780,10 @@ namespace Program { struct EmbeddedCurveAdd { Program::MemoryAddress input1_x; Program::MemoryAddress input1_y; + Program::MemoryAddress input1_infinite; Program::MemoryAddress input2_x; Program::MemoryAddress input2_y; + Program::MemoryAddress input2_infinite; Program::HeapArray result; friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); @@ -842,7 +870,17 @@ namespace Program { static Sha256Compression bincodeDeserialize(std::vector); }; - std::variant value; + struct ToRadix { + Program::MemoryAddress input; + uint32_t radix; + Program::HeapArray output; + + friend bool operator==(const ToRadix&, const ToRadix&); + std::vector bincodeSerialize() const; + static ToRadix bincodeDeserialize(std::vector); + }; + + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -2762,10 +2800,8 @@ Program::BlackBoxFuncCall::MultiScalarMul serde::Deserializable 
template void serde::Serializable::serialize(const Program::BlackBoxFuncCall::EmbeddedCurveAdd &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.input1_x, serializer); - serde::Serializable::serialize(obj.input1_y, serializer); - serde::Serializable::serialize(obj.input2_x, serializer); - serde::Serializable::serialize(obj.input2_y, serializer); + serde::Serializable::serialize(obj.input1, serializer); + serde::Serializable::serialize(obj.input2, serializer); serde::Serializable::serialize(obj.outputs, serializer); } @@ -2801,10 +2835,8 @@ template <> template Program::BlackBoxFuncCall::EmbeddedCurveAdd serde::Deserializable::deserialize(Deserializer &deserializer) { Program::BlackBoxFuncCall::EmbeddedCurveAdd obj; - obj.input1_x = serde::Deserializable::deserialize(deserializer); - obj.input1_y = serde::Deserializable::deserialize(deserializer); - obj.input2_x = serde::Deserializable::deserialize(deserializer); - obj.input2_y = serde::Deserializable::deserialize(deserializer); + obj.input1 = serde::Deserializable::deserialize(deserializer); + obj.input2 = serde::Deserializable::deserialize(deserializer); obj.outputs = serde::Deserializable::deserialize(deserializer); return obj; } @@ -3871,8 +3903,10 @@ namespace Program { inline bool operator==(const BlackBoxOp::EmbeddedCurveAdd &lhs, const BlackBoxOp::EmbeddedCurveAdd &rhs) { if (!(lhs.input1_x == rhs.input1_x)) { return false; } if (!(lhs.input1_y == rhs.input1_y)) { return false; } + if (!(lhs.input1_infinite == rhs.input1_infinite)) { return false; } if (!(lhs.input2_x == rhs.input2_x)) { return false; } if (!(lhs.input2_y == rhs.input2_y)) { return false; } + if (!(lhs.input2_infinite == rhs.input2_infinite)) { return false; } if (!(lhs.result == rhs.result)) { return false; } return true; } @@ -3899,8 +3933,10 @@ template void serde::Serializable::serialize(const Program::BlackBoxOp::EmbeddedCurveAdd &obj, Serializer &serializer) { serde::Serializable::serialize(obj.input1_x, serializer); 
serde::Serializable::serialize(obj.input1_y, serializer); + serde::Serializable::serialize(obj.input1_infinite, serializer); serde::Serializable::serialize(obj.input2_x, serializer); serde::Serializable::serialize(obj.input2_y, serializer); + serde::Serializable::serialize(obj.input2_infinite, serializer); serde::Serializable::serialize(obj.result, serializer); } @@ -3910,8 +3946,10 @@ Program::BlackBoxOp::EmbeddedCurveAdd serde::Deserializable::deserialize(deserializer); obj.input1_y = serde::Deserializable::deserialize(deserializer); + obj.input1_infinite = serde::Deserializable::deserialize(deserializer); obj.input2_x = serde::Deserializable::deserialize(deserializer); obj.input2_y = serde::Deserializable::deserialize(deserializer); + obj.input2_infinite = serde::Deserializable::deserialize(deserializer); obj.result = serde::Deserializable::deserialize(deserializer); return obj; } @@ -4265,6 +4303,50 @@ Program::BlackBoxOp::Sha256Compression serde::Deserializable BlackBoxOp::ToRadix::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::ToRadix BlackBoxOp::ToRadix::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlackBoxOp::ToRadix &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.input, serializer); + serde::Serializable::serialize(obj.radix, serializer); + serde::Serializable::serialize(obj.output, serializer); +} + +template <> +template +Program::BlackBoxOp::ToRadix serde::Deserializable::deserialize(Deserializer &deserializer) { + 
Program::BlackBoxOp::ToRadix obj; + obj.input = serde::Deserializable::deserialize(deserializer); + obj.radix = serde::Deserializable::deserialize(deserializer); + obj.output = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Program { inline bool operator==(const BlockId &lhs, const BlockId &rhs) { @@ -4307,6 +4389,153 @@ Program::BlockId serde::Deserializable::deserialize(Deserializ return obj; } +namespace Program { + + inline bool operator==(const BlockType &lhs, const BlockType &rhs) { + if (!(lhs.value == rhs.value)) { return false; } + return true; + } + + inline std::vector BlockType::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType BlockType::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType &obj, Serializer &serializer) { + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + serializer.decrease_container_depth(); +} + +template <> +template +Program::BlockType serde::Deserializable::deserialize(Deserializer &deserializer) { + deserializer.increase_container_depth(); + Program::BlockType obj; + obj.value = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::Memory &lhs, const BlockType::Memory &rhs) { + return true; + } + + inline std::vector BlockType::Memory::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + 
serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::Memory BlockType::Memory::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::Memory &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::Memory serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::Memory obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::CallData &lhs, const BlockType::CallData &rhs) { + return true; + } + + inline std::vector BlockType::CallData::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::CallData BlockType::CallData::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::CallData &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::CallData serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::CallData obj; + return obj; +} + +namespace Program { + + inline bool operator==(const BlockType::ReturnData &lhs, const BlockType::ReturnData &rhs) { + return true; + } + + inline 
std::vector BlockType::ReturnData::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlockType::ReturnData BlockType::ReturnData::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Program + +template <> +template +void serde::Serializable::serialize(const Program::BlockType::ReturnData &obj, Serializer &serializer) { +} + +template <> +template +Program::BlockType::ReturnData serde::Deserializable::deserialize(Deserializer &deserializer) { + Program::BlockType::ReturnData obj; + return obj; +} + namespace Program { inline bool operator==(const BrilligBytecode &lhs, const BrilligBytecode &rhs) { @@ -6443,6 +6672,7 @@ namespace Program { inline bool operator==(const Opcode::MemoryInit &lhs, const Opcode::MemoryInit &rhs) { if (!(lhs.block_id == rhs.block_id)) { return false; } if (!(lhs.init == rhs.init)) { return false; } + if (!(lhs.block_type == rhs.block_type)) { return false; } return true; } @@ -6468,6 +6698,7 @@ template void serde::Serializable::serialize(const Program::Opcode::MemoryInit &obj, Serializer &serializer) { serde::Serializable::serialize(obj.block_id, serializer); serde::Serializable::serialize(obj.init, serializer); + serde::Serializable::serialize(obj.block_type, serializer); } template <> @@ -6476,6 +6707,7 @@ Program::Opcode::MemoryInit serde::Deserializable:: Program::Opcode::MemoryInit obj; obj.block_id = serde::Deserializable::deserialize(deserializer); obj.init = serde::Deserializable::deserialize(deserializer); + obj.block_type = serde::Deserializable::deserialize(deserializer); return obj; } diff --git 
a/acvm-repo/acir/src/circuit/opcodes.rs b/acvm-repo/acir/src/circuit/opcodes.rs index 7db317c41a..e6dc11dac7 100644 --- a/acvm-repo/acir/src/circuit/opcodes.rs +++ b/acvm-repo/acir/src/circuit/opcodes.rs @@ -11,6 +11,13 @@ mod memory_operation; pub use black_box_function_call::{BlackBoxFuncCall, FunctionInput}; pub use memory_operation::{BlockId, MemOp}; +#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum BlockType { + Memory, + CallData, + ReturnData, +} + #[allow(clippy::large_enum_variant)] #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { @@ -30,6 +37,7 @@ pub enum Opcode { MemoryInit { block_id: BlockId, init: Vec, + block_type: BlockType, }, /// Calls to unconstrained functions BrilligCall { @@ -103,8 +111,12 @@ impl std::fmt::Display for Opcode { write!(f, "(id: {}, op {} at: {}) ", block_id.0, op.operation, op.index) } } - Opcode::MemoryInit { block_id, init } => { - write!(f, "INIT ")?; + Opcode::MemoryInit { block_id, init, block_type: databus } => { + match databus { + BlockType::Memory => write!(f, "INIT ")?, + BlockType::CallData => write!(f, "INIT CALLDATA ")?, + BlockType::ReturnData => write!(f, "INIT RETURNDATA ")?, + } write!(f, "(id: {}, len: {}) ", block_id.0, init.len()) } // We keep the display for a BrilligCall and circuit Call separate as they diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 115a33c1c9..b0e77b15c2 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -89,14 +89,12 @@ pub enum BlackBoxFuncCall { MultiScalarMul { points: Vec, scalars: Vec, - outputs: (Witness, Witness), + outputs: (Witness, Witness, Witness), }, EmbeddedCurveAdd { - input1_x: FunctionInput, - input1_y: FunctionInput, - input2_x: FunctionInput, - input2_y: FunctionInput, - outputs: (Witness, Witness), + input1: Box<[FunctionInput; 3]>, + 
input2: Box<[FunctionInput; 3]>, + outputs: (Witness, Witness, Witness), }, Keccak256 { inputs: Vec, @@ -245,9 +243,9 @@ impl BlackBoxFuncCall { inputs.extend(scalars.iter().copied()); inputs } - BlackBoxFuncCall::EmbeddedCurveAdd { - input1_x, input1_y, input2_x, input2_y, .. - } => vec![*input1_x, *input1_y, *input2_x, *input2_y], + BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, .. } => { + vec![input1[0], input1[1], input2[0], input2[1]] + } BlackBoxFuncCall::RANGE { input } => vec![*input], BlackBoxFuncCall::SchnorrVerify { public_key_x, @@ -343,9 +341,11 @@ impl BlackBoxFuncCall { | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | BlackBoxFuncCall::PedersenHash { output, .. } | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output], + BlackBoxFuncCall::PedersenCommitment { outputs, .. } => vec![outputs.0, outputs.1], BlackBoxFuncCall::MultiScalarMul { outputs, .. } - | BlackBoxFuncCall::PedersenCommitment { outputs, .. } - | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } => vec![outputs.0, outputs.1], + | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } => { + vec![outputs.0, outputs.1, outputs.2] + } BlackBoxFuncCall::RANGE { .. } | BlackBoxFuncCall::RecursiveAggregation { .. } | BlackBoxFuncCall::BigIntFromLeBytes { .. 
} diff --git a/acvm-repo/acir/src/lib.rs b/acvm-repo/acir/src/lib.rs index 24f27aae06..f60f1b46b6 100644 --- a/acvm-repo/acir/src/lib.rs +++ b/acvm-repo/acir/src/lib.rs @@ -41,7 +41,7 @@ mod reflection { circuit::{ brillig::{BrilligInputs, BrilligOutputs}, directives::Directive, - opcodes::BlackBoxFuncCall, + opcodes::{BlackBoxFuncCall, BlockType}, AssertionPayload, Circuit, ExpressionOrMemory, ExpressionWidth, Opcode, OpcodeLocation, Program, }, @@ -60,6 +60,7 @@ mod reflection { }; let mut tracer = Tracer::new(TracerConfig::default()); + tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index d9327f784e..19e4beb615 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -63,19 +63,26 @@ fn multi_scalar_mul_circuit() { points: vec![ FunctionInput { witness: Witness(1), num_bits: 128 }, FunctionInput { witness: Witness(2), num_bits: 128 }, + FunctionInput { witness: Witness(3), num_bits: 1 }, ], scalars: vec![ - FunctionInput { witness: Witness(3), num_bits: 128 }, FunctionInput { witness: Witness(4), num_bits: 128 }, + FunctionInput { witness: Witness(5), num_bits: 128 }, ], - outputs: (Witness(5), Witness(6)), + outputs: (Witness(6), Witness(7), Witness(8)), }); let circuit = Circuit { - current_witness_index: 7, + current_witness_index: 9, opcodes: vec![multi_scalar_mul], - private_parameters: BTreeSet::from([Witness(1), Witness(2), Witness(3), Witness(4)]), - return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(5), Witness(6)])), + private_parameters: BTreeSet::from([ + Witness(1), + Witness(2), + Witness(3), + Witness(4), + Witness(5), + ]), + return_values: PublicInputs(BTreeSet::from_iter(vec![Witness(6), Witness(7), Witness(8)])), ..Circuit::default() }; let 
program = Program { functions: vec![circuit], unconstrained_functions: vec![] }; @@ -83,10 +90,10 @@ fn multi_scalar_mul_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, - 30, 45, 218, 136, 141, 33, 40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, - 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, 38, 63, 180, 243, 97, 3, 125, 173, - 118, 131, 153, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 219, 10, 0, 32, 8, 67, 243, 214, 5, 250, 232, + 62, 189, 69, 123, 176, 132, 195, 116, 50, 149, 114, 107, 0, 97, 127, 116, 2, 75, 243, 2, + 74, 53, 122, 202, 189, 211, 15, 106, 5, 13, 116, 238, 35, 221, 81, 230, 61, 249, 37, 253, + 250, 179, 79, 109, 218, 22, 67, 227, 173, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -347,7 +354,11 @@ fn complex_brillig_foreign_call() { fn memory_op_circuit() { let init = vec![Witness(1), Witness(2)]; - let memory_init = Opcode::MemoryInit { block_id: BlockId(0), init }; + let memory_init = Opcode::MemoryInit { + block_id: BlockId(0), + init, + block_type: acir::circuit::opcodes::BlockType::Memory, + }; let write = Opcode::MemoryOp { block_id: BlockId(0), op: MemOp::write_to_mem_index(FieldElement::from(1u128).into(), Witness(3).into()), @@ -371,11 +382,11 @@ fn memory_op_circuit() { let bytes = Program::serialize_program(&program); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, - 255, 171, 10, 154, 16, 210, 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, - 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, 216, 151, 227, 188, 52, 187, 92, - 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, 16, - 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, + 255, 171, 10, 82, 176, 
232, 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, + 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, 217, 190, 24, 236, 75, 113, 94, 146, + 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, 11, 161, + 73, 39, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs b/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs index 0e7d28104d..5b778f63f0 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/constant_backpropagation.rs @@ -282,7 +282,7 @@ mod tests { fn test_circuit(opcodes: Vec) -> Circuit { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index c6ca18d30a..0e1629717b 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -164,7 +164,7 @@ mod tests { Circuit { current_witness_index: 1, - expression_width: ExpressionWidth::Bounded { width: 3 }, + expression_width: ExpressionWidth::Bounded { width: 4 }, opcodes, private_parameters: BTreeSet::new(), public_parameters: PublicInputs::default(), diff --git a/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs b/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs index ee35385fa8..0b52ae295a 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/embedded_curve_ops.rs @@ -11,7 +11,7 @@ pub(super) fn multi_scalar_mul( initial_witness: &mut WitnessMap, points: &[FunctionInput], scalars: &[FunctionInput], - outputs: (Witness, Witness), + outputs: 
(Witness, Witness, Witness), ) -> Result<(), OpcodeResolutionError> { let points: Result, _> = points.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect(); @@ -19,35 +19,44 @@ pub(super) fn multi_scalar_mul( let scalars: Result, _> = scalars.iter().map(|input| witness_to_value(initial_witness, input.witness)).collect(); - let scalars: Vec<_> = scalars?.into_iter().cloned().collect(); - + let mut scalars_lo = Vec::new(); + let mut scalars_hi = Vec::new(); + for (i, scalar) in scalars?.into_iter().enumerate() { + if i % 2 == 0 { + scalars_lo.push(*scalar); + } else { + scalars_hi.push(*scalar); + } + } // Call the backend's multi-scalar multiplication function - let (res_x, res_y) = backend.multi_scalar_mul(&points, &scalars)?; + let (res_x, res_y, is_infinite) = + backend.multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?; // Insert the resulting point into the witness map insert_value(&outputs.0, res_x, initial_witness)?; insert_value(&outputs.1, res_y, initial_witness)?; - + insert_value(&outputs.2, is_infinite, initial_witness)?; Ok(()) } pub(super) fn embedded_curve_add( backend: &impl BlackBoxFunctionSolver, initial_witness: &mut WitnessMap, - input1_x: FunctionInput, - input1_y: FunctionInput, - input2_x: FunctionInput, - input2_y: FunctionInput, - outputs: (Witness, Witness), + input1: [FunctionInput; 3], + input2: [FunctionInput; 3], + outputs: (Witness, Witness, Witness), ) -> Result<(), OpcodeResolutionError> { - let input1_x = witness_to_value(initial_witness, input1_x.witness)?; - let input1_y = witness_to_value(initial_witness, input1_y.witness)?; - let input2_x = witness_to_value(initial_witness, input2_x.witness)?; - let input2_y = witness_to_value(initial_witness, input2_y.witness)?; - let (res_x, res_y) = backend.ec_add(input1_x, input1_y, input2_x, input2_y)?; + let input1_x = witness_to_value(initial_witness, input1[0].witness)?; + let input1_y = witness_to_value(initial_witness, input1[1].witness)?; + let 
input1_infinite = witness_to_value(initial_witness, input1[2].witness)?; + let input2_x = witness_to_value(initial_witness, input2[0].witness)?; + let input2_y = witness_to_value(initial_witness, input2[1].witness)?; + let input2_infinite = witness_to_value(initial_witness, input2[2].witness)?; + let (res_x, res_y, res_infinite) = + backend.ec_add(input1_x, input1_y, input1_infinite, input2_x, input2_y, input2_infinite)?; insert_value(&outputs.0, res_x, initial_witness)?; insert_value(&outputs.1, res_y, initial_witness)?; - + insert_value(&outputs.2, res_infinite, initial_witness)?; Ok(()) } diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index a74f44b79d..99ed09a52e 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -164,16 +164,8 @@ pub(crate) fn solve( BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs } => { multi_scalar_mul(backend, initial_witness, points, scalars, *outputs) } - BlackBoxFuncCall::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, outputs } => { - embedded_curve_add( - backend, - initial_witness, - *input1_x, - *input1_y, - *input2_x, - *input2_y, - *outputs, - ) + BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, outputs } => { + embedded_curve_add(backend, initial_witness, **input1, **input2, *outputs) } // Recursive aggregation will be entirely handled by the backend and is not solved by the ACVM BlackBoxFuncCall::RecursiveAggregation { .. } => Ok(()), diff --git a/acvm-repo/acvm/src/pwg/mod.rs b/acvm-repo/acvm/src/pwg/mod.rs index a4219adbfa..f2649b9399 100644 --- a/acvm-repo/acvm/src/pwg/mod.rs +++ b/acvm-repo/acvm/src/pwg/mod.rs @@ -335,7 +335,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { &mut self.bigint_solver, ), Opcode::Directive(directive) => solve_directives(&mut self.witness_map, directive), - Opcode::MemoryInit { block_id, init } => { + Opcode::MemoryInit { block_id, init, .. 
} => { let solver = self.block_solvers.entry(*block_id).or_default(); solver.init(init, &self.witness_map) } diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs index df61083eee..495389d7b3 100644 --- a/acvm-repo/acvm/tests/solver.rs +++ b/acvm-repo/acvm/tests/solver.rs @@ -4,7 +4,7 @@ use acir::{ brillig::{BinaryFieldOp, HeapArray, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray}, circuit::{ brillig::{BrilligBytecode, BrilligInputs, BrilligOutputs}, - opcodes::{BlockId, MemOp}, + opcodes::{BlockId, BlockType, MemOp}, Opcode, OpcodeLocation, }, native_types::{Expression, Witness, WitnessMap}, @@ -658,7 +658,11 @@ fn memory_operations() { let block_id = BlockId(0); - let init = Opcode::MemoryInit { block_id, init: (1..6).map(Witness).collect() }; + let init = Opcode::MemoryInit { + block_id, + init: (1..6).map(Witness).collect(), + block_type: BlockType::Memory, + }; let read_op = Opcode::MemoryOp { block_id, diff --git a/acvm-repo/acvm_js/build.sh b/acvm-repo/acvm_js/build.sh index 16fb26e55d..ee93413ab8 100755 --- a/acvm-repo/acvm_js/build.sh +++ b/acvm-repo/acvm_js/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -require_command wasm-opt +# require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/acvm-repo/acvm_js/test/shared/memory_op.ts b/acvm-repo/acvm_js/test/shared/memory_op.ts index 20ea88c713..f7443c2258 100644 --- a/acvm-repo/acvm_js/test/shared/memory_op.ts +++ b/acvm-repo/acvm_js/test/shared/memory_op.ts @@ -1,9 +1,9 @@ // See `memory_op_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 203, 180, 255, 216, 15, 250, 255, 171, 10, 154, 16, 210, - 45, 61, 52, 144, 13, 132, 49, 135, 84, 54, 218, 26, 134, 22, 112, 5, 19, 180, 237, 61, 6, 88, 223, 208, 179, 125, 41, - 216, 151, 227, 188, 52, 187, 92, 253, 173, 92, 137, 190, 157, 143, 160, 254, 155, 45, 188, 148, 11, 38, 213, 237, 188, - 16, 35, 3, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 82, 65, 10, 0, 32, 8, 211, 180, 255, 216, 15, 250, 255, 171, 10, 82, 176, 232, + 150, 30, 26, 200, 118, 144, 49, 135, 8, 11, 117, 14, 169, 102, 229, 162, 140, 78, 219, 206, 137, 174, 44, 111, 104, + 217, 190, 24, 236, 75, 113, 94, 146, 93, 174, 252, 86, 46, 71, 223, 78, 46, 104, 129, 253, 155, 45, 60, 195, 5, 3, 89, + 11, 161, 73, 39, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts b/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts index 8ee0a067a3..5401da7697 100644 --- a/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts +++ b/acvm-repo/acvm_js/test/shared/multi_scalar_mul.ts @@ -1,21 +1,24 @@ // See `multi_scalar_mul_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 85, 76, 65, 14, 0, 32, 8, 82, 179, 186, 244, 104, 159, 30, 45, 218, 136, 141, 33, - 40, 186, 93, 76, 208, 57, 31, 93, 96, 136, 47, 250, 146, 188, 209, 39, 181, 131, 131, 187, 148, 110, 240, 246, 101, - 38, 63, 180, 243, 97, 3, 125, 173, 118, 131, 153, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 141, 219, 10, 0, 32, 8, 67, 243, 214, 5, 250, 232, 62, 189, 69, 123, 176, 132, + 195, 116, 50, 149, 114, 107, 0, 97, 127, 116, 2, 75, 243, 2, 74, 53, 122, 202, 189, 211, 15, 106, 5, 13, 116, 238, 35, + 221, 81, 230, 61, 249, 37, 253, 250, 179, 79, 109, 218, 22, 67, 227, 173, 0, 0, 0, ]); export const initialWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], [2, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'], - [3, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [4, '0x0000000000000000000000000000000000000000000000000000000000000000'], + [3, '0x0000000000000000000000000000000000000000000000000000000000000000'], + [4, '0x0000000000000000000000000000000000000000000000000000000000000001'], + [5, '0x0000000000000000000000000000000000000000000000000000000000000000'], ]); export const expectedWitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], [2, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'], - [3, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [4, '0x0000000000000000000000000000000000000000000000000000000000000000'], - [5, '0x0000000000000000000000000000000000000000000000000000000000000001'], - [6, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'], + [3, '0x0000000000000000000000000000000000000000000000000000000000000000'], + [4, '0x0000000000000000000000000000000000000000000000000000000000000001'], + [5, '0x0000000000000000000000000000000000000000000000000000000000000000'], + [6, 
'0x0000000000000000000000000000000000000000000000000000000000000001'], + [7, '0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c'], + [8, '0x0000000000000000000000000000000000000000000000000000000000000000'], ]); diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 3403b0fe23..73f64d3d9d 100644 --- a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -27,15 +27,18 @@ pub trait BlackBoxFunctionSolver { fn multi_scalar_mul( &self, points: &[FieldElement], - scalars: &[FieldElement], - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + scalars_lo: &[FieldElement], + scalars_hi: &[FieldElement], + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError>; fn ec_add( &self, input1_x: &FieldElement, input1_y: &FieldElement, + input1_infinite: &FieldElement, input2_x: &FieldElement, input2_y: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + input2_infinite: &FieldElement, + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError>; fn poseidon2_permutation( &self, _inputs: &[FieldElement], @@ -81,17 +84,20 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { fn multi_scalar_mul( &self, _points: &[FieldElement], - _scalars: &[FieldElement], - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + _scalars_lo: &[FieldElement], + _scalars_hi: &[FieldElement], + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { Err(Self::fail(BlackBoxFunc::MultiScalarMul)) } fn ec_add( &self, _input1_x: &FieldElement, _input1_y: &FieldElement, + _input1_infinite: &FieldElement, _input2_x: &FieldElement, _input2_y: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + _input2_infinite: &FieldElement, + ) -> Result<(FieldElement, FieldElement, 
FieldElement), BlackBoxResolutionError> { Err(Self::fail(BlackBoxFunc::EmbeddedCurveAdd)) } fn poseidon2_permutation( diff --git a/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs b/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs index 3f6d2ac86c..901eb9d5a0 100644 --- a/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs +++ b/acvm-repo/bn254_blackbox_solver/src/embedded_curve_ops.rs @@ -10,9 +10,11 @@ use crate::BlackBoxResolutionError; /// Performs multi scalar multiplication of points with scalars. pub fn multi_scalar_mul( points: &[FieldElement], - scalars: &[FieldElement], -) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - if points.len() != scalars.len() { + scalars_lo: &[FieldElement], + scalars_hi: &[FieldElement], +) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { + if points.len() != 3 * scalars_lo.len() || scalars_lo.len() != scalars_hi.len() { + dbg!(&points.len(), &scalars_lo.len(), &scalars_hi.len()); return Err(BlackBoxResolutionError::Failed( BlackBoxFunc::MultiScalarMul, "Points and scalars must have the same length".to_string(), @@ -21,21 +23,22 @@ pub fn multi_scalar_mul( let mut output_point = grumpkin::SWAffine::zero(); - for i in (0..points.len()).step_by(2) { - let point = create_point(points[i], points[i + 1]) - .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::MultiScalarMul, e))?; + for i in (0..points.len()).step_by(3) { + let point = + create_point(points[i], points[i + 1], points[i + 2] == FieldElement::from(1_u128)) + .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::MultiScalarMul, e))?; - let scalar_low: u128 = scalars[i].try_into_u128().ok_or_else(|| { + let scalar_low: u128 = scalars_lo[i / 3].try_into_u128().ok_or_else(|| { BlackBoxResolutionError::Failed( BlackBoxFunc::MultiScalarMul, - format!("Limb {} is not less than 2^128", scalars[i].to_hex()), + format!("Limb {} is not less than 2^128", scalars_lo[i].to_hex()), ) })?; - let 
scalar_high: u128 = scalars[i + 1].try_into_u128().ok_or_else(|| { + let scalar_high: u128 = scalars_hi[i / 3].try_into_u128().ok_or_else(|| { BlackBoxResolutionError::Failed( BlackBoxFunc::MultiScalarMul, - format!("Limb {} is not less than 2^128", scalars[i + 1].to_hex()), + format!("Limb {} is not less than 2^128", scalars_hi[i].to_hex()), ) })?; @@ -59,25 +62,33 @@ pub fn multi_scalar_mul( } if let Some((out_x, out_y)) = output_point.xy() { - Ok((FieldElement::from_repr(*out_x), FieldElement::from_repr(*out_y))) + Ok(( + FieldElement::from_repr(*out_x), + FieldElement::from_repr(*out_y), + FieldElement::from(output_point.is_zero() as u128), + )) } else { - Ok((FieldElement::zero(), FieldElement::zero())) + Ok((FieldElement::from(0_u128), FieldElement::from(0_u128), FieldElement::from(1_u128))) } } pub fn embedded_curve_add( - input1_x: FieldElement, - input1_y: FieldElement, - input2_x: FieldElement, - input2_y: FieldElement, -) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - let point1 = create_point(input1_x, input1_y) + input1: [FieldElement; 3], + input2: [FieldElement; 3], +) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { + let point1 = create_point(input1[0], input1[1], input1[2] == FieldElement::one()) .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::EmbeddedCurveAdd, e))?; - let point2 = create_point(input2_x, input2_y) + let point2 = create_point(input2[0], input2[1], input2[2] == FieldElement::one()) .map_err(|e| BlackBoxResolutionError::Failed(BlackBoxFunc::EmbeddedCurveAdd, e))?; let res = grumpkin::SWAffine::from(point1 + point2); if let Some((res_x, res_y)) = res.xy() { - Ok((FieldElement::from_repr(*res_x), FieldElement::from_repr(*res_y))) + Ok(( + FieldElement::from_repr(*res_x), + FieldElement::from_repr(*res_y), + FieldElement::from(res.is_zero() as u128), + )) + } else if res.is_zero() { + Ok((FieldElement::from(0_u128), FieldElement::from(0_u128), FieldElement::from(1_u128))) 
} else { Err(BlackBoxResolutionError::Failed( BlackBoxFunc::EmbeddedCurveAdd, @@ -86,7 +97,14 @@ pub fn embedded_curve_add( } } -fn create_point(x: FieldElement, y: FieldElement) -> Result { +fn create_point( + x: FieldElement, + y: FieldElement, + is_infinite: bool, +) -> Result { + if is_infinite { + return Ok(grumpkin::SWAffine::zero()); + } let point = grumpkin::SWAffine::new_unchecked(x.into_repr(), y.into_repr()); if !point.is_on_curve() { return Err(format!("Point ({}, {}) is not on curve", x.to_hex(), y.to_hex())); @@ -103,11 +121,11 @@ mod tests { use super::*; - fn get_generator() -> [FieldElement; 2] { + fn get_generator() -> [FieldElement; 3] { let generator = grumpkin::SWAffine::generator(); let generator_x = FieldElement::from_repr(*generator.x().unwrap()); let generator_y = FieldElement::from_repr(*generator.y().unwrap()); - [generator_x, generator_y] + [generator_x, generator_y, FieldElement::zero()] } #[test] @@ -115,7 +133,7 @@ mod tests { // We check that multiplying 1 by generator results in the generator let generator = get_generator(); - let res = multi_scalar_mul(&generator, &[FieldElement::one(), FieldElement::zero()])?; + let res = multi_scalar_mul(&generator, &[FieldElement::one()], &[FieldElement::zero()])?; assert_eq!(generator[0], res.0); assert_eq!(generator[1], res.1); @@ -125,9 +143,10 @@ mod tests { #[test] fn low_high_smoke_test() -> Result<(), BlackBoxResolutionError> { let points = get_generator(); - let scalars = [FieldElement::one(), FieldElement::from(2u128)]; + let scalars_lo = [FieldElement::one()]; + let scalars_hi = [FieldElement::from(2u128)]; - let res = multi_scalar_mul(&points, &scalars)?; + let res = multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?; let x = "0702ab9c7038eeecc179b4f209991bcb68c7cb05bf4c532d804ccac36199c9a9"; let y = "23f10e9e43a3ae8d75d24154e796aae12ae7af546716e8f81a2564f1b5814130"; @@ -148,10 +167,10 @@ mod tests { "Limb 0000000000000000000000000000000100000000000000000000000000000000 is not less 
than 2^128".into(), )); - let res = multi_scalar_mul(&points, &[FieldElement::one(), invalid_limb]); + let res = multi_scalar_mul(&points, &[FieldElement::one()], &[invalid_limb]); assert_eq!(res, expected_error); - let res = multi_scalar_mul(&points, &[invalid_limb, FieldElement::one()]); + let res = multi_scalar_mul(&points, &[invalid_limb], &[FieldElement::one()]); assert_eq!(res, expected_error); } @@ -162,7 +181,7 @@ mod tests { let low = FieldElement::from_be_bytes_reduce(&x[16..32]); let high = FieldElement::from_be_bytes_reduce(&x[0..16]); - let res = multi_scalar_mul(&get_generator(), &[low, high]); + let res = multi_scalar_mul(&get_generator(), &[low], &[high]); assert_eq!( res, @@ -181,8 +200,9 @@ mod tests { let valid_scalar_high = FieldElement::zero(); let res = multi_scalar_mul( - &[invalid_point_x, invalid_point_y], - &[valid_scalar_low, valid_scalar_high], + &[invalid_point_x, invalid_point_y, FieldElement::zero()], + &[valid_scalar_low], + &[valid_scalar_high], ); assert_eq!( @@ -197,9 +217,10 @@ mod tests { #[test] fn throws_on_args_length_mismatch() { let points = get_generator(); - let scalars = [FieldElement::from(2u128)]; + let scalars_lo = [FieldElement::from(2u128)]; + let scalars_hi = []; - let res = multi_scalar_mul(&points, &scalars); + let res = multi_scalar_mul(&points, &scalars_lo, &scalars_hi); assert_eq!( res, @@ -215,7 +236,10 @@ mod tests { let x = FieldElement::from(1u128); let y = FieldElement::from(2u128); - let res = embedded_curve_add(x, y, x, y); + let res = embedded_curve_add( + [x, y, FieldElement::from(0u128)], + [x, y, FieldElement::from(0u128)], + ); assert_eq!( res, @@ -229,10 +253,14 @@ mod tests { #[test] fn output_of_msm_matches_add() -> Result<(), BlackBoxResolutionError> { let points = get_generator(); - let scalars = [FieldElement::from(2u128), FieldElement::zero()]; - - let msm_res = multi_scalar_mul(&points, &scalars)?; - let add_res = embedded_curve_add(points[0], points[1], points[0], points[1])?; + let 
scalars_lo = [FieldElement::from(2u128)]; + let scalars_hi = [FieldElement::zero()]; + + let msm_res = multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?; + let add_res = embedded_curve_add( + [points[0], points[1], FieldElement::from(0u128)], + [points[0], points[1], FieldElement::from(0u128)], + )?; assert_eq!(msm_res.0, add_res.0); assert_eq!(msm_res.1, add_res.1); diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs index 4cb51b5975..a85ddcd894 100644 --- a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -92,19 +92,25 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { fn multi_scalar_mul( &self, points: &[FieldElement], - scalars: &[FieldElement], - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - multi_scalar_mul(points, scalars) + scalars_lo: &[FieldElement], + scalars_hi: &[FieldElement], + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { + multi_scalar_mul(points, scalars_lo, scalars_hi) } fn ec_add( &self, input1_x: &FieldElement, input1_y: &FieldElement, + input1_infinite: &FieldElement, input2_x: &FieldElement, input2_y: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - embedded_curve_add(*input1_x, *input1_y, *input2_x, *input2_y) + input2_infinite: &FieldElement, + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { + embedded_curve_add( + [*input1_x, *input1_y, *input1_infinite], + [*input2_x, *input2_y, *input2_infinite], + ) } fn poseidon2_permutation( diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs index 15abc19ed9..3887092a8c 100644 --- a/acvm-repo/brillig/src/black_box.rs +++ b/acvm-repo/brillig/src/black_box.rs @@ -83,8 +83,10 @@ pub enum BlackBoxOp { EmbeddedCurveAdd { input1_x: MemoryAddress, input1_y: MemoryAddress, + input1_infinite: MemoryAddress, input2_x: MemoryAddress, 
input2_y: MemoryAddress, + input2_infinite: MemoryAddress, result: HeapArray, }, BigIntAdd { @@ -126,4 +128,9 @@ pub enum BlackBoxOp { hash_values: HeapVector, output: HeapArray, }, + ToRadix { + input: MemoryAddress, + radix: u32, + output: HeapArray, + }, } diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index c999b5bf33..ebaa697628 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -5,6 +5,7 @@ use acvm_blackbox_solver::{ aes128_encrypt, blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, keccakf1600, sha256, sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError, }; +use num_bigint::BigUint; use crate::memory::MemoryValue; use crate::Memory; @@ -156,22 +157,63 @@ pub(crate) fn evaluate_black_box( Ok(()) } BlackBoxOp::MultiScalarMul { points, scalars, outputs: result } => { - let points: Vec = - read_heap_vector(memory, points).iter().map(|x| x.try_into().unwrap()).collect(); + let points: Vec = read_heap_vector(memory, points) + .iter() + .enumerate() + .map(|(i, x)| { + if i % 3 == 2 { + let is_infinite: bool = x.try_into().unwrap(); + FieldElement::from(is_infinite as u128) + } else { + x.try_into().unwrap() + } + }) + .collect(); let scalars: Vec = read_heap_vector(memory, scalars).iter().map(|x| x.try_into().unwrap()).collect(); - - let (x, y) = solver.multi_scalar_mul(&points, &scalars)?; - memory.write_slice(memory.read_ref(result.pointer), &[x.into(), y.into()]); + let mut scalars_lo = Vec::with_capacity(scalars.len() / 2); + let mut scalars_hi = Vec::with_capacity(scalars.len() / 2); + for (i, scalar) in scalars.iter().enumerate() { + if i % 2 == 0 { + scalars_lo.push(*scalar); + } else { + scalars_hi.push(*scalar); + } + } + let (x, y, is_infinite) = solver.multi_scalar_mul(&points, &scalars_lo, &scalars_hi)?; + memory.write_slice( + memory.read_ref(result.pointer), + &[x.into(), y.into(), is_infinite.into()], + ); Ok(()) } - 
BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { + BlackBoxOp::EmbeddedCurveAdd { + input1_x, + input1_y, + input2_x, + input2_y, + result, + input1_infinite, + input2_infinite, + } => { let input1_x = memory.read(*input1_x).try_into().unwrap(); let input1_y = memory.read(*input1_y).try_into().unwrap(); + let input1_infinite: bool = memory.read(*input1_infinite).try_into().unwrap(); let input2_x = memory.read(*input2_x).try_into().unwrap(); let input2_y = memory.read(*input2_y).try_into().unwrap(); - let (x, y) = solver.ec_add(&input1_x, &input1_y, &input2_x, &input2_y)?; - memory.write_slice(memory.read_ref(result.pointer), &[x.into(), y.into()]); + let input2_infinite: bool = memory.read(*input2_infinite).try_into().unwrap(); + let (x, y, infinite) = solver.ec_add( + &input1_x, + &input1_y, + &input1_infinite.into(), + &input2_x, + &input2_y, + &input2_infinite.into(), + )?; + memory.write_slice( + memory.read_ref(result.pointer), + &[x.into(), y.into(), infinite.into()], + ); Ok(()) } BlackBoxOp::PedersenCommitment { inputs, domain_separator, output } => { @@ -295,6 +337,25 @@ pub(crate) fn evaluate_black_box( memory.write_slice(memory.read_ref(output.pointer), &state); Ok(()) } + BlackBoxOp::ToRadix { input, radix, output } => { + let input: FieldElement = + memory.read(*input).try_into().expect("ToRadix input not a field"); + + let mut input = BigUint::from_bytes_be(&input.to_be_bytes()); + let radix = BigUint::from(*radix); + + let mut limbs: Vec = Vec::with_capacity(output.size); + + for _ in 0..output.size { + let limb = &input % &radix; + limbs.push(FieldElement::from_be_bytes_reduce(&limb.to_bytes_be()).into()); + input /= &radix; + } + + memory.write_slice(memory.read_ref(output.pointer), &limbs); + + Ok(()) + } } } @@ -321,6 +382,7 @@ fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { BlackBoxOp::BigIntToLeBytes { .. } => BlackBoxFunc::BigIntToLeBytes, BlackBoxOp::Poseidon2Permutation { .. 
} => BlackBoxFunc::Poseidon2Permutation, BlackBoxOp::Sha256Compression { .. } => BlackBoxFunc::Sha256Compression, + BlackBoxOp::ToRadix { .. } => unreachable!("ToRadix is not an ACIR BlackBoxFunc"), } } diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 17ae999fb8..5f0326bea5 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -7,7 +7,9 @@ use transforms::{ generate_contract_interface, stub_function, update_fn_signatures_in_contract_interface, }, events::{generate_selector_impl, transform_events}, - functions::{export_fn_abi, transform_function, transform_unconstrained}, + functions::{ + check_for_public_args, export_fn_abi, transform_function, transform_unconstrained, + }, note_interface::{generate_note_interface_impl, inject_note_exports}, storage::{ assign_storage_slots, check_for_storage_definition, check_for_storage_implementation, @@ -180,7 +182,7 @@ fn transform_module( if has_transformed_module { // We only want to run these checks if the macro processor has found the module to be an Aztec contract. 
- let private_functions_count = module + let private_functions: Vec<_> = module .functions .iter() .filter(|func| { @@ -190,9 +192,27 @@ fn transform_module( .iter() .any(|attr| is_custom_attribute(attr, "aztec(private)")) }) - .count(); + .collect(); + + let public_functions: Vec<_> = module + .functions + .iter() + .filter(|func| { + func.def + .attributes + .secondary + .iter() + .any(|attr| is_custom_attribute(attr, "aztec(public)")) + }) + .collect(); + + let private_function_count = private_functions.len(); + + check_for_public_args(&private_functions)?; + + check_for_public_args(&public_functions)?; - if private_functions_count > MAX_CONTRACT_PRIVATE_FUNCTIONS { + if private_function_count > MAX_CONTRACT_PRIVATE_FUNCTIONS { return Err(AztecMacroError::ContractHasTooManyPrivateFunctions { span: Span::default(), }); diff --git a/aztec_macros/src/transforms/contract_interface.rs b/aztec_macros/src/transforms/contract_interface.rs index 1afe0a3006..921bad4b61 100644 --- a/aztec_macros/src/transforms/contract_interface.rs +++ b/aztec_macros/src/transforms/contract_interface.rs @@ -75,7 +75,8 @@ pub fn stub_function(aztec_visibility: &str, func: &NoirFunction) -> String { for i in 0..{0}.len() {{ args_acc = args_acc.append(hash_{0}[i].as_slice()); }}\n", - param_name, typ.typ + param_name, + typ.typ.to_string().replace("plain::", "") ) } _ => { diff --git a/aztec_macros/src/transforms/functions.rs b/aztec_macros/src/transforms/functions.rs index 90563c6085..487d15ceab 100644 --- a/aztec_macros/src/transforms/functions.rs +++ b/aztec_macros/src/transforms/functions.rs @@ -792,3 +792,18 @@ fn add_cast_to_hasher(identifier: &Ident, hasher_name: &str) -> Statement { vec![cast_operation], // args ))) } + +/** + * Takes a vector of functions and checks for the presence of arguments with Public visibility + * Returns AztecMAcroError::PublicArgsDisallowed if found + */ +pub fn check_for_public_args(functions: &[&NoirFunction]) -> Result<(), AztecMacroError> { + for func 
in functions { + for param in &func.def.parameters { + if param.visibility == Visibility::Public { + return Err(AztecMacroError::PublicArgsDisallowed { span: func.span() }); + } + } + } + Ok(()) +} diff --git a/aztec_macros/src/utils/errors.rs b/aztec_macros/src/utils/errors.rs index db86012a00..8d1a19bfea 100644 --- a/aztec_macros/src/utils/errors.rs +++ b/aztec_macros/src/utils/errors.rs @@ -20,6 +20,7 @@ pub enum AztecMacroError { CouldNotGenerateContractInterface { secondary_message: Option }, EventError { span: Span, message: String }, UnsupportedAttributes { span: Span, secondary_message: Option }, + PublicArgsDisallowed { span: Span }, } impl From for MacroError { @@ -95,6 +96,11 @@ impl From for MacroError { secondary_message, span: Some(span), }, + AztecMacroError::PublicArgsDisallowed { span } => MacroError { + primary_message: "Aztec functions can't have public arguments".to_string(), + secondary_message: None, + span: Some(span), + }, } } } diff --git a/compiler/integration-tests/scripts/codegen-verifiers.sh b/compiler/integration-tests/scripts/codegen-verifiers.sh index e377a3ee3f..abc26c4c46 100644 --- a/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -1,26 +1,34 @@ #!/usr/bin/env bash +NARGO_BACKEND_PATH=${NARGO_BACKEND_PATH:-bb} + self_path=$(dirname "$(readlink -f "$0")") repo_root=$self_path/../../.. 
-# Run codegen-verifier for 1_mul +# We want to move all the contracts to the root of compiler/integration-tests +contracts_dir=$self_path/../contracts +rm -rf $contracts_dir +mkdir $contracts_dir + +KEYS=$(mktemp -d) + +# Codegen verifier contract for 1_mul mul_dir=$repo_root/test_programs/execution_success/1_mul -nargo --program-dir $mul_dir codegen-verifier +nargo --program-dir $mul_dir compile +$NARGO_BACKEND_PATH write_vk -b $mul_dir/target/1_mul.json -o $KEYS/1_mul +$NARGO_BACKEND_PATH contract -k $KEYS/1_mul -o $contracts_dir/1_mul.sol -# Run codegen-verifier for assert_statement +# Codegen verifier contract for assert_statement assert_statement_dir=$repo_root/test_programs/execution_success/assert_statement -nargo --program-dir $assert_statement_dir codegen-verifier +nargo --program-dir $assert_statement_dir compile +$NARGO_BACKEND_PATH write_vk -b $assert_statement_dir/target/assert_statement.json -o $KEYS/assert_statement +$NARGO_BACKEND_PATH contract -k $KEYS/assert_statement -o $contracts_dir/assert_statement.sol -# Run codegen-verifier for recursion +# Codegen verifier contract for recursion recursion_dir=$repo_root/compiler/integration-tests/circuits/recursion -nargo --program-dir $recursion_dir codegen-verifier - -# Copy compiled contracts from the root of compiler/integration-tests -contracts_dir=$self_path/../contracts -rm -rf $contracts_dir -mkdir $contracts_dir +nargo --program-dir $recursion_dir compile +$NARGO_BACKEND_PATH write_vk -b $recursion_dir/target/recursion.json -o $KEYS/recursion +$NARGO_BACKEND_PATH contract -k $KEYS/recursion ./ -o $contracts_dir/recursion.sol -cp $mul_dir/contract/1_mul/plonk_vk.sol $contracts_dir/1_mul.sol -cp $assert_statement_dir/contract/assert_statement/plonk_vk.sol $contracts_dir/assert_statement.sol -cp $recursion_dir/contract/recursion/plonk_vk.sol $contracts_dir/recursion.sol +rm -rf $KEYS \ No newline at end of file diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 
f18997d5d7..d7368f299b 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -54,7 +54,7 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str = #[derive(Args, Clone, Debug, Default)] pub struct CompileOptions { /// Override the expression width requested by the backend. - #[arg(long, value_parser = parse_expression_width, default_value = "3")] + #[arg(long, value_parser = parse_expression_width, default_value = "4")] pub expression_width: ExpressionWidth, /// Force a full recompilation. @@ -84,10 +84,6 @@ pub struct CompileOptions { #[arg(long, conflicts_with = "deny_warnings")] pub silence_warnings: bool, - /// Output ACIR gzipped bytecode instead of the JSON artefact - #[arg(long, hide = true)] - pub only_acir: bool, - /// Disables the builtin Aztec macros being used in the compiler #[arg(long, hide = true)] pub disable_macros: bool, @@ -103,6 +99,10 @@ pub struct CompileOptions { /// Force Brillig output (for step debugging) #[arg(long, hide = true)] pub force_brillig: bool, + + /// Enable the experimental elaborator pass + #[arg(long, hide = true)] + pub use_elaborator: bool, } fn parse_expression_width(input: &str) -> Result { @@ -245,12 +245,13 @@ pub fn check_crate( crate_id: CrateId, deny_warnings: bool, disable_macros: bool, + use_elaborator: bool, ) -> CompilationResult<()> { let macros: &[&dyn MacroProcessor] = if disable_macros { &[] } else { &[&aztec_macros::AztecMacro as &dyn MacroProcessor] }; let mut errors = vec![]; - let diagnostics = CrateDefMap::collect_defs(crate_id, context, macros); + let diagnostics = CrateDefMap::collect_defs(crate_id, context, use_elaborator, macros); errors.extend(diagnostics.into_iter().map(|(error, file_id)| { let diagnostic = CustomDiagnostic::from(&error); diagnostic.in_file(file_id) @@ -282,8 +283,13 @@ pub fn compile_main( options: &CompileOptions, cached_program: Option, ) -> CompilationResult { - let (_, mut warnings) = - check_crate(context, crate_id, options.deny_warnings, 
options.disable_macros)?; + let (_, mut warnings) = check_crate( + context, + crate_id, + options.deny_warnings, + options.disable_macros, + options.use_elaborator, + )?; let main = context.get_main_function(&crate_id).ok_or_else(|| { // TODO(#2155): This error might be a better to exist in Nargo @@ -318,8 +324,13 @@ pub fn compile_contract( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult { - let (_, warnings) = - check_crate(context, crate_id, options.deny_warnings, options.disable_macros)?; + let (_, warnings) = check_crate( + context, + crate_id, + options.deny_warnings, + options.disable_macros, + options.use_elaborator, + )?; // TODO: We probably want to error if contracts is empty let contracts = context.get_all_contracts(&crate_id); diff --git a/compiler/noirc_driver/tests/stdlib_warnings.rs b/compiler/noirc_driver/tests/stdlib_warnings.rs index 6f43762112..327c8daad0 100644 --- a/compiler/noirc_driver/tests/stdlib_warnings.rs +++ b/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -24,7 +24,8 @@ fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> let mut context = Context::new(file_manager, parsed_files); let root_crate_id = prepare_crate(&mut context, file_name); - let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false)?; + let ((), warnings) = + noirc_driver::check_crate(&mut context, root_crate_id, false, false, false)?; assert_eq!(warnings, Vec::new(), "stdlib is producing warnings"); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index d982d864d0..f56c5daf31 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -207,15 +207,17 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EmbeddedCurveAdd => { if let ( - [BrilligVariable::SingleAddr(input1_x), 
BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y)], + [BrilligVariable::SingleAddr(input1_x), BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input1_infinite), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y), BrilligVariable::SingleAddr(input2_infinite)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveAdd { input1_x: input1_x.address, input1_y: input1_y.address, + input1_infinite: input1_infinite.address, input2_x: input2_x.address, input2_y: input2_y.address, + input2_infinite: input2_infinite.address, result: result_array.to_heap_array(), }); } else { @@ -233,9 +235,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::RANGE => unreachable!( "ICE: `BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" ), - BlackBoxFunc::RecursiveAggregation => unimplemented!( - "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" - ), + BlackBoxFunc::RecursiveAggregation => {} BlackBoxFunc::BigIntAdd => { if let ( [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(lhs_modulus), BrilligVariable::SingleAddr(rhs), BrilligVariable::SingleAddr(rhs_modulus)], diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index f660c8e0b7..6a4f9f5cc0 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -488,8 +488,22 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); - let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); + 
+ let radix: u32 = dfg + .get_numeric_constant(arguments[1]) + .expect("Radix should be known") + .try_to_u64() + .expect("Radix should fit in u64") + .try_into() + .expect("Radix should be u32"); + + let limb_count: usize = dfg + .get_numeric_constant(arguments[2]) + .expect("Limb count should be known") + .try_to_u64() + .expect("Limb count should fit in u64") + .try_into() + .expect("Limb count should fit in usize"); let results = dfg.instruction_results(instruction_id); @@ -511,7 +525,8 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context.cast_instruction(target_len, limb_count); + self.brillig_context + .usize_const_instruction(target_len.address, limb_count.into()); self.brillig_context.codegen_to_radix( source, @@ -524,7 +539,13 @@ impl<'block> BrilligBlock<'block> { } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { let source = self.convert_ssa_single_addr_value(arguments[0], dfg); - let limb_count = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count: usize = dfg + .get_numeric_constant(arguments[1]) + .expect("Limb count should be known") + .try_to_u64() + .expect("Limb count should fit in u64") + .try_into() + .expect("Limb count should fit in usize"); let results = dfg.instruction_results(instruction_id); @@ -549,21 +570,18 @@ impl<'block> BrilligBlock<'block> { BrilligVariable::SingleAddr(..) 
=> unreachable!("ICE: ToBits on non-array"), }; - let radix = self.brillig_context.make_constant_instruction(2_usize.into(), 32); - // Update the user-facing slice length - self.brillig_context.cast_instruction(target_len, limb_count); + self.brillig_context + .usize_const_instruction(target_len.address, limb_count.into()); self.brillig_context.codegen_to_radix( source, target_vector, - radix, + 2, limb_count, matches!(endianness, Endian::Big), 1, ); - - self.brillig_context.deallocate_single_addr(radix); } _ => { unreachable!("unsupported function call type {:?}", dfg[*func]) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index fadcdb22c1..2bd57dc948 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -170,18 +170,21 @@ pub(crate) mod tests { fn multi_scalar_mul( &self, _points: &[FieldElement], - _scalars: &[FieldElement], - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Ok((4_u128.into(), 5_u128.into())) + _scalars_lo: &[FieldElement], + _scalars_hi: &[FieldElement], + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { + Ok((4_u128.into(), 5_u128.into(), 0_u128.into())) } fn ec_add( &self, _input1_x: &FieldElement, _input1_y: &FieldElement, + _input1_infinite: &FieldElement, _input2_x: &FieldElement, _input2_y: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + _input2_infinite: &FieldElement, + ) -> Result<(FieldElement, FieldElement, FieldElement), BlackBoxResolutionError> { panic!("Path not trodden by this test") } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs index ab756217bc..58166554e1 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs +++ 
b/compiler/noirc_evaluator/src/brillig/brillig_ir/codegen_intrinsic.rs @@ -1,6 +1,7 @@ -use acvm::FieldElement; - -use crate::brillig::brillig_ir::BrilligBinaryOp; +use acvm::{ + acir::brillig::{BlackBoxOp, HeapArray}, + FieldElement, +}; use super::{ brillig_variable::{BrilligVector, SingleAddrVariable}, @@ -36,57 +37,46 @@ impl BrilligContext { &mut self, source_field: SingleAddrVariable, target_vector: BrilligVector, - radix: SingleAddrVariable, - limb_count: SingleAddrVariable, + radix: u32, + limb_count: usize, big_endian: bool, limb_bit_size: u32, ) { assert!(source_field.bit_size == FieldElement::max_num_bits()); - assert!(radix.bit_size == 32); - assert!(limb_count.bit_size == 32); - let radix_as_field = - SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); - self.cast_instruction(radix_as_field, radix); - self.cast_instruction(SingleAddrVariable::new_usize(target_vector.size), limb_count); + self.usize_const_instruction(target_vector.size, limb_count.into()); self.usize_const_instruction(target_vector.rc, 1_usize.into()); self.codegen_allocate_array(target_vector.pointer, target_vector.size); - let shifted_field = - SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); - self.mov_instruction(shifted_field.address, source_field.address); + self.black_box_op_instruction(BlackBoxOp::ToRadix { + input: source_field.address, + radix, + output: HeapArray { pointer: target_vector.pointer, size: limb_count }, + }); let limb_field = SingleAddrVariable::new(self.allocate_register(), FieldElement::max_num_bits()); let limb_casted = SingleAddrVariable::new(self.allocate_register(), limb_bit_size); - self.codegen_loop(target_vector.size, |ctx, iterator_register| { - // Compute the modulus - ctx.binary_instruction( - shifted_field, - radix_as_field, - limb_field, - BrilligBinaryOp::Modulo, - ); - // Cast it - ctx.cast_instruction(limb_casted, limb_field); - // Write it - ctx.codegen_array_set(target_vector.pointer, 
iterator_register, limb_casted.address); - // Integer div the field - ctx.binary_instruction( - shifted_field, - radix_as_field, - shifted_field, - BrilligBinaryOp::UnsignedDiv, - ); - }); + if limb_bit_size != FieldElement::max_num_bits() { + self.codegen_loop(target_vector.size, |ctx, iterator_register| { + // Read the limb + ctx.codegen_array_get(target_vector.pointer, iterator_register, limb_field.address); + // Cast it + ctx.cast_instruction(limb_casted, limb_field); + // Write it + ctx.codegen_array_set( + target_vector.pointer, + iterator_register, + limb_casted.address, + ); + }); + } // Deallocate our temporary registers - self.deallocate_single_addr(shifted_field); self.deallocate_single_addr(limb_field); self.deallocate_single_addr(limb_casted); - self.deallocate_single_addr(radix_as_field); if big_endian { self.codegen_reverse_vector_in_place(target_vector); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 667ccf6ddb..def91f82bf 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -334,7 +334,9 @@ impl DebugShow { outputs ); } - BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { + BlackBoxOp::EmbeddedCurveAdd { + input1_x, input1_y, input2_x, input2_y, result, .. 
+ } => { debug_println!( self.enable_debug_trace, " EMBEDDED_CURVE_ADD ({} {}) ({} {}) -> {}", @@ -451,6 +453,15 @@ impl DebugShow { output ); } + BlackBoxOp::ToRadix { input, radix, output } => { + debug_println!( + self.enable_debug_trace, + " TO_RADIX {} {} -> {}", + input, + radix, + output + ); + } } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 732bd3cbc5..38e9bdfa8b 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -7,7 +7,7 @@ use super::{ }; use acvm::{acir::brillig::MemoryAddress, FieldElement}; -pub(crate) const MAX_STACK_SIZE: usize = 1024; +pub(crate) const MAX_STACK_SIZE: usize = 2048; impl BrilligContext { /// Creates an entry point artifact that will jump to the function label provided. diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 407cdf0a17..e8f6f7b281 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -8,7 +8,7 @@ use crate::ssa::ir::dfg::CallStack; use crate::ssa::ir::types::Type as SsaType; use crate::ssa::ir::{instruction::Endian, types::NumericType}; use acvm::acir::circuit::brillig::{BrilligInputs, BrilligOutputs}; -use acvm::acir::circuit::opcodes::{BlockId, MemOp}; +use acvm::acir::circuit::opcodes::{BlockId, BlockType, MemOp}; use acvm::acir::circuit::{AssertionPayload, ExpressionOrMemory, Opcode}; use acvm::blackbox_solver; use acvm::brillig_vm::{MemoryValue, VMStatus, VM}; @@ -1773,6 +1773,7 @@ impl AcirContext { block_id: BlockId, len: usize, optional_value: Option, + databus: BlockType, ) -> Result<(), InternalError> { let initialized_values = match optional_value { None => { @@ -1787,7 +1788,11 @@ impl AcirContext { } }; - 
self.acir_ir.push_opcode(Opcode::MemoryInit { block_id, init: initialized_values }); + self.acir_ir.push_opcode(Opcode::MemoryInit { + block_id, + init: initialized_values, + block_type: databus, + }); Ok(()) } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index c1249ae41c..d23f4abe5f 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -293,14 +293,13 @@ impl GeneratedAcir { BlackBoxFunc::MultiScalarMul => BlackBoxFuncCall::MultiScalarMul { points: inputs[0].clone(), scalars: inputs[1].clone(), - outputs: (outputs[0], outputs[1]), + outputs: (outputs[0], outputs[1], outputs[2]), }, + BlackBoxFunc::EmbeddedCurveAdd => BlackBoxFuncCall::EmbeddedCurveAdd { - input1_x: inputs[0][0], - input1_y: inputs[1][0], - input2_x: inputs[2][0], - input2_y: inputs[3][0], - outputs: (outputs[0], outputs[1]), + input1: Box::new([inputs[0][0], inputs[1][0], inputs[2][0]]), + input2: Box::new([inputs[3][0], inputs[4][0], inputs[5][0]]), + outputs: (outputs[0], outputs[1], outputs[2]), }, BlackBoxFunc::Keccak256 => { let var_message_size = match inputs.to_vec().pop() { @@ -684,8 +683,8 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { // Recursive aggregation has a variable number of inputs BlackBoxFunc::RecursiveAggregation => None, - // Addition over the embedded curve: input are coordinates (x1,y1) and (x2,y2) of the Grumpkin points - BlackBoxFunc::EmbeddedCurveAdd => Some(4), + // Addition over the embedded curve: input are coordinates (x1,y1,infinite1) and (x2,y2,infinite2) of the Grumpkin points + BlackBoxFunc::EmbeddedCurveAdd => Some(6), // Big integer operations take in 0 inputs. They use constants for their inputs. 
BlackBoxFunc::BigIntAdd @@ -735,7 +734,7 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { // Output of operations over the embedded curve // will be 2 field elements representing the point. - BlackBoxFunc::MultiScalarMul | BlackBoxFunc::EmbeddedCurveAdd => Some(2), + BlackBoxFunc::MultiScalarMul | BlackBoxFunc::EmbeddedCurveAdd => Some(3), // Big integer operations return a big integer BlackBoxFunc::BigIntAdd diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index dea7e2fa13..117804b370 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -29,6 +29,7 @@ use crate::brillig::brillig_ir::BrilligContext; use crate::brillig::{brillig_gen::brillig_fn::FunctionContext as BrilligFunctionContext, Brillig}; use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport}; pub(crate) use acir_ir::generated_acir::GeneratedAcir; +use acvm::acir::circuit::opcodes::BlockType; use noirc_frontend::monomorphization::ast::InlineType; use acvm::acir::circuit::brillig::BrilligBytecode; @@ -1683,7 +1684,18 @@ impl<'a> Context<'a> { len: usize, value: Option, ) -> Result<(), InternalError> { - self.acir_context.initialize_array(array, len, value)?; + let databus = if self.data_bus.call_data.is_some() + && self.block_id(&self.data_bus.call_data.unwrap()) == array + { + BlockType::CallData + } else if self.data_bus.return_data.is_some() + && self.block_id(&self.data_bus.return_data.unwrap()) == array + { + BlockType::ReturnData + } else { + BlockType::Memory + }; + self.acir_context.initialize_array(array, len, value, databus)?; self.initialized_arrays.insert(array); Ok(()) } @@ -1729,13 +1741,16 @@ impl<'a> Context<'a> { // will expand the array if there is one. 
let return_acir_vars = self.flatten_value_list(return_values, dfg)?; let mut warnings = Vec::new(); - for acir_var in return_acir_vars { + for (acir_var, is_databus) in return_acir_vars { if self.acir_context.is_constant(&acir_var) { warnings.push(SsaReport::Warning(InternalWarning::ReturnConstant { call_stack: call_stack.clone(), })); } - self.acir_context.return_var(acir_var)?; + if !is_databus { + // We do not return value for the data bus. + self.acir_context.return_var(acir_var)?; + } } Ok(warnings) } @@ -2659,12 +2674,22 @@ impl<'a> Context<'a> { &mut self, arguments: &[ValueId], dfg: &DataFlowGraph, - ) -> Result, InternalError> { + ) -> Result, InternalError> { let mut acir_vars = Vec::with_capacity(arguments.len()); for value_id in arguments { + let is_databus = if let Some(return_databus) = self.data_bus.return_data { + dfg[*value_id] == dfg[return_databus] + } else { + false + }; let value = self.convert_value(*value_id, dfg); acir_vars.append( - &mut self.acir_context.flatten(value)?.iter().map(|(var, _)| *var).collect(), + &mut self + .acir_context + .flatten(value)? + .iter() + .map(|(var, _)| (*var, is_databus)) + .collect(), ); } Ok(acir_vars) diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index bddfb25f26..73dc388818 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -11,7 +11,7 @@ use crate::ssa::{ ir::{ basic_block::BasicBlockId, dfg::{CallStack, InsertInstructionResult}, - function::{Function, FunctionId}, + function::{Function, FunctionId, RuntimeType}, instruction::{Instruction, InstructionId, TerminatorInstruction}, value::{Value, ValueId}, }, @@ -392,10 +392,12 @@ impl<'function> PerFunctionContext<'function> { Some(func_id) => { let function = &ssa.functions[&func_id]; // If we have not already finished the flattening pass, functions marked - // to not have predicates should be marked as entry points. 
+ // to not have predicates should be marked as entry points unless we are inlining into brillig. + let entry_point = &ssa.functions[&self.context.entry_point]; let no_predicates_is_entry_point = self.context.no_predicates_is_entry_point - && function.is_no_predicates(); + && function.is_no_predicates() + && !matches!(entry_point.runtime(), RuntimeType::Brillig); if function.runtime().is_entry_point() || no_predicates_is_entry_point { self.push_instruction(*id); } else { diff --git a/compiler/noirc_frontend/src/ast/function.rs b/compiler/noirc_frontend/src/ast/function.rs index dc426a4642..8acc068d86 100644 --- a/compiler/noirc_frontend/src/ast/function.rs +++ b/compiler/noirc_frontend/src/ast/function.rs @@ -32,6 +32,15 @@ pub enum FunctionKind { Recursive, } +impl FunctionKind { + pub fn can_ignore_return_type(self) -> bool { + match self { + FunctionKind::LowLevel | FunctionKind::Builtin | FunctionKind::Oracle => true, + FunctionKind::Normal | FunctionKind::Recursive => false, + } + } +} + impl NoirFunction { pub fn normal(def: FunctionDefinition) -> NoirFunction { NoirFunction { kind: FunctionKind::Normal, def } diff --git a/compiler/noirc_frontend/src/ast/statement.rs b/compiler/noirc_frontend/src/ast/statement.rs index 0da39edfd8..94b5841e52 100644 --- a/compiler/noirc_frontend/src/ast/statement.rs +++ b/compiler/noirc_frontend/src/ast/statement.rs @@ -565,7 +565,7 @@ impl ForRange { identifier: Ident, block: Expression, for_loop_span: Span, - ) -> StatementKind { + ) -> Statement { /// Counter used to generate unique names when desugaring /// code in the parser requires the creation of fresh variables. /// The parser is stateless so this is a static global instead. 
@@ -662,7 +662,8 @@ impl ForRange { let block = ExpressionKind::Block(BlockExpression { statements: vec![let_array, for_loop], }); - StatementKind::Expression(Expression::new(block, for_loop_span)) + let kind = StatementKind::Expression(Expression::new(block, for_loop_span)); + Statement { kind, span: for_loop_span } } } } diff --git a/compiler/noirc_frontend/src/elaborator/expressions.rs b/compiler/noirc_frontend/src/elaborator/expressions.rs new file mode 100644 index 0000000000..ed8ed5305d --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -0,0 +1,604 @@ +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; +use regex::Regex; +use rustc_hash::FxHashSet as HashSet; + +use crate::{ + ast::{ + ArrayLiteral, ConstructorExpression, IfExpression, InfixExpression, Lambda, + UnresolvedTypeExpression, + }, + hir::{ + resolution::{errors::ResolverError, resolver::LambdaContext}, + type_check::TypeCheckError, + }, + hir_def::{ + expr::{ + HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, + HirConstructorExpression, HirIdent, HirIfExpression, HirIndexExpression, + HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, + HirMethodReference, HirPrefixExpression, + }, + traits::TraitConstraint, + }, + macros_api::{ + BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, HirExpression, + HirLiteral, HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, + MethodCallExpression, PrefixExpression, + }, + node_interner::{DefinitionKind, ExprId, FuncId}, + Shared, StructType, Type, +}; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + pub(super) fn elaborate_expression(&mut self, expr: Expression) -> (ExprId, Type) { + let (hir_expr, typ) = match expr.kind { + ExpressionKind::Literal(literal) => self.elaborate_literal(literal, expr.span), + ExpressionKind::Block(block) => self.elaborate_block(block), + ExpressionKind::Prefix(prefix) 
=> self.elaborate_prefix(*prefix), + ExpressionKind::Index(index) => self.elaborate_index(*index), + ExpressionKind::Call(call) => self.elaborate_call(*call, expr.span), + ExpressionKind::MethodCall(call) => self.elaborate_method_call(*call, expr.span), + ExpressionKind::Constructor(constructor) => self.elaborate_constructor(*constructor), + ExpressionKind::MemberAccess(access) => { + return self.elaborate_member_access(*access, expr.span) + } + ExpressionKind::Cast(cast) => self.elaborate_cast(*cast, expr.span), + ExpressionKind::Infix(infix) => return self.elaborate_infix(*infix, expr.span), + ExpressionKind::If(if_) => self.elaborate_if(*if_), + ExpressionKind::Variable(variable) => return self.elaborate_variable(variable), + ExpressionKind::Tuple(tuple) => self.elaborate_tuple(tuple), + ExpressionKind::Lambda(lambda) => self.elaborate_lambda(*lambda), + ExpressionKind::Parenthesized(expr) => return self.elaborate_expression(*expr), + ExpressionKind::Quote(quote) => self.elaborate_quote(quote), + ExpressionKind::Comptime(comptime) => self.elaborate_comptime_block(comptime), + ExpressionKind::Error => (HirExpression::Error, Type::Error), + }; + let id = self.interner.push_expr(hir_expr); + self.interner.push_expr_location(id, expr.span, self.file); + self.interner.push_expr_type(id, typ.clone()); + (id, typ) + } + + pub(super) fn elaborate_block(&mut self, block: BlockExpression) -> (HirExpression, Type) { + self.push_scope(); + let mut block_type = Type::Unit; + let mut statements = Vec::with_capacity(block.statements.len()); + + for (i, statement) in block.statements.into_iter().enumerate() { + let (id, stmt_type) = self.elaborate_statement(statement); + statements.push(id); + + if let HirStatement::Semi(expr) = self.interner.statement(&id) { + let inner_expr_type = self.interner.id_type(expr); + let span = self.interner.expr_span(&expr); + + self.unify(&inner_expr_type, &Type::Unit, || TypeCheckError::UnusedResultError { + expr_type: inner_expr_type.clone(), + 
expr_span: span, + }); + + if i + 1 == statements.len() { + block_type = stmt_type; + } + } + } + + self.pop_scope(); + (HirExpression::Block(HirBlockExpression { statements }), block_type) + } + + fn elaborate_literal(&mut self, literal: Literal, span: Span) -> (HirExpression, Type) { + use HirExpression::Literal as Lit; + match literal { + Literal::Unit => (Lit(HirLiteral::Unit), Type::Unit), + Literal::Bool(b) => (Lit(HirLiteral::Bool(b)), Type::Bool), + Literal::Integer(integer, sign) => { + let int = HirLiteral::Integer(integer, sign); + (Lit(int), self.polymorphic_integer_or_field()) + } + Literal::Str(str) | Literal::RawStr(str, _) => { + let len = Type::Constant(str.len() as u64); + (Lit(HirLiteral::Str(str)), Type::String(Box::new(len))) + } + Literal::FmtStr(str) => self.elaborate_fmt_string(str, span), + Literal::Array(array_literal) => { + self.elaborate_array_literal(array_literal, span, true) + } + Literal::Slice(array_literal) => { + self.elaborate_array_literal(array_literal, span, false) + } + } + } + + fn elaborate_array_literal( + &mut self, + array_literal: ArrayLiteral, + span: Span, + is_array: bool, + ) -> (HirExpression, Type) { + let (expr, elem_type, length) = match array_literal { + ArrayLiteral::Standard(elements) => { + let first_elem_type = self.interner.next_type_variable(); + let first_span = elements.first().map(|elem| elem.span).unwrap_or(span); + + let elements = vecmap(elements.into_iter().enumerate(), |(i, elem)| { + let span = elem.span; + let (elem_id, elem_type) = self.elaborate_expression(elem); + + self.unify(&elem_type, &first_elem_type, || { + TypeCheckError::NonHomogeneousArray { + first_span, + first_type: first_elem_type.to_string(), + first_index: 0, + second_span: span, + second_type: elem_type.to_string(), + second_index: i, + } + .add_context("elements in an array must have the same type") + }); + elem_id + }); + + let length = Type::Constant(elements.len() as u64); + (HirArrayLiteral::Standard(elements), 
first_elem_type, length) + } + ArrayLiteral::Repeated { repeated_element, length } => { + let span = length.span; + let length = + UnresolvedTypeExpression::from_expr(*length, span).unwrap_or_else(|error| { + self.push_err(ResolverError::ParserError(Box::new(error))); + UnresolvedTypeExpression::Constant(0, span) + }); + + let length = self.convert_expression_type(length); + let (repeated_element, elem_type) = self.elaborate_expression(*repeated_element); + + let length_clone = length.clone(); + (HirArrayLiteral::Repeated { repeated_element, length }, elem_type, length_clone) + } + }; + let constructor = if is_array { HirLiteral::Array } else { HirLiteral::Slice }; + let elem_type = Box::new(elem_type); + let typ = if is_array { + Type::Array(Box::new(length), elem_type) + } else { + Type::Slice(elem_type) + }; + (HirExpression::Literal(constructor(expr)), typ) + } + + fn elaborate_fmt_string(&mut self, str: String, call_expr_span: Span) -> (HirExpression, Type) { + let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") + .expect("ICE: an invalid regex pattern was used for checking format strings"); + + let mut fmt_str_idents = Vec::new(); + let mut capture_types = Vec::new(); + + for field in re.find_iter(&str) { + let matched_str = field.as_str(); + let ident_name = &matched_str[1..(matched_str.len() - 1)]; + + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(ident_name); + if let Some((old_value, _)) = variable { + old_value.num_times_used += 1; + let ident = HirExpression::Ident(old_value.ident.clone()); + let expr_id = self.interner.push_expr(ident); + self.interner.push_expr_location(expr_id, call_expr_span, self.file); + let ident = old_value.ident.clone(); + let typ = self.type_check_variable(ident, expr_id); + self.interner.push_expr_type(expr_id, typ.clone()); + capture_types.push(typ); + fmt_str_idents.push(expr_id); + } else if ident_name.parse::().is_ok() { + self.push_err(ResolverError::NumericConstantInFormatString { + name: 
ident_name.to_owned(), + span: call_expr_span, + }); + } else { + self.push_err(ResolverError::VariableNotDeclared { + name: ident_name.to_owned(), + span: call_expr_span, + }); + } + } + + let len = Type::Constant(str.len() as u64); + let typ = Type::FmtString(Box::new(len), Box::new(Type::Tuple(capture_types))); + (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) + } + + fn elaborate_prefix(&mut self, prefix: PrefixExpression) -> (HirExpression, Type) { + let span = prefix.rhs.span; + let (rhs, rhs_type) = self.elaborate_expression(prefix.rhs); + let ret_type = self.type_check_prefix_operand(&prefix.operator, &rhs_type, span); + (HirExpression::Prefix(HirPrefixExpression { operator: prefix.operator, rhs }), ret_type) + } + + fn elaborate_index(&mut self, index_expr: IndexExpression) -> (HirExpression, Type) { + let span = index_expr.index.span; + let (index, index_type) = self.elaborate_expression(index_expr.index); + + let expected = self.polymorphic_integer_or_field(); + self.unify(&index_type, &expected, || TypeCheckError::TypeMismatch { + expected_typ: "an integer".to_owned(), + expr_typ: index_type.to_string(), + expr_span: span, + }); + + // When writing `a[i]`, if `a : &mut ...` then automatically dereference `a` as many + // times as needed to get the underlying array. 
+ let lhs_span = index_expr.collection.span; + let (lhs, lhs_type) = self.elaborate_expression(index_expr.collection); + let (collection, lhs_type) = self.insert_auto_dereferences(lhs, lhs_type); + + let typ = match lhs_type.follow_bindings() { + // XXX: We can check the array bounds here also, but it may be better to constant fold first + // and have ConstId instead of ExprId for constants + Type::Array(_, base_type) => *base_type, + Type::Slice(base_type) => *base_type, + Type::Error => Type::Error, + typ => { + self.push_err(TypeCheckError::TypeMismatch { + expected_typ: "Array".to_owned(), + expr_typ: typ.to_string(), + expr_span: lhs_span, + }); + Type::Error + } + }; + + let expr = HirExpression::Index(HirIndexExpression { collection, index }); + (expr, typ) + } + + fn elaborate_call(&mut self, call: CallExpression, span: Span) -> (HirExpression, Type) { + let (func, func_type) = self.elaborate_expression(*call.func); + + let mut arguments = Vec::with_capacity(call.arguments.len()); + let args = vecmap(call.arguments, |arg| { + let span = arg.span; + let (arg, typ) = self.elaborate_expression(arg); + arguments.push(arg); + (typ, arg, span) + }); + + let location = Location::new(span, self.file); + let call = HirCallExpression { func, arguments, location }; + let typ = self.type_check_call(&call, func_type, args, span); + (HirExpression::Call(call), typ) + } + + fn elaborate_method_call( + &mut self, + method_call: MethodCallExpression, + span: Span, + ) -> (HirExpression, Type) { + let object_span = method_call.object.span; + let (mut object, mut object_type) = self.elaborate_expression(method_call.object); + object_type = object_type.follow_bindings(); + + let method_name = method_call.method_name.0.contents.as_str(); + match self.lookup_method(&object_type, method_name, span) { + Some(method_ref) => { + // Automatically add `&mut` if the method expects a mutable reference and + // the object is not already one. 
+ if let HirMethodReference::FuncId(func_id) = &method_ref { + if *func_id != FuncId::dummy_id() { + let function_type = self.interner.function_meta(func_id).typ.clone(); + + self.try_add_mutable_reference_to_object( + &function_type, + &mut object_type, + &mut object, + ); + } + } + + // These arguments will be given to the desugared function call. + // Compared to the method arguments, they also contain the object. + let mut function_args = Vec::with_capacity(method_call.arguments.len() + 1); + let mut arguments = Vec::with_capacity(method_call.arguments.len()); + + function_args.push((object_type.clone(), object, object_span)); + + for arg in method_call.arguments { + let span = arg.span; + let (arg, typ) = self.elaborate_expression(arg); + arguments.push(arg); + function_args.push((typ, arg, span)); + } + + let location = Location::new(span, self.file); + let method = method_call.method_name; + let method_call = HirMethodCallExpression { method, object, arguments, location }; + + // Desugar the method call into a normal, resolved function call + // so that the backend doesn't need to worry about methods + // TODO: update object_type here? + let ((function_id, function_name), function_call) = method_call.into_function_call( + &method_ref, + object_type, + location, + self.interner, + ); + + let func_type = self.type_check_variable(function_name, function_id); + + // Type check the new call now that it has been changed from a method call + // to a function call. This way we avoid duplicating code. 
+ let typ = self.type_check_call(&function_call, func_type, function_args, span); + (HirExpression::Call(function_call), typ) + } + None => (HirExpression::Error, Type::Error), + } + } + + fn elaborate_constructor( + &mut self, + constructor: ConstructorExpression, + ) -> (HirExpression, Type) { + let span = constructor.type_name.span(); + + match self.lookup_type_or_error(constructor.type_name) { + Some(Type::Struct(r#type, struct_generics)) => { + let struct_type = r#type.clone(); + let generics = struct_generics.clone(); + + let fields = constructor.fields; + let field_types = r#type.borrow().get_fields(&struct_generics); + let fields = self.resolve_constructor_expr_fields( + struct_type.clone(), + field_types, + fields, + span, + ); + let expr = HirExpression::Constructor(HirConstructorExpression { + fields, + r#type, + struct_generics, + }); + (expr, Type::Struct(struct_type, generics)) + } + Some(typ) => { + self.push_err(ResolverError::NonStructUsedInConstructor { typ, span }); + (HirExpression::Error, Type::Error) + } + None => (HirExpression::Error, Type::Error), + } + } + + /// Resolve all the fields of a struct constructor expression. + /// Ensures all fields are present, none are repeated, and all + /// are part of the struct. 
+ fn resolve_constructor_expr_fields( + &mut self, + struct_type: Shared, + field_types: Vec<(String, Type)>, + fields: Vec<(Ident, Expression)>, + span: Span, + ) -> Vec<(Ident, ExprId)> { + let mut ret = Vec::with_capacity(fields.len()); + let mut seen_fields = HashSet::default(); + let mut unseen_fields = struct_type.borrow().field_names(); + + for (field_name, field) in fields { + let expected_type = field_types.iter().find(|(name, _)| name == &field_name.0.contents); + let expected_type = expected_type.map(|(_, typ)| typ).unwrap_or(&Type::Error); + + let field_span = field.span; + let (resolved, field_type) = self.elaborate_expression(field); + + if unseen_fields.contains(&field_name) { + unseen_fields.remove(&field_name); + seen_fields.insert(field_name.clone()); + + self.unify_with_coercions(&field_type, expected_type, resolved, || { + TypeCheckError::TypeMismatch { + expected_typ: expected_type.to_string(), + expr_typ: field_type.to_string(), + expr_span: field_span, + } + }); + } else if seen_fields.contains(&field_name) { + // duplicate field + self.push_err(ResolverError::DuplicateField { field: field_name.clone() }); + } else { + // field not required by struct + self.push_err(ResolverError::NoSuchField { + field: field_name.clone(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret.push((field_name, resolved)); + } + + if !unseen_fields.is_empty() { + self.push_err(ResolverError::MissingFields { + span, + missing_fields: unseen_fields.into_iter().map(|field| field.to_string()).collect(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret + } + + fn elaborate_member_access( + &mut self, + access: MemberAccessExpression, + span: Span, + ) -> (ExprId, Type) { + let (lhs, lhs_type) = self.elaborate_expression(access.lhs); + let rhs = access.rhs; + // `is_offset` is only used when lhs is a reference and we want to return a reference to rhs + let access = HirMemberAccess { lhs, rhs, is_offset: false }; + let 
expr_id = self.intern_expr(HirExpression::MemberAccess(access.clone()), span); + let typ = self.type_check_member_access(access, expr_id, lhs_type, span); + self.interner.push_expr_type(expr_id, typ.clone()); + (expr_id, typ) + } + + pub fn intern_expr(&mut self, expr: HirExpression, span: Span) -> ExprId { + let id = self.interner.push_expr(expr); + self.interner.push_expr_location(id, span, self.file); + id + } + + fn elaborate_cast(&mut self, cast: CastExpression, span: Span) -> (HirExpression, Type) { + let (lhs, lhs_type) = self.elaborate_expression(cast.lhs); + let r#type = self.resolve_type(cast.r#type); + let result = self.check_cast(lhs_type, &r#type, span); + let expr = HirExpression::Cast(HirCastExpression { lhs, r#type }); + (expr, result) + } + + fn elaborate_infix(&mut self, infix: InfixExpression, span: Span) -> (ExprId, Type) { + let (lhs, lhs_type) = self.elaborate_expression(infix.lhs); + let (rhs, rhs_type) = self.elaborate_expression(infix.rhs); + let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); + + let operator = HirBinaryOp::new(infix.operator, self.file); + let expr = HirExpression::Infix(HirInfixExpression { + lhs, + operator, + trait_method_id: trait_id, + rhs, + }); + + let expr_id = self.interner.push_expr(expr); + self.interner.push_expr_location(expr_id, span, self.file); + + let typ = match self.infix_operand_type_rules(&lhs_type, &operator, &rhs_type, span) { + Ok((typ, use_impl)) => { + if use_impl { + // Delay checking the trait constraint until the end of the function. + // Checking it now could bind an unbound type variable to any type + // that implements the trait. 
+ let constraint = TraitConstraint { + typ: lhs_type.clone(), + trait_id: trait_id.trait_id, + trait_generics: Vec::new(), + }; + self.trait_constraints.push((constraint, expr_id)); + self.type_check_operator_method(expr_id, trait_id, &lhs_type, span); + } + typ + } + Err(error) => { + self.push_err(error); + Type::Error + } + }; + + self.interner.push_expr_type(expr_id, typ.clone()); + (expr_id, typ) + } + + fn elaborate_if(&mut self, if_expr: IfExpression) -> (HirExpression, Type) { + let expr_span = if_expr.condition.span; + let (condition, cond_type) = self.elaborate_expression(if_expr.condition); + let (consequence, mut ret_type) = self.elaborate_expression(if_expr.consequence); + + self.unify(&cond_type, &Type::Bool, || TypeCheckError::TypeMismatch { + expected_typ: Type::Bool.to_string(), + expr_typ: cond_type.to_string(), + expr_span, + }); + + let alternative = if_expr.alternative.map(|alternative| { + let expr_span = alternative.span; + let (else_, else_type) = self.elaborate_expression(alternative); + + self.unify(&ret_type, &else_type, || { + let err = TypeCheckError::TypeMismatch { + expected_typ: ret_type.to_string(), + expr_typ: else_type.to_string(), + expr_span, + }; + + let context = if ret_type == Type::Unit { + "Are you missing a semicolon at the end of your 'else' branch?" + } else if else_type == Type::Unit { + "Are you missing a semicolon at the end of the first block of this 'if'?" 
+ } else { + "Expected the types of both if branches to be equal" + }; + + err.add_context(context) + }); + else_ + }); + + if alternative.is_none() { + ret_type = Type::Unit; + } + + let if_expr = HirIfExpression { condition, consequence, alternative }; + (HirExpression::If(if_expr), ret_type) + } + + fn elaborate_tuple(&mut self, tuple: Vec) -> (HirExpression, Type) { + let mut element_ids = Vec::with_capacity(tuple.len()); + let mut element_types = Vec::with_capacity(tuple.len()); + + for element in tuple { + let (id, typ) = self.elaborate_expression(element); + element_ids.push(id); + element_types.push(typ); + } + + (HirExpression::Tuple(element_ids), Type::Tuple(element_types)) + } + + fn elaborate_lambda(&mut self, lambda: Lambda) -> (HirExpression, Type) { + self.push_scope(); + let scope_index = self.scopes.current_scope_index(); + + self.lambda_stack.push(LambdaContext { captures: Vec::new(), scope_index }); + + let mut arg_types = Vec::with_capacity(lambda.parameters.len()); + let parameters = vecmap(lambda.parameters, |(pattern, typ)| { + let parameter = DefinitionKind::Local(None); + let typ = self.resolve_inferred_type(typ); + arg_types.push(typ.clone()); + (self.elaborate_pattern(pattern, typ.clone(), parameter), typ) + }); + + let return_type = self.resolve_inferred_type(lambda.return_type); + let body_span = lambda.body.span; + let (body, body_type) = self.elaborate_expression(lambda.body); + + let lambda_context = self.lambda_stack.pop().unwrap(); + self.pop_scope(); + + self.unify(&body_type, &return_type, || TypeCheckError::TypeMismatch { + expected_typ: return_type.to_string(), + expr_typ: body_type.to_string(), + expr_span: body_span, + }); + + let captured_vars = vecmap(&lambda_context.captures, |capture| { + self.interner.definition_type(capture.ident.id) + }); + + let env_type = + if captured_vars.is_empty() { Type::Unit } else { Type::Tuple(captured_vars) }; + + let captures = lambda_context.captures; + let expr = 
HirExpression::Lambda(HirLambda { parameters, return_type, body, captures }); + (expr, Type::Function(arg_types, Box::new(body_type), Box::new(env_type))) + } + + fn elaborate_quote(&mut self, block: BlockExpression) -> (HirExpression, Type) { + (HirExpression::Quote(block), Type::Code) + } + + fn elaborate_comptime_block(&mut self, _comptime: BlockExpression) -> (HirExpression, Type) { + todo!("Elaborate comptime block") + } +} diff --git a/compiler/noirc_frontend/src/elaborator/mod.rs b/compiler/noirc_frontend/src/elaborator/mod.rs new file mode 100644 index 0000000000..446e5b62ea --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/mod.rs @@ -0,0 +1,782 @@ +#![allow(unused)] +use std::{ + collections::{BTreeMap, BTreeSet}, + rc::Rc, +}; + +use crate::hir::def_map::CrateDefMap; +use crate::{ + ast::{ + ArrayLiteral, ConstructorExpression, FunctionKind, IfExpression, InfixExpression, Lambda, + UnresolvedTraitConstraint, UnresolvedTypeExpression, + }, + hir::{ + def_collector::dc_crate::CompilationError, + resolution::{errors::ResolverError, path_resolver::PathResolver, resolver::LambdaContext}, + scope::ScopeForest as GenericScopeForest, + type_check::TypeCheckError, + }, + hir_def::{ + expr::{ + HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCastExpression, + HirConstructorExpression, HirIdent, HirIfExpression, HirIndexExpression, + HirInfixExpression, HirLambda, HirMemberAccess, HirMethodCallExpression, + HirMethodReference, HirPrefixExpression, + }, + traits::TraitConstraint, + }, + macros_api::{ + BlockExpression, CallExpression, CastExpression, Expression, ExpressionKind, HirExpression, + HirLiteral, HirStatement, Ident, IndexExpression, Literal, MemberAccessExpression, + MethodCallExpression, NodeInterner, NoirFunction, PrefixExpression, Statement, + StatementKind, StructId, + }, + node_interner::{DefinitionKind, DependencyId, ExprId, FuncId, StmtId, TraitId}, + Shared, StructType, Type, TypeVariable, +}; +use crate::{ + 
ast::{TraitBound, UnresolvedGenerics}, + graph::CrateId, + hir::{ + def_collector::{ + dc_crate::{CollectedItems, DefCollector}, + errors::DefCollectorErrorKind, + }, + def_map::{LocalModuleId, ModuleDefId, ModuleId, MAIN_FUNCTION}, + resolution::{ + errors::PubPosition, + import::{PathResolution, PathResolutionError}, + path_resolver::StandardPathResolver, + }, + Context, + }, + hir_def::function::{FuncMeta, HirFunction}, + macros_api::{Param, Path, UnresolvedType, UnresolvedTypeData, Visibility}, + node_interner::TraitImplId, + token::FunctionAttribute, + Generics, +}; + +mod expressions; +mod patterns; +mod scope; +mod statements; +mod types; + +use fm::FileId; +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; +use regex::Regex; +use rustc_hash::FxHashSet as HashSet; + +/// ResolverMetas are tagged onto each definition to track how many times they are used +#[derive(Debug, PartialEq, Eq)] +pub struct ResolverMeta { + num_times_used: usize, + ident: HirIdent, + warn_if_unused: bool, +} + +type ScopeForest = GenericScopeForest; + +pub struct Elaborator<'context> { + scopes: ScopeForest, + + errors: Vec<(CompilationError, FileId)>, + + interner: &'context mut NodeInterner, + + def_maps: &'context BTreeMap, + + file: FileId, + + in_unconstrained_fn: bool, + nested_loops: usize, + + /// True if the current module is a contract. + /// This is usually determined by self.path_resolver.module_id(), but it can + /// be overridden for impls. Impls are an odd case since the methods within resolve + /// as if they're in the parent module, but should be placed in a child module. + /// Since they should be within a child module, in_contract is manually set to false + /// for these so we can still resolve them in the parent module without them being in a contract. + in_contract: bool, + + /// Contains a mapping of the current struct or functions's generics to + /// unique type variables if we're resolving a struct. Empty otherwise. 
+ /// This is a Vec rather than a map to preserve the order a functions generics + /// were declared in. + generics: Vec<(Rc, TypeVariable, Span)>, + + /// When resolving lambda expressions, we need to keep track of the variables + /// that are captured. We do this in order to create the hidden environment + /// parameter for the lambda function. + lambda_stack: Vec, + + /// Set to the current type if we're resolving an impl + self_type: Option, + + /// The current dependency item we're resolving. + /// Used to link items to their dependencies in the dependency graph + current_item: Option, + + /// If we're currently resolving methods within a trait impl, this will be set + /// to the corresponding trait impl ID. + current_trait_impl: Option, + + trait_id: Option, + + /// In-resolution names + /// + /// This needs to be a set because we can have multiple in-resolution + /// names when resolving structs that are declared in reverse order of their + /// dependencies, such as in the following case: + /// + /// ``` + /// struct Wrapper { + /// value: Wrapped + /// } + /// struct Wrapped { + /// } + /// ``` + resolving_ids: BTreeSet, + + trait_bounds: Vec, + + current_function: Option, + + /// All type variables created in the current function. + /// This map is used to default any integer type variables at the end of + /// a function (before checking trait constraints) if a type wasn't already chosen. + type_variables: Vec, + + /// Trait constraints are collected during type checking until they are + /// verified at the end of a function. This is because constraints arise + /// on each variable, but it is only until function calls when the types + /// needed for the trait constraint may become known. + trait_constraints: Vec<(TraitConstraint, ExprId)>, + + /// The current module this elaborator is in. + /// Initially empty, it is set whenever a new top-level item is resolved. 
+ local_module: LocalModuleId, + + crate_id: CrateId, +} + +impl<'context> Elaborator<'context> { + pub fn new(context: &'context mut Context, crate_id: CrateId) -> Self { + Self { + scopes: ScopeForest::default(), + errors: Vec::new(), + interner: &mut context.def_interner, + def_maps: &context.def_maps, + file: FileId::dummy(), + in_unconstrained_fn: false, + nested_loops: 0, + in_contract: false, + generics: Vec::new(), + lambda_stack: Vec::new(), + self_type: None, + current_item: None, + trait_id: None, + local_module: LocalModuleId::dummy_id(), + crate_id, + resolving_ids: BTreeSet::new(), + trait_bounds: Vec::new(), + current_function: None, + type_variables: Vec::new(), + trait_constraints: Vec::new(), + current_trait_impl: None, + } + } + + pub fn elaborate( + context: &'context mut Context, + crate_id: CrateId, + items: CollectedItems, + ) -> Vec<(CompilationError, FileId)> { + let mut this = Self::new(context, crate_id); + + // the resolver filters literal globals first + for global in items.globals {} + + for alias in items.type_aliases {} + + for trait_ in items.traits {} + + for struct_ in items.types {} + + for trait_impl in &items.trait_impls { + // only collect now + } + + for impl_ in &items.impls { + // only collect now + } + + // resolver resolves non-literal globals here + + for functions in items.functions { + this.file = functions.file_id; + this.trait_id = functions.trait_id; // TODO: Resolve? 
+ for (local_module, id, func) in functions.functions { + this.local_module = local_module; + this.elaborate_function(func, id); + } + } + + for impl_ in items.impls {} + + for trait_impl in items.trait_impls {} + + let cycle_errors = this.interner.check_for_dependency_cycles(); + this.errors.extend(cycle_errors); + + this.errors + } + + fn elaborate_function(&mut self, mut function: NoirFunction, id: FuncId) { + self.current_function = Some(id); + self.resolve_where_clause(&mut function.def.where_clause); + + // Without this, impl methods can accidentally be placed in contracts. See #3254 + if self.self_type.is_some() { + self.in_contract = false; + } + + self.scopes.start_function(); + self.current_item = Some(DependencyId::Function(id)); + + // Check whether the function has globals in the local module and add them to the scope + self.resolve_local_globals(); + self.add_generics(&function.def.generics); + + self.desugar_impl_trait_args(&mut function, id); + self.trait_bounds = function.def.where_clause.clone(); + + let is_low_level_or_oracle = function + .attributes() + .function + .as_ref() + .map_or(false, |func| func.is_low_level() || func.is_oracle()); + + if function.def.is_unconstrained { + self.in_unconstrained_fn = true; + } + + let func_meta = self.extract_meta(&function, id); + + self.add_trait_constraints_to_scope(&func_meta); + + let (hir_func, body_type) = match function.kind { + FunctionKind::Builtin | FunctionKind::LowLevel | FunctionKind::Oracle => { + (HirFunction::empty(), Type::Error) + } + FunctionKind::Normal | FunctionKind::Recursive => { + let block_span = function.def.span; + let (block, body_type) = self.elaborate_block(function.def.body); + let expr_id = self.intern_expr(block, block_span); + self.interner.push_expr_type(expr_id, body_type.clone()); + (HirFunction::unchecked_from_expr(expr_id), body_type) + } + }; + + if !func_meta.can_ignore_return_type() { + self.type_check_function_body(body_type, &func_meta, hir_func.as_expr()); + } 
+ + // Default any type variables that still need defaulting. + // This is done before trait impl search since leaving them bindable can lead to errors + // when multiple impls are available. Instead we default first to choose the Field or u64 impl. + for typ in &self.type_variables { + if let Type::TypeVariable(variable, kind) = typ.follow_bindings() { + let msg = "TypeChecker should only track defaultable type vars"; + variable.bind(kind.default_type().expect(msg)); + } + } + + // Verify any remaining trait constraints arising from the function body + for (constraint, expr_id) in std::mem::take(&mut self.trait_constraints) { + let span = self.interner.expr_span(&expr_id); + self.verify_trait_constraint( + &constraint.typ, + constraint.trait_id, + &constraint.trait_generics, + expr_id, + span, + ); + } + + // Now remove all the `where` clause constraints we added + for constraint in &func_meta.trait_constraints { + self.interner.remove_assumed_trait_implementations_for_trait(constraint.trait_id); + } + + let func_scope_tree = self.scopes.end_function(); + + // The arguments to low-level and oracle functions are always unused so we do not produce warnings for them. 
+ if !is_low_level_or_oracle { + self.check_for_unused_variables_in_scope_tree(func_scope_tree); + } + + self.trait_bounds.clear(); + + self.interner.push_fn_meta(func_meta, id); + self.interner.update_fn(id, hir_func); + self.current_function = None; + } + + /// This turns function parameters of the form: + /// fn foo(x: impl Bar) + /// + /// into + /// fn foo(x: T0_impl_Bar) where T0_impl_Bar: Bar + fn desugar_impl_trait_args(&mut self, func: &mut NoirFunction, func_id: FuncId) { + let mut impl_trait_generics = HashSet::default(); + let mut counter: usize = 0; + for parameter in func.def.parameters.iter_mut() { + if let UnresolvedTypeData::TraitAsType(path, args) = ¶meter.typ.typ { + let mut new_generic_ident: Ident = + format!("T{}_impl_{}", func_id, path.as_string()).into(); + let mut new_generic_path = Path::from_ident(new_generic_ident.clone()); + while impl_trait_generics.contains(&new_generic_ident) + || self.lookup_generic_or_global_type(&new_generic_path).is_some() + { + new_generic_ident = + format!("T{}_impl_{}_{}", func_id, path.as_string(), counter).into(); + new_generic_path = Path::from_ident(new_generic_ident.clone()); + counter += 1; + } + impl_trait_generics.insert(new_generic_ident.clone()); + + let is_synthesized = true; + let new_generic_type_data = + UnresolvedTypeData::Named(new_generic_path, vec![], is_synthesized); + let new_generic_type = + UnresolvedType { typ: new_generic_type_data.clone(), span: None }; + let new_trait_bound = TraitBound { + trait_path: path.clone(), + trait_id: None, + trait_generics: args.to_vec(), + }; + let new_trait_constraint = UnresolvedTraitConstraint { + typ: new_generic_type, + trait_bound: new_trait_bound, + }; + + parameter.typ.typ = new_generic_type_data; + func.def.generics.push(new_generic_ident); + func.def.where_clause.push(new_trait_constraint); + } + } + self.add_generics(&impl_trait_generics.into_iter().collect()); + } + + /// Add the given generics to scope. 
+ /// Each generic will have a fresh Shared associated with it. + pub fn add_generics(&mut self, generics: &UnresolvedGenerics) -> Generics { + vecmap(generics, |generic| { + // Map the generic to a fresh type variable + let id = self.interner.next_type_variable_id(); + let typevar = TypeVariable::unbound(id); + let span = generic.0.span(); + + // Check for name collisions of this generic + let name = Rc::new(generic.0.contents.clone()); + + if let Some((_, _, first_span)) = self.find_generic(&name) { + self.push_err(ResolverError::DuplicateDefinition { + name: generic.0.contents.clone(), + first_span: *first_span, + second_span: span, + }); + } else { + self.generics.push((name, typevar.clone(), span)); + } + + typevar + }) + } + + fn push_err(&mut self, error: impl Into) { + self.errors.push((error.into(), self.file)); + } + + fn resolve_where_clause(&mut self, clause: &mut [UnresolvedTraitConstraint]) { + for bound in clause { + if let Some(trait_id) = self.resolve_trait_by_path(bound.trait_bound.trait_path.clone()) + { + bound.trait_bound.trait_id = Some(trait_id); + } + } + } + + fn resolve_trait_by_path(&mut self, path: Path) -> Option { + let path_resolver = StandardPathResolver::new(self.module_id()); + + let error = match path_resolver.resolve(self.def_maps, path.clone()) { + Ok(PathResolution { module_def_id: ModuleDefId::TraitId(trait_id), error }) => { + if let Some(error) = error { + self.push_err(error); + } + return Some(trait_id); + } + Ok(_) => DefCollectorErrorKind::NotATrait { not_a_trait_name: path }, + Err(_) => DefCollectorErrorKind::TraitNotFound { trait_path: path }, + }; + self.push_err(error); + None + } + + fn resolve_local_globals(&mut self) { + let globals = vecmap(self.interner.get_all_globals(), |global| { + (global.id, global.local_id, global.ident.clone()) + }); + for (id, local_module_id, name) in globals { + if local_module_id == self.local_module { + let definition = DefinitionKind::Global(id); + 
self.add_global_variable_decl(name, definition); + } + } + } + + /// TODO: This is currently only respected for generic free functions + /// there's a bunch of other places where trait constraints can pop up + fn resolve_trait_constraints( + &mut self, + where_clause: &[UnresolvedTraitConstraint], + ) -> Vec { + where_clause + .iter() + .cloned() + .filter_map(|constraint| self.resolve_trait_constraint(constraint)) + .collect() + } + + pub fn resolve_trait_constraint( + &mut self, + constraint: UnresolvedTraitConstraint, + ) -> Option { + let typ = self.resolve_type(constraint.typ); + let trait_generics = + vecmap(constraint.trait_bound.trait_generics, |typ| self.resolve_type(typ)); + + let span = constraint.trait_bound.trait_path.span(); + let the_trait = self.lookup_trait_or_error(constraint.trait_bound.trait_path)?; + let trait_id = the_trait.id; + + let expected_generics = the_trait.generics.len(); + let actual_generics = trait_generics.len(); + + if actual_generics != expected_generics { + let item_name = the_trait.name.to_string(); + self.push_err(ResolverError::IncorrectGenericCount { + span, + item_name, + actual: actual_generics, + expected: expected_generics, + }); + } + + Some(TraitConstraint { typ, trait_id, trait_generics }) + } + + /// Extract metadata from a NoirFunction + /// to be used in analysis and intern the function parameters + /// Prerequisite: self.add_generics() has already been called with the given + /// function's generics, including any generics from the impl, if any. 
+ fn extract_meta(&mut self, func: &NoirFunction, func_id: FuncId) -> FuncMeta { + let location = Location::new(func.name_ident().span(), self.file); + let id = self.interner.function_definition_id(func_id); + let name_ident = HirIdent::non_trait_method(id, location); + + let attributes = func.attributes().clone(); + let has_no_predicates_attribute = attributes.is_no_predicates(); + let should_fold = attributes.is_foldable(); + if !self.inline_attribute_allowed(func) { + if has_no_predicates_attribute { + self.push_err(ResolverError::NoPredicatesAttributeOnUnconstrained { + ident: func.name_ident().clone(), + }); + } else if should_fold { + self.push_err(ResolverError::FoldAttributeOnUnconstrained { + ident: func.name_ident().clone(), + }); + } + } + // Both the #[fold] and #[no_predicates] alter a function's inline type and code generation in similar ways. + // In certain cases such as type checking (for which the following flag will be used) both attributes + // indicate we should code generate in the same way. Thus, we unify the attributes into one flag here. 
+ let has_inline_attribute = has_no_predicates_attribute || should_fold; + let is_entry_point = self.is_entry_point_function(func); + + let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); + let mut parameters = vec![]; + let mut parameter_types = vec![]; + + for Param { visibility, pattern, typ, span: _ } in func.parameters().iter().cloned() { + if visibility == Visibility::Public && !self.pub_allowed(func) { + self.push_err(ResolverError::UnnecessaryPub { + ident: func.name_ident().clone(), + position: PubPosition::Parameter, + }); + } + + let type_span = typ.span.unwrap_or_else(|| pattern.span()); + let typ = self.resolve_type_inner(typ, &mut generics); + self.check_if_type_is_valid_for_program_input( + &typ, + is_entry_point, + has_inline_attribute, + type_span, + ); + let pattern = self.elaborate_pattern(pattern, typ.clone(), DefinitionKind::Local(None)); + + parameters.push((pattern, typ.clone(), visibility)); + parameter_types.push(typ); + } + + let return_type = Box::new(self.resolve_type(func.return_type())); + + self.declare_numeric_generics(¶meter_types, &return_type); + + if !self.pub_allowed(func) && func.def.return_visibility == Visibility::Public { + self.push_err(ResolverError::UnnecessaryPub { + ident: func.name_ident().clone(), + position: PubPosition::ReturnType, + }); + } + + let is_low_level_function = + attributes.function.as_ref().map_or(false, |func| func.is_low_level()); + + if !self.crate_id.is_stdlib() && is_low_level_function { + let error = + ResolverError::LowLevelFunctionOutsideOfStdlib { ident: func.name_ident().clone() }; + self.push_err(error); + } + + // 'pub' is required on return types for entry point functions + if is_entry_point + && return_type.as_ref() != &Type::Unit + && func.def.return_visibility == Visibility::Private + { + self.push_err(ResolverError::NecessaryPub { ident: func.name_ident().clone() }); + } + // '#[recursive]' attribute is only allowed for entry point functions + if 
!is_entry_point && func.kind == FunctionKind::Recursive { + self.push_err(ResolverError::MisplacedRecursiveAttribute { + ident: func.name_ident().clone(), + }); + } + + if matches!(attributes.function, Some(FunctionAttribute::Test { .. })) + && !parameters.is_empty() + { + self.push_err(ResolverError::TestFunctionHasParameters { + span: func.name_ident().span(), + }); + } + + let mut typ = Type::Function(parameter_types, return_type, Box::new(Type::Unit)); + + if !generics.is_empty() { + typ = Type::Forall(generics, Box::new(typ)); + } + + self.interner.push_definition_type(name_ident.id, typ.clone()); + + let direct_generics = func.def.generics.iter(); + let direct_generics = direct_generics + .filter_map(|generic| self.find_generic(&generic.0.contents)) + .map(|(name, typevar, _span)| (name.clone(), typevar.clone())) + .collect(); + + FuncMeta { + name: name_ident, + kind: func.kind, + location, + typ, + direct_generics, + trait_impl: self.current_trait_impl, + parameters: parameters.into(), + return_type: func.def.return_type.clone(), + return_visibility: func.def.return_visibility, + has_body: !func.def.body.is_empty(), + trait_constraints: self.resolve_trait_constraints(&func.def.where_clause), + is_entry_point, + has_inline_attribute, + } + } + + /// Only sized types are valid to be used as main's parameters or the parameters to a contract + /// function. If the given type is not sized (e.g. contains a slice or NamedGeneric type), an + /// error is issued. 
+ fn check_if_type_is_valid_for_program_input( + &mut self, + typ: &Type, + is_entry_point: bool, + has_inline_attribute: bool, + span: Span, + ) { + if (is_entry_point && !typ.is_valid_for_program_input()) + || (has_inline_attribute && !typ.is_valid_non_inlined_function_input()) + { + self.push_err(TypeCheckError::InvalidTypeForEntryPoint { span }); + } + } + + fn inline_attribute_allowed(&self, func: &NoirFunction) -> bool { + // Inline attributes are only relevant for constrained functions + // as all unconstrained functions are not inlined + !func.def.is_unconstrained + } + + /// True if the 'pub' keyword is allowed on parameters in this function + /// 'pub' on function parameters is only allowed for entry point functions + fn pub_allowed(&self, func: &NoirFunction) -> bool { + self.is_entry_point_function(func) || func.attributes().is_foldable() + } + + fn is_entry_point_function(&self, func: &NoirFunction) -> bool { + if self.in_contract { + func.attributes().is_contract_entry_point() + } else { + func.name() == MAIN_FUNCTION + } + } + + fn declare_numeric_generics(&mut self, params: &[Type], return_type: &Type) { + if self.generics.is_empty() { + return; + } + + for (name_to_find, type_variable) in Self::find_numeric_generics(params, return_type) { + // Declare any generics to let users use numeric generics in scope. + // Don't issue a warning if these are unused + // + // We can fail to find the generic in self.generics if it is an implicit one created + // by the compiler. This can happen when, e.g. eliding array lengths using the slice + // syntax [T]. 
+ if let Some((name, _, span)) = + self.generics.iter().find(|(name, _, _)| name.as_ref() == &name_to_find) + { + let ident = Ident::new(name.to_string(), *span); + let definition = DefinitionKind::GenericType(type_variable); + self.add_variable_decl_inner(ident, false, false, false, definition); + } + } + } + + fn find_numeric_generics( + parameters: &[Type], + return_type: &Type, + ) -> Vec<(String, TypeVariable)> { + let mut found = BTreeMap::new(); + for parameter in parameters { + Self::find_numeric_generics_in_type(parameter, &mut found); + } + Self::find_numeric_generics_in_type(return_type, &mut found); + found.into_iter().collect() + } + + fn find_numeric_generics_in_type(typ: &Type, found: &mut BTreeMap) { + match typ { + Type::FieldElement + | Type::Integer(_, _) + | Type::Bool + | Type::Unit + | Type::Error + | Type::TypeVariable(_, _) + | Type::Constant(_) + | Type::NamedGeneric(_, _) + | Type::Code + | Type::Forall(_, _) => (), + + Type::TraitAsType(_, _, args) => { + for arg in args { + Self::find_numeric_generics_in_type(arg, found); + } + } + + Type::Array(length, element_type) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + Self::find_numeric_generics_in_type(element_type, found); + } + + Type::Slice(element_type) => { + Self::find_numeric_generics_in_type(element_type, found); + } + + Type::Tuple(fields) => { + for field in fields { + Self::find_numeric_generics_in_type(field, found); + } + } + + Type::Function(parameters, return_type, _env) => { + for parameter in parameters { + Self::find_numeric_generics_in_type(parameter, found); + } + Self::find_numeric_generics_in_type(return_type, found); + } + + Type::Struct(struct_type, generics) => { + for (i, generic) in generics.iter().enumerate() { + if let Type::NamedGeneric(type_variable, name) = generic { + if struct_type.borrow().generic_is_numeric(i) { + found.insert(name.to_string(), type_variable.clone()); 
+ } + } else { + Self::find_numeric_generics_in_type(generic, found); + } + } + } + Type::Alias(alias, generics) => { + for (i, generic) in generics.iter().enumerate() { + if let Type::NamedGeneric(type_variable, name) = generic { + if alias.borrow().generic_is_numeric(i) { + found.insert(name.to_string(), type_variable.clone()); + } + } else { + Self::find_numeric_generics_in_type(generic, found); + } + } + } + Type::MutableReference(element) => Self::find_numeric_generics_in_type(element, found), + Type::String(length) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + } + Type::FmtString(length, fields) => { + if let Type::NamedGeneric(type_variable, name) = length.as_ref() { + found.insert(name.to_string(), type_variable.clone()); + } + Self::find_numeric_generics_in_type(fields, found); + } + } + } + + fn add_trait_constraints_to_scope(&mut self, func_meta: &FuncMeta) { + for constraint in &func_meta.trait_constraints { + let object = constraint.typ.clone(); + let trait_id = constraint.trait_id; + let generics = constraint.trait_generics.clone(); + + if !self.interner.add_assumed_trait_implementation(object, trait_id, generics) { + if let Some(the_trait) = self.interner.try_get_trait(trait_id) { + let trait_name = the_trait.name.to_string(); + let typ = constraint.typ.clone(); + let span = func_meta.location.span; + self.push_err(TypeCheckError::UnneededTraitConstraint { + trait_name, + typ, + span, + }); + } + } + } + } +} diff --git a/compiler/noirc_frontend/src/elaborator/patterns.rs b/compiler/noirc_frontend/src/elaborator/patterns.rs new file mode 100644 index 0000000000..195d37878f --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/patterns.rs @@ -0,0 +1,465 @@ +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; +use rustc_hash::FxHashSet as HashSet; + +use crate::{ + ast::ERROR_IDENT, + hir::{ + resolution::errors::ResolverError, + 
type_check::{Source, TypeCheckError}, + }, + hir_def::{ + expr::{HirIdent, ImplKind}, + stmt::HirPattern, + }, + macros_api::{HirExpression, Ident, Path, Pattern}, + node_interner::{DefinitionId, DefinitionKind, ExprId, TraitImplKind}, + Shared, StructType, Type, TypeBindings, +}; + +use super::{Elaborator, ResolverMeta}; + +impl<'context> Elaborator<'context> { + pub(super) fn elaborate_pattern( + &mut self, + pattern: Pattern, + expected_type: Type, + definition_kind: DefinitionKind, + ) -> HirPattern { + self.elaborate_pattern_mut(pattern, expected_type, definition_kind, None) + } + + fn elaborate_pattern_mut( + &mut self, + pattern: Pattern, + expected_type: Type, + definition: DefinitionKind, + mutable: Option, + ) -> HirPattern { + match pattern { + Pattern::Identifier(name) => { + // If this definition is mutable, do not store the rhs because it will + // not always refer to the correct value of the variable + let definition = match (mutable, definition) { + (Some(_), DefinitionKind::Local(_)) => DefinitionKind::Local(None), + (_, other) => other, + }; + let ident = self.add_variable_decl(name, mutable.is_some(), true, definition); + self.interner.push_definition_type(ident.id, expected_type); + HirPattern::Identifier(ident) + } + Pattern::Mutable(pattern, span, _) => { + if let Some(first_mut) = mutable { + self.push_err(ResolverError::UnnecessaryMut { first_mut, second_mut: span }); + } + + let pattern = + self.elaborate_pattern_mut(*pattern, expected_type, definition, Some(span)); + let location = Location::new(span, self.file); + HirPattern::Mutable(Box::new(pattern), location) + } + Pattern::Tuple(fields, span) => { + let field_types = match expected_type { + Type::Tuple(fields) => fields, + Type::Error => Vec::new(), + expected_type => { + let tuple = + Type::Tuple(vecmap(&fields, |_| self.interner.next_type_variable())); + + self.push_err(TypeCheckError::TypeMismatchWithSource { + expected: expected_type, + actual: tuple, + span, + source: 
Source::Assignment, + }); + Vec::new() + } + }; + + let fields = vecmap(fields.into_iter().enumerate(), |(i, field)| { + let field_type = field_types.get(i).cloned().unwrap_or(Type::Error); + self.elaborate_pattern_mut(field, field_type, definition.clone(), mutable) + }); + let location = Location::new(span, self.file); + HirPattern::Tuple(fields, location) + } + Pattern::Struct(name, fields, span) => self.elaborate_struct_pattern( + name, + fields, + span, + expected_type, + definition, + mutable, + ), + } + } + + fn elaborate_struct_pattern( + &mut self, + name: Path, + fields: Vec<(Ident, Pattern)>, + span: Span, + expected_type: Type, + definition: DefinitionKind, + mutable: Option, + ) -> HirPattern { + let error_identifier = |this: &mut Self| { + // Must create a name here to return a HirPattern::Identifier. Allowing + // shadowing here lets us avoid further errors if we define ERROR_IDENT + // multiple times. + let name = ERROR_IDENT.into(); + let identifier = this.add_variable_decl(name, false, true, definition.clone()); + HirPattern::Identifier(identifier) + }; + + let (struct_type, generics) = match self.lookup_type_or_error(name) { + Some(Type::Struct(struct_type, generics)) => (struct_type, generics), + None => return error_identifier(self), + Some(typ) => { + self.push_err(ResolverError::NonStructUsedInConstructor { typ, span }); + return error_identifier(self); + } + }; + + let actual_type = Type::Struct(struct_type.clone(), generics); + let location = Location::new(span, self.file); + + self.unify(&actual_type, &expected_type, || TypeCheckError::TypeMismatchWithSource { + expected: expected_type.clone(), + actual: actual_type.clone(), + span: location.span, + source: Source::Assignment, + }); + + let typ = struct_type.clone(); + let fields = self.resolve_constructor_pattern_fields( + typ, + fields, + span, + expected_type.clone(), + definition, + mutable, + ); + + HirPattern::Struct(expected_type, fields, location) + } + + /// Resolve all the fields 
of a struct constructor expression. + /// Ensures all fields are present, none are repeated, and all + /// are part of the struct. + fn resolve_constructor_pattern_fields( + &mut self, + struct_type: Shared, + fields: Vec<(Ident, Pattern)>, + span: Span, + expected_type: Type, + definition: DefinitionKind, + mutable: Option, + ) -> Vec<(Ident, HirPattern)> { + let mut ret = Vec::with_capacity(fields.len()); + let mut seen_fields = HashSet::default(); + let mut unseen_fields = struct_type.borrow().field_names(); + + for (field, pattern) in fields { + let field_type = expected_type.get_field_type(&field.0.contents).unwrap_or(Type::Error); + let resolved = + self.elaborate_pattern_mut(pattern, field_type, definition.clone(), mutable); + + if unseen_fields.contains(&field) { + unseen_fields.remove(&field); + seen_fields.insert(field.clone()); + } else if seen_fields.contains(&field) { + // duplicate field + self.push_err(ResolverError::DuplicateField { field: field.clone() }); + } else { + // field not required by struct + self.push_err(ResolverError::NoSuchField { + field: field.clone(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret.push((field, resolved)); + } + + if !unseen_fields.is_empty() { + self.push_err(ResolverError::MissingFields { + span, + missing_fields: unseen_fields.into_iter().map(|field| field.to_string()).collect(), + struct_definition: struct_type.borrow().name.clone(), + }); + } + + ret + } + + pub(super) fn add_variable_decl( + &mut self, + name: Ident, + mutable: bool, + allow_shadowing: bool, + definition: DefinitionKind, + ) -> HirIdent { + self.add_variable_decl_inner(name, mutable, allow_shadowing, true, definition) + } + + pub fn add_variable_decl_inner( + &mut self, + name: Ident, + mutable: bool, + allow_shadowing: bool, + warn_if_unused: bool, + definition: DefinitionKind, + ) -> HirIdent { + if definition.is_global() { + return self.add_global_variable_decl(name, definition); + } + + let location = 
Location::new(name.span(), self.file); + let id = + self.interner.push_definition(name.0.contents.clone(), mutable, definition, location); + let ident = HirIdent::non_trait_method(id, location); + let resolver_meta = + ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused }; + + let scope = self.scopes.get_mut_scope(); + let old_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); + + if !allow_shadowing { + if let Some(old_value) = old_value { + self.push_err(ResolverError::DuplicateDefinition { + name: name.0.contents, + first_span: old_value.ident.location.span, + second_span: location.span, + }); + } + } + + ident + } + + pub fn add_global_variable_decl( + &mut self, + name: Ident, + definition: DefinitionKind, + ) -> HirIdent { + let scope = self.scopes.get_mut_scope(); + + // This check is necessary to maintain the same definition ids in the interner. Currently, each function uses a new resolver that has its own ScopeForest and thus global scope. + // We must first check whether an existing definition ID has been inserted as otherwise there will be multiple definitions for the same global statement. + // This leads to an error in evaluation where the wrong definition ID is selected when evaluating a statement using the global. The check below prevents this error. 
+ let mut global_id = None; + let global = self.interner.get_all_globals(); + for global_info in global { + if global_info.ident == name && global_info.local_id == self.local_module { + global_id = Some(global_info.id); + } + } + + let (ident, resolver_meta) = if let Some(id) = global_id { + let global = self.interner.get_global(id); + let hir_ident = HirIdent::non_trait_method(global.definition_id, global.location); + let ident = hir_ident.clone(); + let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; + (hir_ident, resolver_meta) + } else { + let location = Location::new(name.span(), self.file); + let id = + self.interner.push_definition(name.0.contents.clone(), false, definition, location); + let ident = HirIdent::non_trait_method(id, location); + let resolver_meta = + ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused: true }; + (ident, resolver_meta) + }; + + let old_global_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); + if let Some(old_global_value) = old_global_value { + self.push_err(ResolverError::DuplicateDefinition { + name: name.0.contents.clone(), + first_span: old_global_value.ident.location.span, + second_span: name.span(), + }); + } + ident + } + + // Checks for a variable having been declared before. + // (Variable declaration and definition cannot be separate in Noir.) 
+ // Once the variable has been found, intern and link `name` to this definition, + // returning (the ident, the IdentId of `name`) + // + // If a variable is not found, then an error is logged and a dummy id + // is returned, for better error reporting UX + pub(super) fn find_variable_or_default(&mut self, name: &Ident) -> (HirIdent, usize) { + self.use_variable(name).unwrap_or_else(|error| { + self.push_err(error); + let id = DefinitionId::dummy_id(); + let location = Location::new(name.span(), self.file); + (HirIdent::non_trait_method(id, location), 0) + }) + } + + /// Lookup and use the specified variable. + /// This will increment its use counter by one and return the variable if found. + /// If the variable is not found, an error is returned. + pub(super) fn use_variable( + &mut self, + name: &Ident, + ) -> Result<(HirIdent, usize), ResolverError> { + // Find the definition for this Ident + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(&name.0.contents); + + let location = Location::new(name.span(), self.file); + if let Some((variable_found, scope)) = variable { + variable_found.num_times_used += 1; + let id = variable_found.ident.id; + Ok((HirIdent::non_trait_method(id, location), scope)) + } else { + Err(ResolverError::VariableNotDeclared { + name: name.0.contents.clone(), + span: name.0.span(), + }) + } + } + + pub(super) fn elaborate_variable(&mut self, variable: Path) -> (ExprId, Type) { + let span = variable.span; + let expr = self.resolve_variable(variable); + let id = self.interner.push_expr(HirExpression::Ident(expr.clone())); + self.interner.push_expr_location(id, span, self.file); + let typ = self.type_check_variable(expr, id); + self.interner.push_expr_type(id, typ.clone()); + (id, typ) + } + + fn resolve_variable(&mut self, path: Path) -> HirIdent { + if let Some((method, constraint, assumed)) = self.resolve_trait_generic_path(&path) { + HirIdent { + location: Location::new(path.span, self.file), + id: 
self.interner.trait_method_id(method), + impl_kind: ImplKind::TraitMethod(method, constraint, assumed), + } + } else { + // If the Path is being used as an Expression, then it is referring to a global from a separate module + // Otherwise, then it is referring to an Identifier + // This lookup allows support of such statements: let x = foo::bar::SOME_GLOBAL + 10; + // If the expression is a singular indent, we search the resolver's current scope as normal. + let (hir_ident, var_scope_index) = self.get_ident_from_path(path); + + if hir_ident.id != DefinitionId::dummy_id() { + match self.interner.definition(hir_ident.id).kind { + DefinitionKind::Function(id) => { + if let Some(current_item) = self.current_item { + self.interner.add_function_dependency(current_item, id); + } + } + DefinitionKind::Global(global_id) => { + if let Some(current_item) = self.current_item { + self.interner.add_global_dependency(current_item, global_id); + } + } + DefinitionKind::GenericType(_) => { + // Initialize numeric generics to a polymorphic integer type in case + // they're used in expressions. We must do this here since type_check_variable + // does not check definition kinds and otherwise expects parameters to + // already be typed. + if self.interner.definition_type(hir_ident.id) == Type::Error { + let typ = Type::polymorphic_integer_or_field(self.interner); + self.interner.push_definition_type(hir_ident.id, typ); + } + } + DefinitionKind::Local(_) => { + // only local variables can be captured by closures. + self.resolve_local_variable(hir_ident.clone(), var_scope_index); + } + } + } + + hir_ident + } + } + + pub(super) fn type_check_variable(&mut self, ident: HirIdent, expr_id: ExprId) -> Type { + let mut bindings = TypeBindings::new(); + + // Add type bindings from any constraints that were used. 
+ // We need to do this first since otherwise instantiating the type below + // will replace each trait generic with a fresh type variable, rather than + // the type used in the trait constraint (if it exists). See #4088. + if let ImplKind::TraitMethod(_, constraint, _) = &ident.impl_kind { + let the_trait = self.interner.get_trait(constraint.trait_id); + assert_eq!(the_trait.generics.len(), constraint.trait_generics.len()); + + for (param, arg) in the_trait.generics.iter().zip(&constraint.trait_generics) { + // Avoid binding t = t + if !arg.occurs(param.id()) { + bindings.insert(param.id(), (param.clone(), arg.clone())); + } + } + } + + // An identifiers type may be forall-quantified in the case of generic functions. + // E.g. `fn foo(t: T, field: Field) -> T` has type `forall T. fn(T, Field) -> T`. + // We must instantiate identifiers at every call site to replace this T with a new type + // variable to handle generic functions. + let t = self.interner.id_type_substitute_trait_as_type(ident.id); + + // This instantiates a trait's generics as well which need to be set + // when the constraint below is later solved for when the function is + // finished. How to link the two? + let (typ, bindings) = t.instantiate_with_bindings(bindings, self.interner); + + // Push any trait constraints required by this definition to the context + // to be checked later when the type of this variable is further constrained. 
+ if let Some(definition) = self.interner.try_definition(ident.id) { + if let DefinitionKind::Function(function) = definition.kind { + let function = self.interner.function_meta(&function); + + for mut constraint in function.trait_constraints.clone() { + constraint.apply_bindings(&bindings); + self.trait_constraints.push((constraint, expr_id)); + } + } + } + + if let ImplKind::TraitMethod(_, mut constraint, assumed) = ident.impl_kind { + constraint.apply_bindings(&bindings); + if assumed { + let trait_impl = TraitImplKind::Assumed { + object_type: constraint.typ, + trait_generics: constraint.trait_generics, + }; + self.interner.select_impl_for_expression(expr_id, trait_impl); + } else { + // Currently only one impl can be selected per expr_id, so this + // constraint needs to be pushed after any other constraints so + // that monomorphization can resolve this trait method to the correct impl. + self.trait_constraints.push((constraint, expr_id)); + } + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + typ + } + + fn get_ident_from_path(&mut self, path: Path) -> (HirIdent, usize) { + let location = Location::new(path.span(), self.file); + + let error = match path.as_ident().map(|ident| self.use_variable(ident)) { + Some(Ok(found)) => return found, + // Try to look it up as a global, but still issue the first error if we fail + Some(Err(error)) => match self.lookup_global(path) { + Ok(id) => return (HirIdent::non_trait_method(id, location), 0), + Err(_) => error, + }, + None => match self.lookup_global(path) { + Ok(id) => return (HirIdent::non_trait_method(id, location), 0), + Err(error) => error, + }, + }; + self.push_err(error); + let id = DefinitionId::dummy_id(); + (HirIdent::non_trait_method(id, location), 0) + } +} diff --git a/compiler/noirc_frontend/src/elaborator/scope.rs b/compiler/noirc_frontend/src/elaborator/scope.rs new file mode 100644 index 0000000000..cf10dbbc2b --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/scope.rs 
@@ -0,0 +1,200 @@ +use noirc_errors::Spanned; +use rustc_hash::FxHashMap as HashMap; + +use crate::ast::ERROR_IDENT; +use crate::hir::comptime::Value; +use crate::hir::def_map::{LocalModuleId, ModuleId}; +use crate::hir::resolution::path_resolver::{PathResolver, StandardPathResolver}; +use crate::hir::resolution::resolver::SELF_TYPE_NAME; +use crate::hir::scope::{Scope as GenericScope, ScopeTree as GenericScopeTree}; +use crate::macros_api::Ident; +use crate::{ + hir::{ + def_map::{ModuleDefId, TryFromModuleDefId}, + resolution::errors::ResolverError, + }, + hir_def::{ + expr::{HirCapturedVar, HirIdent}, + traits::Trait, + }, + macros_api::{Path, StructId}, + node_interner::{DefinitionId, TraitId, TypeAliasId}, + Shared, StructType, +}; +use crate::{Type, TypeAlias}; + +use super::{Elaborator, ResolverMeta}; + +type Scope = GenericScope; +type ScopeTree = GenericScopeTree; + +impl<'context> Elaborator<'context> { + pub(super) fn lookup(&mut self, path: Path) -> Result { + let span = path.span(); + let id = self.resolve_path(path)?; + T::try_from(id).ok_or_else(|| ResolverError::Expected { + expected: T::description(), + got: id.as_str().to_owned(), + span, + }) + } + + pub(super) fn module_id(&self) -> ModuleId { + assert_ne!(self.local_module, LocalModuleId::dummy_id(), "local_module is unset"); + ModuleId { krate: self.crate_id, local_id: self.local_module } + } + + pub(super) fn resolve_path(&mut self, path: Path) -> Result { + let resolver = StandardPathResolver::new(self.module_id()); + let path_resolution = resolver.resolve(self.def_maps, path)?; + + if let Some(error) = path_resolution.error { + self.push_err(error); + } + + Ok(path_resolution.module_def_id) + } + + pub(super) fn get_struct(&self, type_id: StructId) -> Shared { + self.interner.get_struct(type_id) + } + + pub(super) fn get_trait_mut(&mut self, trait_id: TraitId) -> &mut Trait { + self.interner.get_trait_mut(trait_id) + } + + pub(super) fn resolve_local_variable(&mut self, hir_ident: HirIdent, 
var_scope_index: usize) { + let mut transitive_capture_index: Option = None; + + for lambda_index in 0..self.lambda_stack.len() { + if self.lambda_stack[lambda_index].scope_index > var_scope_index { + // Beware: the same variable may be captured multiple times, so we check + // for its presence before adding the capture below. + let position = self.lambda_stack[lambda_index] + .captures + .iter() + .position(|capture| capture.ident.id == hir_ident.id); + + if position.is_none() { + self.lambda_stack[lambda_index].captures.push(HirCapturedVar { + ident: hir_ident.clone(), + transitive_capture_index, + }); + } + + if lambda_index + 1 < self.lambda_stack.len() { + // There is more than one closure between the current scope and + // the scope of the variable, so this is a propagated capture. + // We need to track the transitive capture index as we go up in + // the closure stack. + transitive_capture_index = Some(position.unwrap_or( + // If this was a fresh capture, we added it to the end of + // the captures vector: + self.lambda_stack[lambda_index].captures.len() - 1, + )); + } + } + } + } + + pub(super) fn lookup_global(&mut self, path: Path) -> Result { + let span = path.span(); + let id = self.resolve_path(path)?; + + if let Some(function) = TryFromModuleDefId::try_from(id) { + return Ok(self.interner.function_definition_id(function)); + } + + if let Some(global) = TryFromModuleDefId::try_from(id) { + let global = self.interner.get_global(global); + return Ok(global.definition_id); + } + + let expected = "global variable".into(); + let got = "local variable".into(); + Err(ResolverError::Expected { span, expected, got }) + } + + pub fn push_scope(&mut self) { + self.scopes.start_scope(); + } + + pub fn pop_scope(&mut self) { + let scope = self.scopes.end_scope(); + self.check_for_unused_variables_in_scope_tree(scope.into()); + } + + pub fn check_for_unused_variables_in_scope_tree(&mut self, scope_decls: ScopeTree) { + let mut unused_vars = Vec::new(); + for scope 
in scope_decls.0.into_iter() { + Self::check_for_unused_variables_in_local_scope(scope, &mut unused_vars); + } + + for unused_var in unused_vars.iter() { + if let Some(definition_info) = self.interner.try_definition(unused_var.id) { + let name = &definition_info.name; + if name != ERROR_IDENT && !definition_info.is_global() { + let ident = Ident(Spanned::from(unused_var.location.span, name.to_owned())); + self.push_err(ResolverError::UnusedVariable { ident }); + } + } + } + } + + fn check_for_unused_variables_in_local_scope(decl_map: Scope, unused_vars: &mut Vec) { + let unused_variables = decl_map.filter(|(variable_name, metadata)| { + let has_underscore_prefix = variable_name.starts_with('_'); // XXX: This is used for development mode, and will be removed + metadata.warn_if_unused && metadata.num_times_used == 0 && !has_underscore_prefix + }); + unused_vars.extend(unused_variables.map(|(_, meta)| meta.ident.clone())); + } + + /// Lookup a given trait by name/path. + pub fn lookup_trait_or_error(&mut self, path: Path) -> Option<&mut Trait> { + match self.lookup(path) { + Ok(trait_id) => Some(self.get_trait_mut(trait_id)), + Err(error) => { + self.push_err(error); + None + } + } + } + + /// Lookup a given struct type by name. + pub fn lookup_struct_or_error(&mut self, path: Path) -> Option> { + match self.lookup(path) { + Ok(struct_id) => Some(self.get_struct(struct_id)), + Err(error) => { + self.push_err(error); + None + } + } + } + + /// Looks up a given type by name. + /// This will also instantiate any struct types found. 
+ pub(super) fn lookup_type_or_error(&mut self, path: Path) -> Option { + let ident = path.as_ident(); + if ident.map_or(false, |i| i == SELF_TYPE_NAME) { + if let Some(typ) = &self.self_type { + return Some(typ.clone()); + } + } + + match self.lookup(path) { + Ok(struct_id) => { + let struct_type = self.get_struct(struct_id); + let generics = struct_type.borrow().instantiate(self.interner); + Some(Type::Struct(struct_type, generics)) + } + Err(error) => { + self.push_err(error); + None + } + } + } + + pub fn lookup_type_alias(&mut self, path: Path) -> Option> { + self.lookup(path).ok().map(|id| self.interner.get_type_alias(id)) + } +} diff --git a/compiler/noirc_frontend/src/elaborator/statements.rs b/compiler/noirc_frontend/src/elaborator/statements.rs new file mode 100644 index 0000000000..a7a2df4041 --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/statements.rs @@ -0,0 +1,409 @@ +use noirc_errors::{Location, Span}; + +use crate::{ + ast::{AssignStatement, ConstrainStatement, LValue}, + hir::{ + resolution::errors::ResolverError, + type_check::{Source, TypeCheckError}, + }, + hir_def::{ + expr::HirIdent, + stmt::{ + HirAssignStatement, HirConstrainStatement, HirForStatement, HirLValue, HirLetStatement, + }, + }, + macros_api::{ + ForLoopStatement, ForRange, HirStatement, LetStatement, Statement, StatementKind, + }, + node_interner::{DefinitionId, DefinitionKind, StmtId}, + Type, +}; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + fn elaborate_statement_value(&mut self, statement: Statement) -> (HirStatement, Type) { + match statement.kind { + StatementKind::Let(let_stmt) => self.elaborate_let(let_stmt), + StatementKind::Constrain(constrain) => self.elaborate_constrain(constrain), + StatementKind::Assign(assign) => self.elaborate_assign(assign), + StatementKind::For(for_stmt) => self.elaborate_for(for_stmt), + StatementKind::Break => self.elaborate_jump(true, statement.span), + StatementKind::Continue => self.elaborate_jump(false, 
statement.span), + StatementKind::Comptime(statement) => self.elaborate_comptime(*statement), + StatementKind::Expression(expr) => { + let (expr, typ) = self.elaborate_expression(expr); + (HirStatement::Expression(expr), typ) + } + StatementKind::Semi(expr) => { + let (expr, _typ) = self.elaborate_expression(expr); + (HirStatement::Semi(expr), Type::Unit) + } + StatementKind::Error => (HirStatement::Error, Type::Error), + } + } + + pub(super) fn elaborate_statement(&mut self, statement: Statement) -> (StmtId, Type) { + let span = statement.span; + let (hir_statement, typ) = self.elaborate_statement_value(statement); + let id = self.interner.push_stmt(hir_statement); + self.interner.push_stmt_location(id, span, self.file); + (id, typ) + } + + pub(super) fn elaborate_let(&mut self, let_stmt: LetStatement) -> (HirStatement, Type) { + let expr_span = let_stmt.expression.span; + let (expression, expr_type) = self.elaborate_expression(let_stmt.expression); + let definition = DefinitionKind::Local(Some(expression)); + let annotated_type = self.resolve_type(let_stmt.r#type); + + // First check if the LHS is unspecified + // If so, then we give it the same type as the expression + let r#type = if annotated_type != Type::Error { + // Now check if LHS is the same type as the RHS + // Importantly, we do not coerce any types implicitly + self.unify_with_coercions(&expr_type, &annotated_type, expression, || { + TypeCheckError::TypeMismatch { + expected_typ: annotated_type.to_string(), + expr_typ: expr_type.to_string(), + expr_span, + } + }); + if annotated_type.is_unsigned() { + self.lint_overflowing_uint(&expression, &annotated_type); + } + annotated_type + } else { + expr_type + }; + + let let_ = HirLetStatement { + pattern: self.elaborate_pattern(let_stmt.pattern, r#type.clone(), definition), + r#type, + expression, + attributes: let_stmt.attributes, + comptime: let_stmt.comptime, + }; + (HirStatement::Let(let_), Type::Unit) + } + + pub(super) fn elaborate_constrain(&mut 
self, stmt: ConstrainStatement) -> (HirStatement, Type) { + let expr_span = stmt.0.span; + let (expr_id, expr_type) = self.elaborate_expression(stmt.0); + + // Must type check the assertion message expression so that we instantiate bindings + let msg = stmt.1.map(|assert_msg_expr| self.elaborate_expression(assert_msg_expr).0); + + self.unify(&expr_type, &Type::Bool, || TypeCheckError::TypeMismatch { + expr_typ: expr_type.to_string(), + expected_typ: Type::Bool.to_string(), + expr_span, + }); + + (HirStatement::Constrain(HirConstrainStatement(expr_id, self.file, msg)), Type::Unit) + } + + pub(super) fn elaborate_assign(&mut self, assign: AssignStatement) -> (HirStatement, Type) { + let span = assign.expression.span; + let (expression, expr_type) = self.elaborate_expression(assign.expression); + let (lvalue, lvalue_type, mutable) = self.elaborate_lvalue(assign.lvalue, span); + + if !mutable { + let (name, span) = self.get_lvalue_name_and_span(&lvalue); + self.push_err(TypeCheckError::VariableMustBeMutable { name, span }); + } + + self.unify_with_coercions(&expr_type, &lvalue_type, expression, || { + TypeCheckError::TypeMismatchWithSource { + actual: expr_type.clone(), + expected: lvalue_type.clone(), + span, + source: Source::Assignment, + } + }); + + let stmt = HirAssignStatement { lvalue, expression }; + (HirStatement::Assign(stmt), Type::Unit) + } + + pub(super) fn elaborate_for(&mut self, for_loop: ForLoopStatement) -> (HirStatement, Type) { + let (start, end) = match for_loop.range { + ForRange::Range(start, end) => (start, end), + ForRange::Array(_) => { + let for_stmt = + for_loop.range.into_for(for_loop.identifier, for_loop.block, for_loop.span); + + return self.elaborate_statement_value(for_stmt); + } + }; + + let start_span = start.span; + let end_span = end.span; + + let (start_range, start_range_type) = self.elaborate_expression(start); + let (end_range, end_range_type) = self.elaborate_expression(end); + let (identifier, block) = (for_loop.identifier, 
for_loop.block); + + self.nested_loops += 1; + self.push_scope(); + + // TODO: For loop variables are currently mutable by default since we haven't + // yet implemented syntax for them to be optionally mutable. + let kind = DefinitionKind::Local(None); + let identifier = self.add_variable_decl(identifier, false, true, kind); + + // Check that start range and end range have the same types + let range_span = start_span.merge(end_span); + self.unify(&start_range_type, &end_range_type, || TypeCheckError::TypeMismatch { + expected_typ: start_range_type.to_string(), + expr_typ: end_range_type.to_string(), + expr_span: range_span, + }); + + let expected_type = self.polymorphic_integer(); + + self.unify(&start_range_type, &expected_type, || TypeCheckError::TypeCannotBeUsed { + typ: start_range_type.clone(), + place: "for loop", + span: range_span, + }); + + self.interner.push_definition_type(identifier.id, start_range_type); + + let (block, _block_type) = self.elaborate_expression(block); + + self.pop_scope(); + self.nested_loops -= 1; + + let statement = + HirStatement::For(HirForStatement { start_range, end_range, block, identifier }); + + (statement, Type::Unit) + } + + fn elaborate_jump(&mut self, is_break: bool, span: noirc_errors::Span) -> (HirStatement, Type) { + if !self.in_unconstrained_fn { + self.push_err(ResolverError::JumpInConstrainedFn { is_break, span }); + } + if self.nested_loops == 0 { + self.push_err(ResolverError::JumpOutsideLoop { is_break, span }); + } + + let expr = if is_break { HirStatement::Break } else { HirStatement::Continue }; + (expr, self.interner.next_type_variable()) + } + + fn get_lvalue_name_and_span(&self, lvalue: &HirLValue) -> (String, Span) { + match lvalue { + HirLValue::Ident(name, _) => { + let span = name.location.span; + + if let Some(definition) = self.interner.try_definition(name.id) { + (definition.name.clone(), span) + } else { + ("(undeclared variable)".into(), span) + } + } + HirLValue::MemberAccess { object, .. 
} => self.get_lvalue_name_and_span(object), + HirLValue::Index { array, .. } => self.get_lvalue_name_and_span(array), + HirLValue::Dereference { lvalue, .. } => self.get_lvalue_name_and_span(lvalue), + } + } + + fn elaborate_lvalue(&mut self, lvalue: LValue, assign_span: Span) -> (HirLValue, Type, bool) { + match lvalue { + LValue::Ident(ident) => { + let mut mutable = true; + let (ident, scope_index) = self.find_variable_or_default(&ident); + self.resolve_local_variable(ident.clone(), scope_index); + + let typ = if ident.id == DefinitionId::dummy_id() { + Type::Error + } else { + if let Some(definition) = self.interner.try_definition(ident.id) { + mutable = definition.mutable; + } + + let typ = self.interner.definition_type(ident.id).instantiate(self.interner).0; + typ.follow_bindings() + }; + + (HirLValue::Ident(ident.clone(), typ.clone()), typ, mutable) + } + LValue::MemberAccess { object, field_name, span } => { + let (object, lhs_type, mut mutable) = self.elaborate_lvalue(*object, assign_span); + let mut object = Box::new(object); + let field_name = field_name.clone(); + + let object_ref = &mut object; + let mutable_ref = &mut mutable; + let location = Location::new(span, self.file); + + let dereference_lhs = move |_: &mut Self, _, element_type| { + // We must create a temporary value first to move out of object_ref before + // we eventually reassign to it. 
+ let id = DefinitionId::dummy_id(); + let ident = HirIdent::non_trait_method(id, location); + let tmp_value = HirLValue::Ident(ident, Type::Error); + + let lvalue = std::mem::replace(object_ref, Box::new(tmp_value)); + *object_ref = + Box::new(HirLValue::Dereference { lvalue, element_type, location }); + *mutable_ref = true; + }; + + let name = &field_name.0.contents; + let (object_type, field_index) = self + .check_field_access(&lhs_type, name, field_name.span(), Some(dereference_lhs)) + .unwrap_or((Type::Error, 0)); + + let field_index = Some(field_index); + let typ = object_type.clone(); + let lvalue = + HirLValue::MemberAccess { object, field_name, field_index, typ, location }; + (lvalue, object_type, mutable) + } + LValue::Index { array, index, span } => { + let expr_span = index.span; + let (index, index_type) = self.elaborate_expression(index); + let location = Location::new(span, self.file); + + let expected = self.polymorphic_integer_or_field(); + self.unify(&index_type, &expected, || TypeCheckError::TypeMismatch { + expected_typ: "an integer".to_owned(), + expr_typ: index_type.to_string(), + expr_span, + }); + + let (mut lvalue, mut lvalue_type, mut mutable) = + self.elaborate_lvalue(*array, assign_span); + + // Before we check that the lvalue is an array, try to dereference it as many times + // as needed to unwrap any &mut wrappers. 
+ while let Type::MutableReference(element) = lvalue_type.follow_bindings() { + let element_type = element.as_ref().clone(); + lvalue = + HirLValue::Dereference { lvalue: Box::new(lvalue), element_type, location }; + lvalue_type = *element; + // We know this value to be mutable now since we found an `&mut` + mutable = true; + } + + let typ = match lvalue_type.follow_bindings() { + Type::Array(_, elem_type) => *elem_type, + Type::Slice(elem_type) => *elem_type, + Type::Error => Type::Error, + Type::String(_) => { + let (_lvalue_name, lvalue_span) = self.get_lvalue_name_and_span(&lvalue); + self.push_err(TypeCheckError::StringIndexAssign { span: lvalue_span }); + Type::Error + } + other => { + // TODO: Need a better span here + self.push_err(TypeCheckError::TypeMismatch { + expected_typ: "array".to_string(), + expr_typ: other.to_string(), + expr_span: assign_span, + }); + Type::Error + } + }; + + let array = Box::new(lvalue); + let array_type = typ.clone(); + (HirLValue::Index { array, index, typ, location }, array_type, mutable) + } + LValue::Dereference(lvalue, span) => { + let (lvalue, reference_type, _) = self.elaborate_lvalue(*lvalue, assign_span); + let lvalue = Box::new(lvalue); + let location = Location::new(span, self.file); + + let element_type = Type::type_variable(self.interner.next_type_variable_id()); + let expected_type = Type::MutableReference(Box::new(element_type.clone())); + + self.unify(&reference_type, &expected_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_type.to_string(), + expr_typ: reference_type.to_string(), + expr_span: assign_span, + }); + + // Dereferences are always mutable since we already type checked against a &mut T + let typ = element_type.clone(); + let lvalue = HirLValue::Dereference { lvalue, element_type, location }; + (lvalue, typ, true) + } + } + } + + /// Type checks a field access, adding dereference operators as necessary + pub(super) fn check_field_access( + &mut self, + lhs_type: &Type, + field_name: 
&str, + span: Span, + dereference_lhs: Option, + ) -> Option<(Type, usize)> { + let lhs_type = lhs_type.follow_bindings(); + + match &lhs_type { + Type::Struct(s, args) => { + let s = s.borrow(); + if let Some((field, index)) = s.get_field(field_name, args) { + return Some((field, index)); + } + } + Type::Tuple(elements) => { + if let Ok(index) = field_name.parse::() { + let length = elements.len(); + if index < length { + return Some((elements[index].clone(), index)); + } else { + self.push_err(TypeCheckError::TupleIndexOutOfBounds { + index, + lhs_type, + length, + span, + }); + return None; + } + } + } + // If the lhs is a mutable reference we automatically transform + // lhs.field into (*lhs).field + Type::MutableReference(element) => { + if let Some(mut dereference_lhs) = dereference_lhs { + dereference_lhs(self, lhs_type.clone(), element.as_ref().clone()); + return self.check_field_access( + element, + field_name, + span, + Some(dereference_lhs), + ); + } else { + let (element, index) = + self.check_field_access(element, field_name, span, dereference_lhs)?; + return Some((Type::MutableReference(Box::new(element)), index)); + } + } + _ => (), + } + + // If we get here the type has no field named 'access.rhs'. + // Now we specialize the error message based on whether we know the object type in question yet. + if let Type::TypeVariable(..) 
= &lhs_type { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + } else if lhs_type != Type::Error { + self.push_err(TypeCheckError::AccessUnknownMember { + lhs_type, + field_name: field_name.to_string(), + span, + }); + } + + None + } + + pub(super) fn elaborate_comptime(&self, _statement: Statement) -> (HirStatement, Type) { + todo!("Comptime scanning") + } +} diff --git a/compiler/noirc_frontend/src/elaborator/types.rs b/compiler/noirc_frontend/src/elaborator/types.rs new file mode 100644 index 0000000000..4c8364b6dd --- /dev/null +++ b/compiler/noirc_frontend/src/elaborator/types.rs @@ -0,0 +1,1438 @@ +use std::rc::Rc; + +use iter_extended::vecmap; +use noirc_errors::{Location, Span}; + +use crate::{ + ast::{BinaryOpKind, IntegerBitSize, UnresolvedTraitConstraint, UnresolvedTypeExpression}, + hir::{ + def_map::ModuleDefId, + resolution::{ + errors::ResolverError, + import::PathResolution, + resolver::{verify_mutable_reference, SELF_TYPE_NAME}, + }, + type_check::{Source, TypeCheckError}, + }, + hir_def::{ + expr::{ + HirBinaryOp, HirCallExpression, HirIdent, HirMemberAccess, HirMethodReference, + HirPrefixExpression, + }, + function::FuncMeta, + traits::{Trait, TraitConstraint}, + }, + macros_api::{ + HirExpression, HirLiteral, HirStatement, Path, PathKind, SecondaryAttribute, Signedness, + UnaryOp, UnresolvedType, UnresolvedTypeData, + }, + node_interner::{DefinitionKind, ExprId, GlobalId, TraitId, TraitImplKind, TraitMethodId}, + Generics, Shared, StructType, Type, TypeAlias, TypeBinding, TypeVariable, TypeVariableKind, +}; + +use super::Elaborator; + +impl<'context> Elaborator<'context> { + /// Translates an UnresolvedType to a Type + pub(super) fn resolve_type(&mut self, typ: UnresolvedType) -> Type { + let span = typ.span; + let resolved_type = self.resolve_type_inner(typ, &mut vec![]); + if resolved_type.is_nested_slice() { + self.push_err(ResolverError::NestedSlices { span: span.unwrap() }); + } + + resolved_type + } + + /// Translates an 
UnresolvedType into a Type and appends any + /// freshly created TypeVariables created to new_variables. + pub fn resolve_type_inner( + &mut self, + typ: UnresolvedType, + new_variables: &mut Generics, + ) -> Type { + use crate::ast::UnresolvedTypeData::*; + + let resolved_type = match typ.typ { + FieldElement => Type::FieldElement, + Array(size, elem) => { + let elem = Box::new(self.resolve_type_inner(*elem, new_variables)); + let size = self.resolve_array_size(Some(size), new_variables); + Type::Array(Box::new(size), elem) + } + Slice(elem) => { + let elem = Box::new(self.resolve_type_inner(*elem, new_variables)); + Type::Slice(elem) + } + Expression(expr) => self.convert_expression_type(expr), + Integer(sign, bits) => Type::Integer(sign, bits), + Bool => Type::Bool, + String(size) => { + let resolved_size = self.resolve_array_size(size, new_variables); + Type::String(Box::new(resolved_size)) + } + FormatString(size, fields) => { + let resolved_size = self.convert_expression_type(size); + let fields = self.resolve_type_inner(*fields, new_variables); + Type::FmtString(Box::new(resolved_size), Box::new(fields)) + } + Code => Type::Code, + Unit => Type::Unit, + Unspecified => Type::Error, + Error => Type::Error, + Named(path, args, _) => self.resolve_named_type(path, args, new_variables), + TraitAsType(path, args) => self.resolve_trait_as_type(path, args, new_variables), + + Tuple(fields) => { + Type::Tuple(vecmap(fields, |field| self.resolve_type_inner(field, new_variables))) + } + Function(args, ret, env) => { + let args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + let ret = Box::new(self.resolve_type_inner(*ret, new_variables)); + + // expect() here is valid, because the only places we don't have a span are omitted types + // e.g. 
a function without return type implicitly has a spanless UnresolvedType::Unit return type + // To get an invalid env type, the user must explicitly specify the type, which will have a span + let env_span = + env.span.expect("Unexpected missing span for closure environment type"); + + let env = Box::new(self.resolve_type_inner(*env, new_variables)); + + match *env { + Type::Unit | Type::Tuple(_) | Type::NamedGeneric(_, _) => { + Type::Function(args, ret, env) + } + _ => { + self.push_err(ResolverError::InvalidClosureEnvironment { + typ: *env, + span: env_span, + }); + Type::Error + } + } + } + MutableReference(element) => { + Type::MutableReference(Box::new(self.resolve_type_inner(*element, new_variables))) + } + Parenthesized(typ) => self.resolve_type_inner(*typ, new_variables), + }; + + if let Type::Struct(_, _) = resolved_type { + if let Some(unresolved_span) = typ.span { + // Record the location of the type reference + self.interner.push_type_ref_location( + resolved_type.clone(), + Location::new(unresolved_span, self.file), + ); + } + } + resolved_type + } + + pub fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { + self.generics.iter().find(|(name, _, _)| name.as_ref() == target_name) + } + + fn resolve_named_type( + &mut self, + path: Path, + args: Vec, + new_variables: &mut Generics, + ) -> Type { + if args.is_empty() { + if let Some(typ) = self.lookup_generic_or_global_type(&path) { + return typ; + } + } + + // Check if the path is a type variable first. We currently disallow generics on type + // variables since we do not support higher-kinded types. 
+ if path.segments.len() == 1 { + let name = &path.last_segment().0.contents; + + if name == SELF_TYPE_NAME { + if let Some(self_type) = self.self_type.clone() { + if !args.is_empty() { + self.push_err(ResolverError::GenericsOnSelfType { span: path.span() }); + } + return self_type; + } + } + } + + let span = path.span(); + let mut args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + + if let Some(type_alias) = self.lookup_type_alias(path.clone()) { + let type_alias = type_alias.borrow(); + let expected_generic_count = type_alias.generics.len(); + let type_alias_string = type_alias.to_string(); + let id = type_alias.id; + + self.verify_generics_count(expected_generic_count, &mut args, span, || { + type_alias_string + }); + + if let Some(item) = self.current_item { + self.interner.add_type_alias_dependency(item, id); + } + + // Collecting Type Alias references [Location]s to be used by LSP in order + // to resolve the definition of the type alias + self.interner.add_type_alias_ref(id, Location::new(span, self.file)); + + // Because there is no ordering to when type aliases (and other globals) are resolved, + // it is possible for one to refer to an Error type and issue no error if it is set + // equal to another type alias. Fixing this fully requires an analysis to create a DFG + // of definition ordering, but for now we have an explicit check here so that we at + // least issue an error that the type was not found instead of silently passing. 
+ let alias = self.interner.get_type_alias(id); + return Type::Alias(alias, args); + } + + match self.lookup_struct_or_error(path) { + Some(struct_type) => { + if self.resolving_ids.contains(&struct_type.borrow().id) { + self.push_err(ResolverError::SelfReferentialStruct { + span: struct_type.borrow().name.span(), + }); + + return Type::Error; + } + + let expected_generic_count = struct_type.borrow().generics.len(); + if !self.in_contract + && self + .interner + .struct_attributes(&struct_type.borrow().id) + .iter() + .any(|attr| matches!(attr, SecondaryAttribute::Abi(_))) + { + self.push_err(ResolverError::AbiAttributeOutsideContract { + span: struct_type.borrow().name.span(), + }); + } + self.verify_generics_count(expected_generic_count, &mut args, span, || { + struct_type.borrow().to_string() + }); + + if let Some(current_item) = self.current_item { + let dependency_id = struct_type.borrow().id; + self.interner.add_type_dependency(current_item, dependency_id); + } + + Type::Struct(struct_type, args) + } + None => Type::Error, + } + } + + fn resolve_trait_as_type( + &mut self, + path: Path, + args: Vec, + new_variables: &mut Generics, + ) -> Type { + let args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + + if let Some(t) = self.lookup_trait_or_error(path) { + Type::TraitAsType(t.id, Rc::new(t.name.to_string()), args) + } else { + Type::Error + } + } + + fn verify_generics_count( + &mut self, + expected_count: usize, + args: &mut Vec, + span: Span, + type_name: impl FnOnce() -> String, + ) { + if args.len() != expected_count { + self.push_err(ResolverError::IncorrectGenericCount { + span, + item_name: type_name(), + actual: args.len(), + expected: expected_count, + }); + + // Fix the generic count so we can continue typechecking + args.resize_with(expected_count, || Type::Error); + } + } + + pub fn lookup_generic_or_global_type(&mut self, path: &Path) -> Option { + if path.segments.len() == 1 { + let name = &path.last_segment().0.contents; + 
if let Some((name, var, _)) = self.find_generic(name) { + return Some(Type::NamedGeneric(var.clone(), name.clone())); + } + } + + // If we cannot find a local generic of the same name, try to look up a global + match self.resolve_path(path.clone()) { + Ok(ModuleDefId::GlobalId(id)) => { + if let Some(current_item) = self.current_item { + self.interner.add_global_dependency(current_item, id); + } + + Some(Type::Constant(self.eval_global_as_array_length(id, path))) + } + _ => None, + } + } + + fn resolve_array_size( + &mut self, + length: Option, + new_variables: &mut Generics, + ) -> Type { + match length { + None => { + let id = self.interner.next_type_variable_id(); + let typevar = TypeVariable::unbound(id); + new_variables.push(typevar.clone()); + + // 'Named'Generic is a bit of a misnomer here, we want a type variable that + // wont be bound over but this one has no name since we do not currently + // require users to explicitly be generic over array lengths. + Type::NamedGeneric(typevar, Rc::new("".into())) + } + Some(length) => self.convert_expression_type(length), + } + } + + pub(super) fn convert_expression_type(&mut self, length: UnresolvedTypeExpression) -> Type { + match length { + UnresolvedTypeExpression::Variable(path) => { + self.lookup_generic_or_global_type(&path).unwrap_or_else(|| { + self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); + Type::Constant(0) + }) + } + UnresolvedTypeExpression::Constant(int, _) => Type::Constant(int), + UnresolvedTypeExpression::BinaryOperation(lhs, op, rhs, _) => { + let (lhs_span, rhs_span) = (lhs.span(), rhs.span()); + let lhs = self.convert_expression_type(*lhs); + let rhs = self.convert_expression_type(*rhs); + + match (lhs, rhs) { + (Type::Constant(lhs), Type::Constant(rhs)) => { + Type::Constant(op.function()(lhs, rhs)) + } + (lhs, _) => { + let span = + if !matches!(lhs, Type::Constant(_)) { lhs_span } else { rhs_span }; + self.push_err(ResolverError::InvalidArrayLengthExpr { span }); + 
Type::Constant(0) + } + } + } + } + } + + // this resolves Self::some_static_method, inside an impl block (where we don't have a concrete self_type) + // + // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not + // E.g. `t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_static_method_by_self( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + let trait_id = self.trait_id?; + + if path.kind == PathKind::Plain && path.segments.len() == 2 { + let name = &path.segments[0].0.contents; + let method = &path.segments[1]; + + if name == SELF_TYPE_NAME { + let the_trait = self.interner.get_trait(trait_id); + let method = the_trait.find_method(method.0.contents.as_str())?; + + let constraint = TraitConstraint { + typ: self.self_type.clone()?, + trait_generics: Type::from_generics(&the_trait.generics), + trait_id, + }; + return Some((method, constraint, false)); + } + } + None + } + + // this resolves TraitName::some_static_method + // + // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_static_method( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + if path.kind == PathKind::Plain && path.segments.len() == 2 { + let method = &path.segments[1]; + + let mut trait_path = path.clone(); + trait_path.pop(); + let trait_id = self.lookup(trait_path).ok()?; + let the_trait = self.interner.get_trait(trait_id); + + let method = the_trait.find_method(method.0.contents.as_str())?; + let constraint = TraitConstraint { + typ: Type::TypeVariable( + the_trait.self_type_typevar.clone(), + TypeVariableKind::Normal, + ), + trait_generics: Type::from_generics(&the_trait.generics), + trait_id, + }; + return Some((method, constraint, false)); + } + None + } + + // This resolves a static trait method T::trait_method by iterating over the where clause + // + // Returns the trait method, trait constraint, and whether the impl is assumed from a where + // clause. This is always true since this helper searches where clauses for a generic constraint. + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_method_by_named_generic( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + if path.segments.len() != 2 { + return None; + } + + for UnresolvedTraitConstraint { typ, trait_bound } in self.trait_bounds.clone() { + if let UnresolvedTypeData::Named(constraint_path, _, _) = &typ.typ { + // if `path` is `T::method_name`, we're looking for constraint of the form `T: SomeTrait` + if constraint_path.segments.len() == 1 + && path.segments[0] != constraint_path.last_segment() + { + continue; + } + + if let Ok(ModuleDefId::TraitId(trait_id)) = + self.resolve_path(trait_bound.trait_path.clone()) + { + let the_trait = self.interner.get_trait(trait_id); + if let Some(method) = + the_trait.find_method(path.segments.last().unwrap().0.contents.as_str()) + { + let constraint = TraitConstraint { + trait_id, + typ: self.resolve_type(typ.clone()), + trait_generics: vecmap(trait_bound.trait_generics, |typ| { + self.resolve_type(typ) + }), + }; + return Some((method, constraint, true)); + } + } + } + } + None + } + + // Try to resolve the given trait method path. + // + // Returns the trait method, trait constraint, and whether the impl is assumed to exist by a where clause or not + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + pub(super) fn resolve_trait_generic_path( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { + self.resolve_trait_static_method_by_self(path) + .or_else(|| self.resolve_trait_static_method(path)) + .or_else(|| self.resolve_trait_method_by_named_generic(path)) + } + + fn eval_global_as_array_length(&mut self, global: GlobalId, path: &Path) -> u64 { + let Some(stmt) = self.interner.get_global_let_statement(global) else { + let path = path.clone(); + self.push_err(ResolverError::NoSuchNumericTypeVariable { path }); + return 0; + }; + + let length = stmt.expression; + let span = self.interner.expr_span(&length); + let result = self.try_eval_array_length_id(length, span); + + match result.map(|length| length.try_into()) { + Ok(Ok(length_value)) => return length_value, + Ok(Err(_cast_err)) => self.push_err(ResolverError::IntegerTooLarge { span }), + Err(Some(error)) => self.push_err(error), + Err(None) => (), + } + 0 + } + + fn try_eval_array_length_id( + &self, + rhs: ExprId, + span: Span, + ) -> Result> { + // Arbitrary amount of recursive calls to try before giving up + let fuel = 100; + self.try_eval_array_length_id_with_fuel(rhs, span, fuel) + } + + fn try_eval_array_length_id_with_fuel( + &self, + rhs: ExprId, + span: Span, + fuel: u32, + ) -> Result> { + if fuel == 0 { + // If we reach here, it is likely from evaluating cyclic globals. We expect an error to + // be issued for them after name resolution so issue no error now. 
+ return Err(None); + } + + match self.interner.expression(&rhs) { + HirExpression::Literal(HirLiteral::Integer(int, false)) => { + int.try_into_u128().ok_or(Some(ResolverError::IntegerTooLarge { span })) + } + HirExpression::Ident(ident) => { + let definition = self.interner.definition(ident.id); + match definition.kind { + DefinitionKind::Global(global_id) => { + let let_statement = self.interner.get_global_let_statement(global_id); + if let Some(let_statement) = let_statement { + let expression = let_statement.expression; + self.try_eval_array_length_id_with_fuel(expression, span, fuel - 1) + } else { + Err(Some(ResolverError::InvalidArrayLengthExpr { span })) + } + } + _ => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), + } + } + HirExpression::Infix(infix) => { + let lhs = self.try_eval_array_length_id_with_fuel(infix.lhs, span, fuel - 1)?; + let rhs = self.try_eval_array_length_id_with_fuel(infix.rhs, span, fuel - 1)?; + + match infix.operator.kind { + BinaryOpKind::Add => Ok(lhs + rhs), + BinaryOpKind::Subtract => Ok(lhs - rhs), + BinaryOpKind::Multiply => Ok(lhs * rhs), + BinaryOpKind::Divide => Ok(lhs / rhs), + BinaryOpKind::Equal => Ok((lhs == rhs) as u128), + BinaryOpKind::NotEqual => Ok((lhs != rhs) as u128), + BinaryOpKind::Less => Ok((lhs < rhs) as u128), + BinaryOpKind::LessEqual => Ok((lhs <= rhs) as u128), + BinaryOpKind::Greater => Ok((lhs > rhs) as u128), + BinaryOpKind::GreaterEqual => Ok((lhs >= rhs) as u128), + BinaryOpKind::And => Ok(lhs & rhs), + BinaryOpKind::Or => Ok(lhs | rhs), + BinaryOpKind::Xor => Ok(lhs ^ rhs), + BinaryOpKind::ShiftRight => Ok(lhs >> rhs), + BinaryOpKind::ShiftLeft => Ok(lhs << rhs), + BinaryOpKind::Modulo => Ok(lhs % rhs), + } + } + _other => Err(Some(ResolverError::InvalidArrayLengthExpr { span })), + } + } + + /// Check if an assignment is overflowing with respect to `annotated_type` + /// in a declaration statement where `annotated_type` is an unsigned integer + pub(super) fn lint_overflowing_uint(&mut 
self, rhs_expr: &ExprId, annotated_type: &Type) { + let expr = self.interner.expression(rhs_expr); + let span = self.interner.expr_span(rhs_expr); + match expr { + HirExpression::Literal(HirLiteral::Integer(value, false)) => { + let v = value.to_u128(); + if let Type::Integer(_, bit_count) = annotated_type { + let bit_count: u32 = (*bit_count).into(); + let max = 1 << bit_count; + if v >= max { + self.push_err(TypeCheckError::OverflowingAssignment { + expr: value, + ty: annotated_type.clone(), + range: format!("0..={}", max - 1), + span, + }); + }; + }; + } + HirExpression::Prefix(expr) => { + self.lint_overflowing_uint(&expr.rhs, annotated_type); + if matches!(expr.operator, UnaryOp::Minus) { + self.push_err(TypeCheckError::InvalidUnaryOp { + kind: "annotated_type".to_string(), + span, + }); + } + } + HirExpression::Infix(expr) => { + self.lint_overflowing_uint(&expr.lhs, annotated_type); + self.lint_overflowing_uint(&expr.rhs, annotated_type); + } + _ => {} + } + } + + pub(super) fn unify( + &mut self, + actual: &Type, + expected: &Type, + make_error: impl FnOnce() -> TypeCheckError, + ) { + let mut errors = Vec::new(); + actual.unify(expected, &mut errors, make_error); + self.errors.extend(errors.into_iter().map(|error| (error.into(), self.file))); + } + + /// Wrapper of Type::unify_with_coercions using self.errors + pub(super) fn unify_with_coercions( + &mut self, + actual: &Type, + expected: &Type, + expression: ExprId, + make_error: impl FnOnce() -> TypeCheckError, + ) { + let mut errors = Vec::new(); + actual.unify_with_coercions(expected, expression, self.interner, &mut errors, make_error); + self.errors.extend(errors.into_iter().map(|error| (error.into(), self.file))); + } + + /// Return a fresh integer or field type variable and log it + /// in self.type_variables to default it later. 
+ pub(super) fn polymorphic_integer_or_field(&mut self) -> Type { + let typ = Type::polymorphic_integer_or_field(self.interner); + self.type_variables.push(typ.clone()); + typ + } + + /// Return a fresh integer type variable and log it + /// in self.type_variables to default it later. + pub(super) fn polymorphic_integer(&mut self) -> Type { + let typ = Type::polymorphic_integer(self.interner); + self.type_variables.push(typ.clone()); + typ + } + + /// Translates a (possibly Unspecified) UnresolvedType to a Type. + /// Any UnresolvedType::Unspecified encountered are replaced with fresh type variables. + pub(super) fn resolve_inferred_type(&mut self, typ: UnresolvedType) -> Type { + match &typ.typ { + UnresolvedTypeData::Unspecified => self.interner.next_type_variable(), + _ => self.resolve_type_inner(typ, &mut vec![]), + } + } + + pub(super) fn type_check_prefix_operand( + &mut self, + op: &crate::ast::UnaryOp, + rhs_type: &Type, + span: Span, + ) -> Type { + let mut unify = |this: &mut Self, expected| { + this.unify(rhs_type, &expected, || TypeCheckError::TypeMismatch { + expr_typ: rhs_type.to_string(), + expected_typ: expected.to_string(), + expr_span: span, + }); + expected + }; + + match op { + crate::ast::UnaryOp::Minus => { + if rhs_type.is_unsigned() { + self.push_err(TypeCheckError::InvalidUnaryOp { + kind: rhs_type.to_string(), + span, + }); + } + let expected = self.polymorphic_integer_or_field(); + self.unify(rhs_type, &expected, || TypeCheckError::InvalidUnaryOp { + kind: rhs_type.to_string(), + span, + }); + expected + } + crate::ast::UnaryOp::Not => { + let rhs_type = rhs_type.follow_bindings(); + + // `!` can work on booleans or integers + if matches!(rhs_type, Type::Integer(..)) { + return rhs_type; + } + + unify(self, Type::Bool) + } + crate::ast::UnaryOp::MutableReference => { + Type::MutableReference(Box::new(rhs_type.follow_bindings())) + } + crate::ast::UnaryOp::Dereference { implicitly_added: _ } => { + let element_type = 
self.interner.next_type_variable(); + unify(self, Type::MutableReference(Box::new(element_type.clone()))); + element_type + } + } + } + + /// Insert as many dereference operations as necessary to automatically dereference a method + /// call object to its base value type T. + pub(super) fn insert_auto_dereferences(&mut self, object: ExprId, typ: Type) -> (ExprId, Type) { + if let Type::MutableReference(element) = typ { + let location = self.interner.id_location(object); + + let object = self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { + operator: UnaryOp::Dereference { implicitly_added: true }, + rhs: object, + })); + self.interner.push_expr_type(object, element.as_ref().clone()); + self.interner.push_expr_location(object, location.span, location.file); + + // Recursively dereference to allow for converting &mut &mut T to T + self.insert_auto_dereferences(object, *element) + } else { + (object, typ) + } + } + + /// Given a method object: `(*foo).bar` of a method call `(*foo).bar.baz()`, remove the + /// implicitly added dereference operator if one is found. + /// + /// Returns Some(new_expr_id) if a dereference was removed and None otherwise. + fn try_remove_implicit_dereference(&mut self, object: ExprId) -> Option { + match self.interner.expression(&object) { + HirExpression::MemberAccess(mut access) => { + let new_lhs = self.try_remove_implicit_dereference(access.lhs)?; + access.lhs = new_lhs; + access.is_offset = true; + + // `object` will have a different type now, which will be filled in + // later when type checking the method call as a function call. + self.interner.replace_expr(&object, HirExpression::MemberAccess(access)); + Some(object) + } + HirExpression::Prefix(prefix) => match prefix.operator { + // Found a dereference we can remove. Now just replace it with its rhs to remove it. 
+ UnaryOp::Dereference { implicitly_added: true } => Some(prefix.rhs), + _ => None, + }, + _ => None, + } + } + + fn bind_function_type_impl( + &mut self, + fn_params: &[Type], + fn_ret: &Type, + callsite_args: &[(Type, ExprId, Span)], + span: Span, + ) -> Type { + if fn_params.len() != callsite_args.len() { + self.push_err(TypeCheckError::ParameterCountMismatch { + expected: fn_params.len(), + found: callsite_args.len(), + span, + }); + return Type::Error; + } + + for (param, (arg, _, arg_span)) in fn_params.iter().zip(callsite_args) { + self.unify(arg, param, || TypeCheckError::TypeMismatch { + expected_typ: param.to_string(), + expr_typ: arg.to_string(), + expr_span: *arg_span, + }); + } + + fn_ret.clone() + } + + pub(super) fn bind_function_type( + &mut self, + function: Type, + args: Vec<(Type, ExprId, Span)>, + span: Span, + ) -> Type { + // Could do a single unification for the entire function type, but matching beforehand + // lets us issue a more precise error on the individual argument that fails to type check. + match function { + Type::TypeVariable(binding, TypeVariableKind::Normal) => { + if let TypeBinding::Bound(typ) = &*binding.borrow() { + return self.bind_function_type(typ.clone(), args, span); + } + + let ret = self.interner.next_type_variable(); + let args = vecmap(args, |(arg, _, _)| arg); + let env_type = self.interner.next_type_variable(); + let expected = Type::Function(args, Box::new(ret.clone()), Box::new(env_type)); + + if let Err(error) = binding.try_bind(expected, span) { + self.push_err(error); + } + ret + } + // The closure env is ignored on purpose: call arguments never place + // constraints on closure environments. 
+ Type::Function(parameters, ret, _env) => { + self.bind_function_type_impl(¶meters, &ret, &args, span) + } + Type::Error => Type::Error, + found => { + self.push_err(TypeCheckError::ExpectedFunction { found, span }); + Type::Error + } + } + } + + pub(super) fn check_cast(&mut self, from: Type, to: &Type, span: Span) -> Type { + match from.follow_bindings() { + Type::Integer(..) + | Type::FieldElement + | Type::TypeVariable(_, TypeVariableKind::IntegerOrField) + | Type::TypeVariable(_, TypeVariableKind::Integer) + | Type::Bool => (), + + Type::TypeVariable(_, _) => { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + return Type::Error; + } + Type::Error => return Type::Error, + from => { + self.push_err(TypeCheckError::InvalidCast { from, span }); + return Type::Error; + } + } + + match to { + Type::Integer(sign, bits) => Type::Integer(*sign, *bits), + Type::FieldElement => Type::FieldElement, + Type::Bool => Type::Bool, + Type::Error => Type::Error, + _ => { + self.push_err(TypeCheckError::UnsupportedCast { span }); + Type::Error + } + } + } + + // Given a binary comparison operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. 
+ fn comparator_operand_type_rules( + &mut self, + lhs_type: &Type, + rhs_type: &Type, + op: &HirBinaryOp, + span: Span, + ) -> Result<(Type, bool), TypeCheckError> { + use Type::*; + + match (lhs_type, rhs_type) { + // Avoid reporting errors multiple times + (Error, _) | (_, Error) => Ok((Bool, false)), + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + self.comparator_operand_type_rules(&alias, other, op, span) + } + + // Matches on TypeVariable must be first to follow any type + // bindings. + (TypeVariable(var, _), other) | (other, TypeVariable(var, _)) => { + if let TypeBinding::Bound(binding) = &*var.borrow() { + return self.comparator_operand_type_rules(other, binding, op, span); + } + + let use_impl = self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((Bool, use_impl)) + } + (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { + if sign_x != sign_y { + return Err(TypeCheckError::IntegerSignedness { + sign_x: *sign_x, + sign_y: *sign_y, + span, + }); + } + if bit_width_x != bit_width_y { + return Err(TypeCheckError::IntegerBitWidth { + bit_width_x: *bit_width_x, + bit_width_y: *bit_width_y, + span, + }); + } + Ok((Bool, false)) + } + (FieldElement, FieldElement) => { + if op.kind.is_valid_for_field_type() { + Ok((Bool, false)) + } else { + Err(TypeCheckError::FieldComparison { span }) + } + } + + // <= and friends are technically valid for booleans, just not very useful + (Bool, Bool) => Ok((Bool, false)), + + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((Bool, true)) + } + } + } + + /// Handles the TypeVariable case for checking binary operators. + /// Returns true if we should use the impl for the operator instead of the primitive + /// version of it. 
+ fn bind_type_variables_for_infix( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) -> bool { + self.unify(lhs_type, rhs_type, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + + let use_impl = !lhs_type.is_numeric(); + + // If this operator isn't valid for fields we have to possibly narrow + // TypeVariableKind::IntegerOrField to TypeVariableKind::Integer. + // Doing so also ensures a type error if Field is used. + // The is_numeric check is to allow impls for custom types to bypass this. + if !op.kind.is_valid_for_field_type() && lhs_type.is_numeric() { + let target = Type::polymorphic_integer(self.interner); + + use crate::ast::BinaryOpKind::*; + use TypeCheckError::*; + self.unify(lhs_type, &target, || match op.kind { + Less | LessEqual | Greater | GreaterEqual => FieldComparison { span }, + And | Or | Xor | ShiftRight | ShiftLeft => FieldBitwiseOp { span }, + Modulo => FieldModulo { span }, + other => unreachable!("Operator {other:?} should be valid for Field"), + }); + } + + use_impl + } + + // Given a binary operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. 
+ pub(super) fn infix_operand_type_rules( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) -> Result<(Type, bool), TypeCheckError> { + if op.kind.is_comparator() { + return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); + } + + use Type::*; + match (lhs_type, rhs_type) { + // An error type on either side will always return an error + (Error, _) | (_, Error) => Ok((Error, false)), + (Alias(alias, args), other) | (other, Alias(alias, args)) => { + let alias = alias.borrow().get_type(args); + self.infix_operand_type_rules(&alias, op, other, span) + } + + // Matches on TypeVariable must be first so that we follow any type + // bindings. + (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { + if let TypeBinding::Bound(binding) = &*int.borrow() { + return self.infix_operand_type_rules(binding, op, other, span); + } + if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { + self.unify( + rhs_type, + &Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight), + || TypeCheckError::InvalidShiftSize { span }, + ); + let use_impl = if lhs_type.is_numeric() { + let integer_type = Type::polymorphic_integer(self.interner); + self.bind_type_variables_for_infix(lhs_type, op, &integer_type, span) + } else { + true + }; + return Ok((lhs_type.clone(), use_impl)); + } + let use_impl = self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((other.clone(), use_impl)) + } + (Integer(sign_x, bit_width_x), Integer(sign_y, bit_width_y)) => { + if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { + if *sign_y != Signedness::Unsigned || *bit_width_y != IntegerBitSize::Eight { + return Err(TypeCheckError::InvalidShiftSize { span }); + } + return Ok((Integer(*sign_x, *bit_width_x), false)); + } + if sign_x != sign_y { + return Err(TypeCheckError::IntegerSignedness { + sign_x: *sign_x, + sign_y: *sign_y, + span, + }); + } + if bit_width_x != bit_width_y { + 
return Err(TypeCheckError::IntegerBitWidth { + bit_width_x: *bit_width_x, + bit_width_y: *bit_width_y, + span, + }); + } + Ok((Integer(*sign_x, *bit_width_x), false)) + } + // The result of two Fields is always a witness + (FieldElement, FieldElement) => { + if !op.kind.is_valid_for_field_type() { + if op.kind == BinaryOpKind::Modulo { + return Err(TypeCheckError::FieldModulo { span }); + } else { + return Err(TypeCheckError::FieldBitwiseOp { span }); + } + } + Ok((FieldElement, false)) + } + + (Bool, Bool) => Ok((Bool, false)), + + (lhs, rhs) => { + if op.kind == BinaryOpKind::ShiftLeft || op.kind == BinaryOpKind::ShiftRight { + if rhs == &Type::Integer(Signedness::Unsigned, IntegerBitSize::Eight) { + return Ok((lhs.clone(), true)); + } + return Err(TypeCheckError::InvalidShiftSize { span }); + } + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((lhs.clone(), true)) + } + } + } + + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. + /// + /// Although by this point the operator is expected to already have a trait impl, + /// we still need to match the operator's type against the method's instantiated type + /// to ensure the instantiation bindings are correct and the monomorphizer can + /// re-apply the needed bindings. + pub(super) fn type_check_operator_method( + &mut self, + expr_id: ExprId, + trait_method_id: TraitMethodId, + object_type: &Type, + span: Span, + ) { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + + let method = &the_trait.methods[trait_method_id.method_index]; + let (method_type, mut bindings) = method.typ.clone().instantiate(self.interner); + + match method_type { + Type::Function(args, _, _) => { + // We can cheat a bit and match against only the object type here since no operator + // overload uses other generic parameters or return types aside from the object type. 
+ let expected_object_type = &args[0]; + self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_object_type.to_string(), + expr_typ: object_type.to_string(), + expr_span: span, + }); + } + other => { + unreachable!("Expected operator method to have a function type, but found {other}") + } + } + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); + if let Some(TraitImplKind::Assumed { object_type, trait_generics }) = impl_kind { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner.select_impl_for_expression( + expr_id, + TraitImplKind::Assumed { object_type, trait_generics }, + ); + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + } + + pub(super) fn type_check_member_access( + &mut self, + mut access: HirMemberAccess, + expr_id: ExprId, + lhs_type: Type, + span: Span, + ) -> Type { + let access_lhs = &mut access.lhs; + + let dereference_lhs = |this: &mut Self, lhs_type, element| { + let old_lhs = *access_lhs; + *access_lhs = this.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { + operator: crate::ast::UnaryOp::Dereference { implicitly_added: true }, + rhs: old_lhs, + })); + this.interner.push_expr_type(old_lhs, lhs_type); + this.interner.push_expr_type(*access_lhs, element); + + let old_location = this.interner.id_location(old_lhs); + this.interner.push_expr_location(*access_lhs, span, old_location.file); + }; + + // If this access is just a field offset, we want to avoid dereferencing + let dereference_lhs = (!access.is_offset).then_some(dereference_lhs); + + match self.check_field_access(&lhs_type, 
&access.rhs.0.contents, span, dereference_lhs) { + Some((element_type, index)) => { + self.interner.set_field_index(expr_id, index); + // We must update `access` in case we added any dereferences to it + self.interner.replace_expr(&expr_id, HirExpression::MemberAccess(access)); + element_type + } + None => Type::Error, + } + } + + pub(super) fn lookup_method( + &mut self, + object_type: &Type, + method_name: &str, + span: Span, + ) -> Option { + match object_type.follow_bindings() { + Type::Struct(typ, _args) => { + let id = typ.borrow().id; + match self.interner.lookup_method(object_type, id, method_name, false) { + Some(method_id) => Some(HirMethodReference::FuncId(method_id)), + None => { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + } + } + // TODO: We should allow method calls on `impl Trait`s eventually. + // For now it is fine since they are only allowed on return types. + Type::TraitAsType(..) 
=> { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + Type::NamedGeneric(_, _) => { + let func_meta = self.interner.function_meta( + &self.current_function.expect("unexpected method outside a function"), + ); + + for constraint in &func_meta.trait_constraints { + if *object_type == constraint.typ { + if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { + for (method_index, method) in the_trait.methods.iter().enumerate() { + if method.name.0.contents == method_name { + let trait_method = TraitMethodId { + trait_id: constraint.trait_id, + method_index, + }; + return Some(HirMethodReference::TraitMethodId( + trait_method, + constraint.trait_generics.clone(), + )); + } + } + } + } + } + + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + // Mutable references to another type should resolve to methods of their element type. + // This may be a struct or a primitive type. 
+ Type::MutableReference(element) => self + .interner + .lookup_primitive_trait_method_mut(element.as_ref(), method_name) + .map(HirMethodReference::FuncId) + .or_else(|| self.lookup_method(&element, method_name, span)), + + // If we fail to resolve the object to a struct type, we have no way of type + // checking its arguments as we can't even resolve the name of the function + Type::Error => None, + + // The type variable must be unbound at this point since follow_bindings was called + Type::TypeVariable(_, TypeVariableKind::Normal) => { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + None + } + + other => match self.interner.lookup_primitive_method(&other, method_name) { + Some(method_id) => Some(HirMethodReference::FuncId(method_id)), + None => { + self.push_err(TypeCheckError::UnresolvedMethodCall { + method_name: method_name.to_string(), + object_type: object_type.clone(), + span, + }); + None + } + }, + } + } + + pub(super) fn type_check_call( + &mut self, + call: &HirCallExpression, + func_type: Type, + args: Vec<(Type, ExprId, Span)>, + span: Span, + ) -> Type { + // Need to setup these flags here as `self` is borrowed mutably to type check the rest of the call expression + // These flags are later used to type check calls to unconstrained functions from constrained functions + let func_mod = self.current_function.map(|func| self.interner.function_modifiers(&func)); + let is_current_func_constrained = + func_mod.map_or(true, |func_mod| !func_mod.is_unconstrained); + + let is_unconstrained_call = self.is_unconstrained_call(call.func); + self.check_if_deprecated(call.func); + + // Check that we are not passing a mutable reference from a constrained runtime to an unconstrained runtime + if is_current_func_constrained && is_unconstrained_call { + for (typ, _, _) in args.iter() { + if matches!(&typ.follow_bindings(), Type::MutableReference(_)) { + self.push_err(TypeCheckError::ConstrainedReferenceToUnconstrained { span }); + } + } + } + + let 
return_type = self.bind_function_type(func_type, args, span); + + // Check that we are not passing a slice from an unconstrained runtime to a constrained runtime + if is_current_func_constrained && is_unconstrained_call { + if return_type.contains_slice() { + self.push_err(TypeCheckError::UnconstrainedSliceReturnToConstrained { span }); + } else if matches!(&return_type.follow_bindings(), Type::MutableReference(_)) { + self.push_err(TypeCheckError::UnconstrainedReferenceToConstrained { span }); + } + }; + + return_type + } + + fn check_if_deprecated(&mut self, expr: ExprId) { + if let HirExpression::Ident(HirIdent { location, id, impl_kind: _ }) = + self.interner.expression(&expr) + { + if let Some(DefinitionKind::Function(func_id)) = + self.interner.try_definition(id).map(|def| &def.kind) + { + let attributes = self.interner.function_attributes(func_id); + if let Some(note) = attributes.get_deprecated_note() { + self.push_err(TypeCheckError::CallDeprecated { + name: self.interner.definition_name(id).to_string(), + note, + span: location.span, + }); + } + } + } + } + + fn is_unconstrained_call(&self, expr: ExprId) -> bool { + if let HirExpression::Ident(HirIdent { id, .. }) = self.interner.expression(&expr) { + if let Some(DefinitionKind::Function(func_id)) = + self.interner.try_definition(id).map(|def| &def.kind) + { + let modifiers = self.interner.function_modifiers(func_id); + return modifiers.is_unconstrained; + } + } + false + } + + /// Check if the given method type requires a mutable reference to the object type, and check + /// if the given object type is already a mutable reference. If not, add one. + /// This is used to automatically transform a method call: `foo.bar()` into a function + /// call: `bar(&mut foo)`. + /// + /// A notable corner case of this function is where it interacts with auto-deref of `.`. + /// If a field is being mutated e.g. 
`foo.bar.mutate_bar()` where `foo: &mut Foo`, the compiler + /// will insert a dereference before bar `(*foo).bar.mutate_bar()` which would cause us to + /// mutate a copy of bar rather than a reference to it. We must check for this corner case here + /// and remove the implicitly added dereference operator if we find one. + pub(super) fn try_add_mutable_reference_to_object( + &mut self, + function_type: &Type, + object_type: &mut Type, + object: &mut ExprId, + ) { + let expected_object_type = match function_type { + Type::Function(args, _, _) => args.first(), + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(args, _, _) => args.first(), + typ => unreachable!("Unexpected type for function: {typ}"), + }, + typ => unreachable!("Unexpected type for function: {typ}"), + }; + + if let Some(expected_object_type) = expected_object_type { + let actual_type = object_type.follow_bindings(); + + if matches!(expected_object_type.follow_bindings(), Type::MutableReference(_)) { + if !matches!(actual_type, Type::MutableReference(_)) { + if let Err(error) = verify_mutable_reference(self.interner, *object) { + self.push_err(TypeCheckError::ResolverError(error)); + } + + let new_type = Type::MutableReference(Box::new(actual_type)); + *object_type = new_type.clone(); + + // First try to remove a dereference operator that may have been implicitly + // inserted by a field access expression `foo.bar` on a mutable reference `foo`. 
+ let new_object = self.try_remove_implicit_dereference(*object); + + // If that didn't work, then wrap the whole expression in an `&mut` + *object = new_object.unwrap_or_else(|| { + let location = self.interner.id_location(*object); + + let new_object = + self.interner.push_expr(HirExpression::Prefix(HirPrefixExpression { + operator: UnaryOp::MutableReference, + rhs: *object, + })); + self.interner.push_expr_type(new_object, new_type); + self.interner.push_expr_location(new_object, location.span, location.file); + new_object + }); + } + // Otherwise if the object type is a mutable reference and the method is not, insert as + // many dereferences as needed. + } else if matches!(actual_type, Type::MutableReference(_)) { + let (new_object, new_type) = self.insert_auto_dereferences(*object, actual_type); + *object_type = new_type; + *object = new_object; + } + } + } + + pub fn type_check_function_body(&mut self, body_type: Type, meta: &FuncMeta, body_id: ExprId) { + let (expr_span, empty_function) = self.function_info(body_id); + let declared_return_type = meta.return_type(); + + let func_span = self.interner.expr_span(&body_id); // XXX: We could be more specific and return the span of the last stmt, however stmts do not have spans yet + if let Type::TraitAsType(trait_id, _, generics) = declared_return_type { + if self.interner.lookup_trait_implementation(&body_type, *trait_id, generics).is_err() { + self.push_err(TypeCheckError::TypeMismatchWithSource { + expected: declared_return_type.clone(), + actual: body_type, + span: func_span, + source: Source::Return(meta.return_type.clone(), expr_span), + }); + } + } else { + self.unify_with_coercions(&body_type, declared_return_type, body_id, || { + let mut error = TypeCheckError::TypeMismatchWithSource { + expected: declared_return_type.clone(), + actual: body_type.clone(), + span: func_span, + source: Source::Return(meta.return_type.clone(), expr_span), + }; + + if empty_function { + error = error.add_context( + 
"implicitly returns `()` as its body has no tail or `return` expression", + ); + } + error + }); + } + } + + fn function_info(&self, function_body_id: ExprId) -> (noirc_errors::Span, bool) { + let (expr_span, empty_function) = + if let HirExpression::Block(block) = self.interner.expression(&function_body_id) { + let last_stmt = block.statements().last(); + let mut span = self.interner.expr_span(&function_body_id); + + if let Some(last_stmt) = last_stmt { + if let HirStatement::Expression(expr) = self.interner.statement(last_stmt) { + span = self.interner.expr_span(&expr); + } + } + + (span, last_stmt.is_none()) + } else { + (self.interner.expr_span(&function_body_id), false) + }; + (expr_span, empty_function) + } + + pub fn verify_trait_constraint( + &mut self, + object_type: &Type, + trait_id: TraitId, + trait_generics: &[Type], + function_ident_id: ExprId, + span: Span, + ) { + match self.interner.lookup_trait_implementation(object_type, trait_id, trait_generics) { + Ok(impl_kind) => { + self.interner.select_impl_for_expression(function_ident_id, impl_kind); + } + Err(erroring_constraints) => { + if erroring_constraints.is_empty() { + self.push_err(TypeCheckError::TypeAnnotationsNeeded { span }); + } else { + // Don't show any errors where try_get_trait returns None. + // This can happen if a trait is used that was never declared. 
+ let constraints = erroring_constraints + .into_iter() + .map(|constraint| { + let r#trait = self.interner.try_get_trait(constraint.trait_id)?; + let mut name = r#trait.name.to_string(); + if !constraint.trait_generics.is_empty() { + let generics = + vecmap(&constraint.trait_generics, ToString::to_string); + name += &format!("<{}>", generics.join(", ")); + } + Some((constraint.typ, name)) + }) + .collect::>>(); + + if let Some(constraints) = constraints { + self.push_err(TypeCheckError::NoMatchingImplFound { constraints, span }); + } + } + } + } + } +} diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 2f6b101e62..4aac0fec9c 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -1,5 +1,6 @@ use super::dc_mod::collect_defs; use super::errors::{DefCollectorErrorKind, DuplicateType}; +use crate::elaborator::Elaborator; use crate::graph::CrateId; use crate::hir::comptime::{Interpreter, InterpreterError}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; @@ -129,14 +130,18 @@ pub struct UnresolvedGlobal { /// Given a Crate root, collect all definitions in that crate pub struct DefCollector { pub(crate) def_map: CrateDefMap, - pub(crate) collected_imports: Vec, - pub(crate) collected_functions: Vec, - pub(crate) collected_types: BTreeMap, - pub(crate) collected_type_aliases: BTreeMap, - pub(crate) collected_traits: BTreeMap, - pub(crate) collected_globals: Vec, - pub(crate) collected_impls: ImplMap, - pub(crate) collected_traits_impls: Vec, + pub(crate) imports: Vec, + pub(crate) items: CollectedItems, +} + +pub struct CollectedItems { + pub(crate) functions: Vec, + pub(crate) types: BTreeMap, + pub(crate) type_aliases: BTreeMap, + pub(crate) traits: BTreeMap, + pub(crate) globals: Vec, + pub(crate) impls: ImplMap, + pub(crate) trait_impls: Vec, } /// Maps the type and the module id in 
which the impl is defined to the functions contained in that @@ -210,14 +215,16 @@ impl DefCollector { fn new(def_map: CrateDefMap) -> DefCollector { DefCollector { def_map, - collected_imports: vec![], - collected_functions: vec![], - collected_types: BTreeMap::new(), - collected_type_aliases: BTreeMap::new(), - collected_traits: BTreeMap::new(), - collected_impls: HashMap::new(), - collected_globals: vec![], - collected_traits_impls: vec![], + imports: vec![], + items: CollectedItems { + functions: vec![], + types: BTreeMap::new(), + type_aliases: BTreeMap::new(), + traits: BTreeMap::new(), + impls: HashMap::new(), + globals: vec![], + trait_impls: vec![], + }, } } @@ -229,6 +236,7 @@ impl DefCollector { context: &mut Context, ast: SortedModule, root_file_id: FileId, + use_elaborator: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; @@ -242,7 +250,12 @@ impl DefCollector { let crate_graph = &context.crate_graph[crate_id]; for dep in crate_graph.dependencies.clone() { - errors.extend(CrateDefMap::collect_defs(dep.crate_id, context, macro_processors)); + errors.extend(CrateDefMap::collect_defs( + dep.crate_id, + context, + use_elaborator, + macro_processors, + )); let dep_def_root = context.def_map(&dep.crate_id).expect("ice: def map was just created").root; @@ -275,18 +288,13 @@ impl DefCollector { // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); - inject_prelude(crate_id, context, crate_root, &mut def_collector.collected_imports); + inject_prelude(crate_id, context, crate_root, &mut def_collector.imports); for submodule in submodules { - inject_prelude( - crate_id, - context, - LocalModuleId(submodule), - &mut def_collector.collected_imports, - ); + inject_prelude(crate_id, context, LocalModuleId(submodule), &mut def_collector.imports); } // Resolve unresolved imports collected from the crate, one by one. 
- for collected_import in def_collector.collected_imports { + for collected_import in std::mem::take(&mut def_collector.imports) { match resolve_import(crate_id, &collected_import, &context.def_maps) { Ok(resolved_import) => { if let Some(error) = resolved_import.error { @@ -323,6 +331,12 @@ impl DefCollector { } } + if use_elaborator { + let mut more_errors = Elaborator::elaborate(context, crate_id, def_collector.items); + more_errors.append(&mut errors); + return errors; + } + let mut resolved_module = ResolvedModule { errors, ..Default::default() }; // We must first resolve and intern the globals before we can resolve any stmts inside each function. @@ -330,26 +344,25 @@ impl DefCollector { // // Additionally, we must resolve integer globals before structs since structs may refer to // the values of integer globals as numeric generics. - let (literal_globals, other_globals) = - filter_literal_globals(def_collector.collected_globals); + let (literal_globals, other_globals) = filter_literal_globals(def_collector.items.globals); resolved_module.resolve_globals(context, literal_globals, crate_id); resolved_module.errors.extend(resolve_type_aliases( context, - def_collector.collected_type_aliases, + def_collector.items.type_aliases, crate_id, )); resolved_module.errors.extend(resolve_traits( context, - def_collector.collected_traits, + def_collector.items.traits, crate_id, )); // Must resolve structs before we resolve globals. 
resolved_module.errors.extend(resolve_structs( context, - def_collector.collected_types, + def_collector.items.types, crate_id, )); @@ -358,7 +371,7 @@ impl DefCollector { resolved_module.errors.extend(collect_trait_impls( context, crate_id, - &mut def_collector.collected_traits_impls, + &mut def_collector.items.trait_impls, )); // Before we resolve any function symbols we must go through our impls and @@ -368,11 +381,7 @@ impl DefCollector { // // These are resolved after trait impls so that struct methods are chosen // over trait methods if there are name conflicts. - resolved_module.errors.extend(collect_impls( - context, - crate_id, - &def_collector.collected_impls, - )); + resolved_module.errors.extend(collect_impls(context, crate_id, &def_collector.items.impls)); // We must wait to resolve non-integer globals until after we resolve structs since struct // globals will need to reference the struct type they're initialized to to ensure they are valid. @@ -383,7 +392,7 @@ impl DefCollector { &mut context.def_interner, crate_id, &context.def_maps, - def_collector.collected_functions, + def_collector.items.functions, None, &mut resolved_module.errors, ); @@ -392,13 +401,13 @@ impl DefCollector { &mut context.def_interner, crate_id, &context.def_maps, - def_collector.collected_impls, + def_collector.items.impls, &mut resolved_module.errors, )); resolved_module.trait_impl_functions = resolve_trait_impls( context, - def_collector.collected_traits_impls, + def_collector.items.trait_impls, crate_id, &mut resolved_module.errors, ); @@ -431,15 +440,18 @@ fn inject_prelude( crate_root: LocalModuleId, collected_imports: &mut Vec, ) { - let segments: Vec<_> = "std::prelude" - .split("::") - .map(|segment| crate::ast::Ident::new(segment.into(), Span::default())) - .collect(); + if !crate_id.is_stdlib() { + let segments: Vec<_> = "std::prelude" + .split("::") + .map(|segment| crate::ast::Ident::new(segment.into(), Span::default())) + .collect(); - let path = - Path { 
segments: segments.clone(), kind: crate::ast::PathKind::Dep, span: Span::default() }; + let path = Path { + segments: segments.clone(), + kind: crate::ast::PathKind::Dep, + span: Span::default(), + }; - if !crate_id.is_stdlib() { if let Ok(PathResolution { module_def_id, error }) = path_resolver::resolve_path( &context.def_maps, ModuleId { krate: crate_id, local_id: crate_root }, diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index b2ec7dbc81..e688f192d3 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -70,7 +70,7 @@ pub fn collect_defs( // Then add the imports to defCollector to resolve once all modules in the hierarchy have been resolved for import in ast.imports { - collector.def_collector.collected_imports.push(ImportDirective { + collector.def_collector.imports.push(ImportDirective { module_id: collector.module_id, path: import.path, alias: import.alias, @@ -126,7 +126,7 @@ impl<'a> ModCollector<'a> { errors.push((err.into(), self.file_id)); } - self.def_collector.collected_globals.push(UnresolvedGlobal { + self.def_collector.items.globals.push(UnresolvedGlobal { file_id: self.file_id, module_id: self.module_id, global_id, @@ -154,7 +154,7 @@ impl<'a> ModCollector<'a> { } let key = (r#impl.object_type, self.module_id); - let methods = self.def_collector.collected_impls.entry(key).or_default(); + let methods = self.def_collector.items.impls.entry(key).or_default(); methods.push((r#impl.generics, r#impl.type_span, unresolved_functions)); } } @@ -191,7 +191,7 @@ impl<'a> ModCollector<'a> { trait_generics: trait_impl.trait_generics, }; - self.def_collector.collected_traits_impls.push(unresolved_trait_impl); + self.def_collector.items.trait_impls.push(unresolved_trait_impl); } } @@ -269,7 +269,7 @@ impl<'a> ModCollector<'a> { } } - self.def_collector.collected_functions.push(unresolved_functions); + 
self.def_collector.items.functions.push(unresolved_functions); errors } @@ -316,7 +316,7 @@ impl<'a> ModCollector<'a> { } // And store the TypeId -> StructType mapping somewhere it is reachable - self.def_collector.collected_types.insert(id, unresolved); + self.def_collector.items.types.insert(id, unresolved); } definition_errors } @@ -354,7 +354,7 @@ impl<'a> ModCollector<'a> { errors.push((err.into(), self.file_id)); } - self.def_collector.collected_type_aliases.insert(type_alias_id, unresolved); + self.def_collector.items.type_aliases.insert(type_alias_id, unresolved); } errors } @@ -506,7 +506,7 @@ impl<'a> ModCollector<'a> { method_ids, fns_with_default_impl: unresolved_functions, }; - self.def_collector.collected_traits.insert(trait_id, unresolved); + self.def_collector.items.traits.insert(trait_id, unresolved); } errors } diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 590c2e3d6b..19e06387d4 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -73,6 +73,7 @@ impl CrateDefMap { pub fn collect_defs( crate_id: CrateId, context: &mut Context, + use_elaborator: bool, macro_processors: &[&dyn MacroProcessor], ) -> Vec<(CompilationError, FileId)> { // Check if this Crate has already been compiled @@ -116,7 +117,14 @@ impl CrateDefMap { }; // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect(def_map, context, ast, root_file_id, macro_processors)); + errors.extend(DefCollector::collect( + def_map, + context, + ast, + root_file_id, + use_elaborator, + macro_processors, + )); errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), root_file_id)).collect::>(), diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 8850331f68..343113836e 100644 --- a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ 
b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -2,11 +2,14 @@ use noirc_errors::{CustomDiagnostic, Span}; use thiserror::Error; use crate::graph::CrateId; +use crate::hir::def_collector::dc_crate::CompilationError; use std::collections::BTreeMap; use crate::ast::{Ident, ItemVisibility, Path, PathKind}; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleDefId, ModuleId, PerNs}; +use super::errors::ResolverError; + #[derive(Debug, Clone)] pub struct ImportDirective { pub module_id: LocalModuleId, @@ -53,6 +56,12 @@ pub struct ResolvedImport { pub error: Option, } +impl From for CompilationError { + fn from(error: PathResolutionError) -> Self { + Self::ResolverError(ResolverError::PathResolutionError(error)) + } +} + impl<'a> From<&'a PathResolutionError> for CustomDiagnostic { fn from(error: &'a PathResolutionError) -> Self { match &error { diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 60baaecab5..7dc307fe71 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -56,17 +56,17 @@ use crate::hir_def::{ use super::errors::{PubPosition, ResolverError}; use super::import::PathResolution; -const SELF_TYPE_NAME: &str = "Self"; +pub const SELF_TYPE_NAME: &str = "Self"; type Scope = GenericScope; type ScopeTree = GenericScopeTree; type ScopeForest = GenericScopeForest; pub struct LambdaContext { - captures: Vec, + pub captures: Vec, /// the index in the scope tree /// (sometimes being filled by ScopeTree's find method) - scope_index: usize, + pub scope_index: usize, } /// The primary jobs of the Resolver are to validate that every variable found refers to exactly 1 @@ -1345,7 +1345,7 @@ impl<'a> Resolver<'a> { range @ ForRange::Array(_) => { let for_stmt = range.into_for(for_loop.identifier, for_loop.block, for_loop.span); - self.resolve_stmt(for_stmt, for_loop.span) + 
self.resolve_stmt(for_stmt.kind, for_loop.span) } } } @@ -1361,7 +1361,7 @@ impl<'a> Resolver<'a> { StatementKind::Comptime(statement) => { let hir_statement = self.resolve_stmt(statement.kind, statement.span); let statement_id = self.interner.push_stmt(hir_statement); - self.interner.push_statement_location(statement_id, statement.span, self.file); + self.interner.push_stmt_location(statement_id, statement.span, self.file); HirStatement::Comptime(statement_id) } } @@ -1370,7 +1370,7 @@ impl<'a> Resolver<'a> { pub fn intern_stmt(&mut self, stmt: Statement) -> StmtId { let hir_stmt = self.resolve_stmt(stmt.kind, stmt.span); let id = self.interner.push_stmt(hir_stmt); - self.interner.push_statement_location(id, stmt.span, self.file); + self.interner.push_stmt_location(id, stmt.span, self.file); id } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 9b40c95998..4859810982 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -250,14 +250,14 @@ impl<'interner> TypeChecker<'interner> { } // TODO: update object_type here? - let function_call = method_call.into_function_call( + let (_, function_call) = method_call.into_function_call( &method_ref, object_type, location, self.interner, ); - self.interner.replace_expr(expr_id, function_call); + self.interner.replace_expr(expr_id, HirExpression::Call(function_call)); // Type check the new call now that it has been changed from a method call // to a function call. This way we avoid duplicating code. 
diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 0f8131d6eb..2e448858d9 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -25,7 +25,7 @@ use crate::{ Type, TypeBindings, }; -use self::errors::Source; +pub use self::errors::Source; pub struct TypeChecker<'interner> { interner: &'interner mut NodeInterner, diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index bf7d9b7b4b..8df6785e0e 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -200,13 +200,15 @@ pub enum HirMethodReference { impl HirMethodCallExpression { /// Converts a method call into a function call + /// + /// Returns ((func_var_id, func_var), call_expr) pub fn into_function_call( mut self, method: &HirMethodReference, object_type: Type, location: Location, interner: &mut NodeInterner, - ) -> HirExpression { + ) -> ((ExprId, HirIdent), HirCallExpression) { let mut arguments = vec![self.object]; arguments.append(&mut self.arguments); @@ -224,10 +226,11 @@ impl HirMethodCallExpression { (id, ImplKind::TraitMethod(*method_id, constraint, false)) } }; - let func = HirExpression::Ident(HirIdent { location, id, impl_kind }); - let func = interner.push_expr(func); + let func_var = HirIdent { location, id, impl_kind }; + let func = interner.push_expr(HirExpression::Ident(func_var.clone())); interner.push_expr_location(func, location.span, location.file); - HirExpression::Call(HirCallExpression { func, arguments, location }) + let expr = HirCallExpression { func, arguments, location }; + ((func, func_var), expr) } } diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index c38dd41fd3..ceec9ad858 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ 
b/compiler/noirc_frontend/src/hir_def/function.rs @@ -135,10 +135,7 @@ impl FuncMeta { /// So this method tells the type checker to ignore the return /// of the empty function, which is unit pub fn can_ignore_return_type(&self) -> bool { - match self.kind { - FunctionKind::LowLevel | FunctionKind::Builtin | FunctionKind::Oracle => true, - FunctionKind::Normal | FunctionKind::Recursive => false, - } + self.kind.can_ignore_return_type() } pub fn function_signature(&self) -> FunctionSignature { diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index f3b2a24c1f..f31aeea055 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -1423,14 +1423,14 @@ impl Type { /// Retrieves the type of the given field name /// Panics if the type is not a struct or tuple. - pub fn get_field_type(&self, field_name: &str) -> Type { + pub fn get_field_type(&self, field_name: &str) -> Option { match self { - Type::Struct(def, args) => def.borrow().get_field(field_name, args).unwrap().0, + Type::Struct(def, args) => def.borrow().get_field(field_name, args).map(|(typ, _)| typ), Type::Tuple(fields) => { let mut fields = fields.iter().enumerate(); - fields.find(|(i, _)| i.to_string() == *field_name).unwrap().1.clone() + fields.find(|(i, _)| i.to_string() == *field_name).map(|(_, typ)| typ).cloned() } - other => panic!("Tried to iterate over the fields of '{other}', which has none"), + _ => None, } } diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index 958a18ac2f..b05c635f43 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -12,6 +12,7 @@ pub mod ast; pub mod debug; +pub mod elaborator; pub mod graph; pub mod lexer; pub mod monomorphization; diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 88adc7a941..faf89016f9 100644 --- 
a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -532,7 +532,7 @@ impl NodeInterner { self.id_to_type.insert(expr_id.into(), typ); } - /// Store the type for an interned expression + /// Store the type for a definition pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { self.definition_to_type.insert(definition_id, typ); } @@ -696,7 +696,7 @@ impl NodeInterner { let statement = self.push_stmt(HirStatement::Error); let span = name.span(); let id = self.push_global(name, local_id, statement, file, attributes, mutable); - self.push_statement_location(statement, span, file); + self.push_stmt_location(statement, span, file); id } @@ -942,7 +942,7 @@ impl NodeInterner { self.id_location(stmt_id) } - pub fn push_statement_location(&mut self, id: StmtId, span: Span, file: FileId) { + pub fn push_stmt_location(&mut self, id: StmtId, span: Span, file: FileId) { self.id_to_location.insert(id.into(), Location::new(span, file)); } diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 5f99e9e347..fb80a7d801 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -1,1236 +1,1215 @@ +#![cfg(test)] + +#[cfg(test)] +mod name_shadowing; + // XXX: These tests repeat a lot of code // what we should do is have test cases which are passed to a test harness // A test harness will allow for more expressive and readable tests -#[cfg(test)] -mod test { - - use core::panic; - use std::collections::BTreeMap; - - use fm::FileId; - - use iter_extended::vecmap; - use noirc_errors::Location; - - use crate::hir::def_collector::dc_crate::CompilationError; - use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; - use crate::hir::def_map::ModuleData; - use crate::hir::resolution::errors::ResolverError; - use crate::hir::resolution::import::PathResolutionError; - use crate::hir::type_check::TypeCheckError; - use 
crate::hir::Context; - use crate::node_interner::{NodeInterner, StmtId}; - - use crate::hir::def_collector::dc_crate::DefCollector; - use crate::hir_def::expr::HirExpression; - use crate::hir_def::stmt::HirStatement; - use crate::monomorphization::monomorphize; - use crate::parser::ParserErrorReason; - use crate::ParsedModule; - use crate::{ - hir::def_map::{CrateDefMap, LocalModuleId}, - parse_program, - }; - use fm::FileManager; - use noirc_arena::Arena; +use core::panic; +use std::collections::BTreeMap; + +use fm::FileId; + +use iter_extended::vecmap; +use noirc_errors::Location; + +use crate::hir::def_collector::dc_crate::CompilationError; +use crate::hir::def_collector::errors::{DefCollectorErrorKind, DuplicateType}; +use crate::hir::def_map::ModuleData; +use crate::hir::resolution::errors::ResolverError; +use crate::hir::resolution::import::PathResolutionError; +use crate::hir::type_check::TypeCheckError; +use crate::hir::Context; +use crate::node_interner::{NodeInterner, StmtId}; + +use crate::hir::def_collector::dc_crate::DefCollector; +use crate::hir_def::expr::HirExpression; +use crate::hir_def::stmt::HirStatement; +use crate::monomorphization::monomorphize; +use crate::parser::ParserErrorReason; +use crate::ParsedModule; +use crate::{ + hir::def_map::{CrateDefMap, LocalModuleId}, + parse_program, +}; +use fm::FileManager; +use noirc_arena::Arena; + +pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { + errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) +} - pub(crate) fn has_parser_error(errors: &[(CompilationError, FileId)]) -> bool { - errors.iter().any(|(e, _f)| matches!(e, CompilationError::ParseError(_))) - } +pub(crate) fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { + errors.retain(|(error, _)| match error { + CompilationError::ParseError(error) => { + !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) + } + _ => true, + }); +} - pub(crate) 
fn remove_experimental_warnings(errors: &mut Vec<(CompilationError, FileId)>) { - errors.retain(|(error, _)| match error { - CompilationError::ParseError(error) => { - !matches!(error.reason(), Some(ParserErrorReason::ExperimentalFeature(..))) - } - _ => true, - }); - } - - pub(crate) fn get_program( - src: &str, - ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { - let root = std::path::Path::new("/"); - let fm = FileManager::new(root); - - let mut context = Context::new(fm, Default::default()); - context.def_interner.populate_dummy_operator_traits(); - let root_file_id = FileId::dummy(); - let root_crate_id = context.crate_graph.add_crate_root(root_file_id); - - let (program, parser_errors) = parse_program(src); - let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); - remove_experimental_warnings(&mut errors); - - if !has_parser_error(&errors) { - // Allocate a default Module for the root, giving it a ModuleId - let mut modules: Arena = Arena::default(); - let location = Location::new(Default::default(), root_file_id); - let root = modules.insert(ModuleData::new(None, location, false)); - - let def_map = CrateDefMap { - root: LocalModuleId(root), - modules, - krate: root_crate_id, - extern_prelude: BTreeMap::new(), - }; +pub(crate) fn get_program(src: &str) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) { + let root = std::path::Path::new("/"); + let fm = FileManager::new(root); + + let mut context = Context::new(fm, Default::default()); + context.def_interner.populate_dummy_operator_traits(); + let root_file_id = FileId::dummy(); + let root_crate_id = context.crate_graph.add_crate_root(root_file_id); + + let (program, parser_errors) = parse_program(src); + let mut errors = vecmap(parser_errors, |e| (e.into(), root_file_id)); + remove_experimental_warnings(&mut errors); + + if !has_parser_error(&errors) { + // Allocate a default Module for the root, giving it a ModuleId + let mut modules: Arena = Arena::default(); + let 
location = Location::new(Default::default(), root_file_id); + let root = modules.insert(ModuleData::new(None, location, false)); + + let def_map = CrateDefMap { + root: LocalModuleId(root), + modules, + krate: root_crate_id, + extern_prelude: BTreeMap::new(), + }; - // Now we want to populate the CrateDefMap using the DefCollector - errors.extend(DefCollector::collect( - def_map, - &mut context, - program.clone().into_sorted(), - root_file_id, - &[], // No macro processors - )); - } - (program, context, errors) + // Now we want to populate the CrateDefMap using the DefCollector + errors.extend(DefCollector::collect( + def_map, + &mut context, + program.clone().into_sorted(), + root_file_id, + false, + &[], // No macro processors + )); } + (program, context, errors) +} - pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { - get_program(src).2 - } +pub(crate) fn get_program_errors(src: &str) -> Vec<(CompilationError, FileId)> { + get_program(src).2 +} - #[test] - fn check_trait_implemented_for_all_t() { - let src = " - trait Default { - fn default() -> Self; - } - - trait Eq { - fn eq(self, other: Self) -> bool; +#[test] +fn check_trait_implemented_for_all_t() { + let src = " + trait Default { + fn default() -> Self; + } + + trait Eq { + fn eq(self, other: Self) -> bool; + } + + trait IsDefault { + fn is_default(self) -> bool; + } + + impl IsDefault for T where T: Default + Eq { + fn is_default(self) -> bool { + self.eq(T::default()) } - - trait IsDefault { - fn is_default(self) -> bool; + } + + struct Foo { + a: u64, + } + + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } + + impl Default for u64 { + fn default() -> Self { + 0 } - - impl IsDefault for T where T: Default + Eq { - fn is_default(self) -> bool { - self.eq(T::default()) - } + } + + impl Default for Foo { + fn default() -> Self { + Foo { a: Default::default() } } - - struct Foo { - a: u64, + } + + fn main(a: Foo) -> pub bool { + a.is_default() + 
}"; + + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +#[test] +fn check_trait_implementation_duplicate_method() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + y + 2 * x } - - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + // Duplicate trait methods should not compile + fn default(x: Field, y: Field) -> Field { + x + 2 * y } - - impl Default for u64 { - fn default() -> Self { - 0 + } + + fn main() {}"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); + assert_eq!(first_def, "default"); + assert_eq!(second_def, "default"); } - } - - impl Default for Foo { - fn default() -> Self { - Foo { a: Default::default() } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } - - fn main(a: Foo) -> pub bool { - a.is_default() - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_implementation_duplicate_method() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_return_type() { + let src = " + trait Default { + fn default() -> Self; + } + + struct Foo { + } + + impl Default for Foo { + fn default() -> Field { + 0 } - - impl Default for Foo { - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - y + 2 * x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - // Duplicate trait methods should not compile - fn default(x: Field, y: Field) -> Field { - x + 2 * y + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - } - - fn main() {}"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::TraitAssociatedFunction); - assert_eq!(first_def, "default"); - assert_eq!(second_def, "default"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; - } + }; } +} - #[test] - fn check_trait_wrong_method_return_type() { - let src = " - trait Default { - fn default() -> Self; - } - - struct Foo { +#[test] +fn check_trait_wrong_method_return_type2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, _y: Field) -> Field { + x } - - impl Default for Foo { - fn default() -> Field { - 0 + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TypeMismatch { + expected_typ, + expr_typ, + expr_span: _, + }) => { + assert_eq!(expected_typ, "Foo"); + assert_eq!(expr_typ, "Field"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_return_type2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_missing_implementation() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + + fn method2(x: Field) -> Field; + + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, _y: Field) -> Field { - x + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { + trait_name, + method_name, + trait_impl_span: _, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(method_name, "method2"); } - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TypeMismatch { - expected_typ, - expr_typ, - expr_span: _, - }) => { - assert_eq!(expected_typ, "Foo"); - assert_eq!(expr_typ, "Field"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_missing_implementation() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - - fn method2(x: Field) -> Field; - - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_not_in_scope() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + // Default trait does not exist + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { + trait_path, + }) => { + assert_eq!(trait_path.as_string(), "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitMissingMethod { - trait_name, - method_name, - trait_impl_span: _, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(method_name, "method2"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_not_in_scope() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_method_name() { + let src = " + trait Default { + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + // wrong trait name method should not compile + impl Default for Foo { + fn does_not_exist(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - // Default trait does not exist - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + }"; + let compilation_errors = get_program_errors(src); + assert!(!has_parser_error(&compilation_errors)); + assert!( + compilation_errors.len() == 1, + "Expected 1 compilation error, got: {:?}", + compilation_errors + ); + + for (err, _file_id) in compilation_errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { + trait_name, + impl_method, + }) => { + assert_eq!(trait_name, "Default"); + assert_eq!(impl_method, "does_not_exist"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::TraitNotFound { - trait_path, - }) => { - assert_eq!(trait_path.as_string(), "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_method_name() { - let src = " - trait Default { - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_trait_wrong_parameter() { + let src = " + trait Default { + fn default(x: Field) -> Self; + } + + struct Foo { + bar: u32, + } + + impl Default for Foo { + fn default(x: u32) -> Self { + Foo {bar: x} } - - // wrong trait name method should not compile - impl Default for Foo { - fn does_not_exist(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "u32"); } - } - - fn main() { - }"; - let compilation_errors = get_program_errors(src); - assert!(!has_parser_error(&compilation_errors)); - assert!( - compilation_errors.len() == 1, - "Expected 1 compilation error, got: {:?}", - compilation_errors - ); - - for (err, _file_id) in compilation_errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::MethodNotInTrait { - trait_name, - impl_method, - }) => { - assert_eq!(trait_name, "Default"); - assert_eq!(impl_method, "does_not_exist"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter() { - let src = " - trait Default { - fn default(x: Field) -> Self; - } - - struct Foo { - bar: u32, +#[test] +fn check_trait_wrong_parameter2() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; + } + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field, y: Foo) -> Self { + Self { bar: x, array: [x, y.bar] } } - - impl Default for Foo { - fn default(x: u32) -> Self { - Foo {bar: x} + } + + fn main() { + }"; + + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name, + expected_typ, + actual_typ, + .. + }) => { + assert_eq!(method_name, "default"); + assert_eq!(expected_typ, "Field"); + assert_eq!(actual_typ, "Foo"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "u32"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter2() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], - } - - impl Default for Foo { - fn default(x: Field, y: Foo) -> Self { - Self { bar: x, array: [x, y.bar] } +#[test] +fn check_trait_wrong_parameter_type() { + let src = " + trait Default { + fn default(x: Field, y: NotAType) -> Field; + } + + fn main(x: Field, y: Field) { + assert(y == x); + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::PathResolutionError( + PathResolutionError::Unresolved(ident), + )) => { + assert_eq!(ident, "NotAType"); } - } - - fn main() { - }"; - - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::TraitMethodParameterTypeMismatch { - method_name, - expected_typ, - actual_typ, - .. - }) => { - assert_eq!(method_name, "default"); - assert_eq!(expected_typ, "Field"); - assert_eq!(actual_typ, "Foo"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_wrong_parameter_type() { - let src = " - trait Default { - fn default(x: Field, y: NotAType) -> Field; - } - - fn main(x: Field, y: Field) { - assert(y == x); - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::PathResolutionError( - PathResolutionError::Unresolved(ident), - )) => { - assert_eq!(ident, "NotAType"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_wrong_parameters_count() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_wrong_parameters_count() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field) -> Self { + Self { bar: x, array: [x, x] } } - - impl Default for Foo { - fn default(x: Field) -> Self { - Self { bar: x, array: [x, x] } + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { + actual_num_parameters, + expected_num_parameters, + trait_name, + method_name, + .. 
+ }) => { + assert_eq!(actual_num_parameters, &1_usize); + assert_eq!(expected_num_parameters, &2_usize); + assert_eq!(method_name, "default"); + assert_eq!(trait_name, "Default"); } - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::TypeError(TypeCheckError::MismatchTraitImplNumParameters { - actual_num_parameters, - expected_num_parameters, - trait_name, - method_name, - .. - }) => { - assert_eq!(actual_num_parameters, &1_usize); - assert_eq!(expected_num_parameters, &2_usize); - assert_eq!(method_name, "default"); - assert_eq!(trait_name, "Default"); - } - _ => { - panic!("No other errors are expected in this test case! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected in this test case! Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_impl_for_non_type() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Field; - } - - impl Default for main { - fn default(x: Field, y: Field) -> Field { - x + y - } - } +#[test] +fn check_trait_impl_for_non_type() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Field; + } - fn main() {} - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::Expected { - expected, got, .. - }) => { - assert_eq!(expected, "type"); - assert_eq!(got, "function"); - } - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + impl Default for main { + fn default(x: Field, y: Field) -> Field { + x + y } } - #[test] - fn check_impl_struct_not_trait() { - let src = " - struct Foo { - bar: Field, - array: [Field; 2], - } - - struct Default { - x: Field, - z: Field, - } - - // Default is struct not a trait - impl Default for Foo { - fn default(x: Field, y: Field) -> Self { - Self { bar: x, array: [x,y] } + fn main() {} + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::Expected { expected, got, .. }) => { + assert_eq!(expected, "type"); + assert_eq!(got, "function"); } - } - - fn main() { - } - - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { - not_a_trait_name, - }) => { - assert_eq!(not_a_trait_name.to_string(), "plain::Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_declaration() { - let src = " - trait Default { - fn default(x: Field, y: Field) -> Self; - } - - struct Foo { - bar: Field, - array: [Field; 2], +#[test] +fn check_impl_struct_not_trait() { + let src = " + struct Foo { + bar: Field, + array: [Field; 2], + } + + struct Default { + x: Field, + z: Field, + } + + // Default is struct not a trait + impl Default for Foo { + fn default(x: Field, y: Field) -> Self { + Self { bar: x, array: [x,y] } } - - impl Default for Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: x, array: [x,y] } + } + + fn main() { + } + + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::NotATrait { + not_a_trait_name, + }) => { + assert_eq!(not_a_trait_name.to_string(), "plain::Default"); } - } - - - trait Default { - fn default(x: Field) -> Self; - } - - fn main() { - }"; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { - typ, - first_def, - second_def, - }) => { - assert_eq!(typ, &DuplicateType::Trait); - assert_eq!(first_def, "Default"); - assert_eq!(second_def, "Default"); - } - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); + } + }; } +} - #[test] - fn check_trait_duplicate_implementation() { - let src = " - trait Default { - } - struct Foo { - bar: Field, - } - - impl Default for Foo { - } - impl Default for Foo { - } - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! Found = {:?}", err); - } - }; - } +#[test] +fn check_trait_duplicate_declaration() { + let src = " + trait Default { + fn default(x: Field, y: Field) -> Self; } - - #[test] - fn check_trait_duplicate_implementation_with_alias() { - let src = " - trait Default { - } - - struct MyStruct { - } - - type MyType = MyStruct; - - impl Default for MyStruct { - } - - impl Default for MyType { - } - - fn main() { - } - "; - let errors = get_program_errors(src); - assert!(!has_parser_error(&errors)); - assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); - for (err, _file_id) in errors { - match &err { - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { - .. - }) => (), - CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { - .. - }) => (), - _ => { - panic!("No other errors are expected! 
Found = {:?}", err); - } - }; + + struct Foo { + bar: Field, + array: [Field; 2], + } + + impl Default for Foo { + fn default(x: Field,y: Field) -> Self { + Self { bar: x, array: [x,y] } } } + + + trait Default { + fn default(x: Field) -> Self; + } + + fn main() { + }"; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::Duplicate { + typ, + first_def, + second_def, + }) => { + assert_eq!(typ, &DuplicateType::Trait); + assert_eq!(first_def, "Default"); + assert_eq!(second_def, "Default"); + } + _ => { + panic!("No other errors are expected! Found = {:?}", err); + } + }; + } +} - #[test] - fn test_impl_self_within_default_def() { - let src = " - trait Bar { - fn ok(self) -> Self; - - fn ref_ok(self) -> Self { - self.ok() +#[test] +fn check_trait_duplicate_implementation() { + let src = " + trait Default { + } + struct Foo { + bar: Field, + } + + impl Default for Foo { + } + impl Default for Foo { + } + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! 
Found = {:?}", err); } - } + }; + } +} - impl Bar for (T, T) where T: Bar { - fn ok(self) -> Self { - self +#[test] +fn check_trait_duplicate_implementation_with_alias() { + let src = " + trait Default { + } + + struct MyStruct { + } + + type MyType = MyStruct; + + impl Default for MyStruct { + } + + impl Default for MyType { + } + + fn main() { + } + "; + let errors = get_program_errors(src); + assert!(!has_parser_error(&errors)); + assert!(errors.len() == 2, "Expected 2 errors, got: {:?}", errors); + for (err, _file_id) in errors { + match &err { + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImpl { + .. + }) => (), + CompilationError::DefinitionError(DefCollectorErrorKind::OverlappingImplNote { + .. + }) => (), + _ => { + panic!("No other errors are expected! Found = {:?}", err); } - }"; - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + }; } +} - #[test] - fn check_trait_as_type_as_fn_parameter() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } +#[test] +fn test_impl_self_within_default_def() { + let src = " + trait Bar { + fn ok(self) -> Self; - struct Foo { - a: u64, + fn ref_ok(self) -> Self { + self.ok() } + } - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } + impl Bar for (T, T) where T: Bar { + fn ok(self) -> Self { + self } + }"; + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - fn test_eq(x: impl Eq) -> bool { - x.eq(x) - } +#[test] +fn check_trait_as_type_as_fn_parameter() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - fn main(a: Foo) -> pub bool { - test_eq(a) - }"; + struct Foo { + a: u64, + } - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } } - #[test] - 
fn check_trait_as_type_as_two_fn_parameters() { - let src = " - trait Eq { - fn eq(self, other: Self) -> bool; - } + fn test_eq(x: impl Eq) -> bool { + x.eq(x) + } - trait Test { - fn test(self) -> bool; - } + fn main(a: Foo) -> pub bool { + test_eq(a) + }"; - struct Foo { - a: u64, - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} - impl Eq for Foo { - fn eq(self, other: Foo) -> bool { self.a == other.a } - } +#[test] +fn check_trait_as_type_as_two_fn_parameters() { + let src = " + trait Eq { + fn eq(self, other: Self) -> bool; + } - impl Test for u64 { - fn test(self) -> bool { self == self } - } + trait Test { + fn test(self) -> bool; + } - fn test_eq(x: impl Eq, y: impl Test) -> bool { - x.eq(x) == y.test() - } + struct Foo { + a: u64, + } - fn main(a: Foo, b: u64) -> pub bool { - test_eq(a, b) - }"; - - let errors = get_program_errors(src); - errors.iter().for_each(|err| println!("{:?}", err)); - assert!(errors.is_empty()); - } - - fn get_program_captures(src: &str) -> Vec> { - let (program, context, _errors) = get_program(src); - let interner = context.def_interner; - let mut all_captures: Vec> = Vec::new(); - for func in program.into_sorted().functions { - let func_id = interner.find_function(func.name()).unwrap(); - let hir_func = interner.function(&func_id); - // Iterate over function statements and apply filtering function - find_lambda_captures( - hir_func.block(&interner).statements(), - &interner, - &mut all_captures, - ); - } - all_captures - } - - fn find_lambda_captures( - stmts: &[StmtId], - interner: &NodeInterner, - result: &mut Vec>, - ) { - for stmt_id in stmts.iter() { - let hir_stmt = interner.statement(stmt_id); - let expr_id = match hir_stmt { - HirStatement::Expression(expr_id) => expr_id, - HirStatement::Let(let_stmt) => let_stmt.expression, - HirStatement::Assign(assign_stmt) => assign_stmt.expression, - HirStatement::Constrain(constr_stmt) => 
constr_stmt.0, - HirStatement::Semi(semi_expr) => semi_expr, - HirStatement::For(for_loop) => for_loop.block, - HirStatement::Error => panic!("Invalid HirStatement!"), - HirStatement::Break => panic!("Unexpected break"), - HirStatement::Continue => panic!("Unexpected continue"), - HirStatement::Comptime(_) => panic!("Unexpected comptime"), - }; - let expr = interner.expression(&expr_id); + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } - get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter - } + impl Test for u64 { + fn test(self) -> bool { self == self } } - fn get_lambda_captures( - expr: HirExpression, - interner: &NodeInterner, - result: &mut Vec>, - ) { - if let HirExpression::Lambda(lambda_expr) = expr { - let mut cur_capture = Vec::new(); + fn test_eq(x: impl Eq, y: impl Test) -> bool { + x.eq(x) == y.test() + } - for capture in lambda_expr.captures.iter() { - cur_capture.push(interner.definition(capture.ident.id).name.clone()); - } - result.push(cur_capture); + fn main(a: Foo, b: u64) -> pub bool { + test_eq(a, b) + }"; - // Check for other captures recursively within the lambda body - let hir_body_expr = interner.expression(&lambda_expr.body); - if let HirExpression::Block(block_expr) = hir_body_expr { - find_lambda_captures(block_expr.statements(), interner, result); - } - } + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); +} + +fn get_program_captures(src: &str) -> Vec> { + let (program, context, _errors) = get_program(src); + let interner = context.def_interner; + let mut all_captures: Vec> = Vec::new(); + for func in program.into_sorted().functions { + let func_id = interner.find_function(func.name()).unwrap(); + let hir_func = interner.function(&func_id); + // Iterate over function statements and apply filtering function + find_lambda_captures(hir_func.block(&interner).statements(), &interner, &mut all_captures); 
} + all_captures +} - #[test] - fn resolve_empty_function() { - let src = " - fn main() { +fn find_lambda_captures(stmts: &[StmtId], interner: &NodeInterner, result: &mut Vec>) { + for stmt_id in stmts.iter() { + let hir_stmt = interner.statement(stmt_id); + let expr_id = match hir_stmt { + HirStatement::Expression(expr_id) => expr_id, + HirStatement::Let(let_stmt) => let_stmt.expression, + HirStatement::Assign(assign_stmt) => assign_stmt.expression, + HirStatement::Constrain(constr_stmt) => constr_stmt.0, + HirStatement::Semi(semi_expr) => semi_expr, + HirStatement::For(for_loop) => for_loop.block, + HirStatement::Error => panic!("Invalid HirStatement!"), + HirStatement::Break => panic!("Unexpected break"), + HirStatement::Continue => panic!("Unexpected continue"), + HirStatement::Comptime(_) => panic!("Unexpected comptime"), + }; + let expr = interner.expression(&expr_id); - } - "; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_basic_function() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); - } - #[test] - fn resolve_unused_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(x == x); - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unused variable - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { - assert_eq!(&ident.0.contents, "y"); - } - _ => unreachable!("we should only have an unused var error"), - } + get_lambda_captures(expr, interner, result); // TODO: dyn filter function as parameter } +} - #[test] - fn resolve_unresolved_var() { - let src = r#" - fn main(x : Field) { - let y = x + x; - assert(y == z); - } - "#; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - // It should be regarding the unresolved var `z` 
(Maybe change to undeclared and special case) - match &errors[0].0 { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - span: _, - }) => assert_eq!(name, "z"), - _ => unimplemented!("we should only have an unresolved variable"), +fn get_lambda_captures( + expr: HirExpression, + interner: &NodeInterner, + result: &mut Vec>, +) { + if let HirExpression::Lambda(lambda_expr) = expr { + let mut cur_capture = Vec::new(); + + for capture in lambda_expr.captures.iter() { + cur_capture.push(interner.definition(capture.ident.id).name.clone()); + } + result.push(cur_capture); + + // Check for other captures recursively within the lambda body + let hir_body_expr = interner.expression(&lambda_expr.body); + if let HirExpression::Block(block_expr) = hir_body_expr { + find_lambda_captures(block_expr.statements(), interner, result); } } +} + +#[test] +fn resolve_empty_function() { + let src = " + fn main() { - #[test] - fn unresolved_path() { - let src = " - fn main(x : Field) { - let _z = some::path::to::a::func(x); - } - "; - let errors = get_program_errors(src); - assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "some"); - } - _ => unimplemented!("we should only have an unresolved function"), - }; - } - _ => unimplemented!(), - } } + "; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_basic_function() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} +#[test] +fn resolve_unused_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(x == x); + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); 
+ // It should be regarding the unused variable + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::UnusedVariable { ident }) => { + assert_eq!(&ident.0.contents, "y"); + } + _ => unreachable!("we should only have an unused var error"), } +} - #[test] - fn resolve_literal_expr() { - let src = r#" - fn main(x : Field) { - let y = 5; - assert(y == x); - } - "#; - assert!(get_program_errors(src).is_empty()); +#[test] +fn resolve_unresolved_var() { + let src = r#" + fn main(x : Field) { + let y = x + x; + assert(y == z); + } + "#; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + // It should be regarding the unresolved var `z` (Maybe change to undeclared and special case) + match &errors[0].0 { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { name, span: _ }) => { + assert_eq!(name, "z"); + } + _ => unimplemented!("we should only have an unresolved variable"), } +} - #[test] - fn multiple_resolution_errors() { - let src = r#" - fn main(x : Field) { - let y = foo::bar(x); - let z = y + a; - } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); - - // Errors are: - // `a` is undeclared - // `z` is unused - // `foo::bar` does not exist - for (compilation_error, _file_id) in errors { - match compilation_error { - CompilationError::ResolverError(err) => { - match err { - ResolverError::UnusedVariable { ident } => { - assert_eq!(&ident.0.contents, "z"); - } - ResolverError::VariableNotDeclared { name, .. 
} => { - assert_eq!(name, "a"); - } - ResolverError::PathResolutionError(PathResolutionError::Unresolved( - name, - )) => { - assert_eq!(name.to_string(), "foo"); - } - _ => unimplemented!(), - }; - } - _ => unimplemented!(), +#[test] +fn unresolved_path() { + let src = " + fn main(x : Field) { + let _z = some::path::to::a::func(x); + } + "; + let errors = get_program_errors(src); + assert!(errors.len() == 1, "Expected 1 error, got: {:?}", errors); + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "some"); + } + _ => unimplemented!("we should only have an unresolved function"), + }; } + _ => unimplemented!(), } } +} - #[test] - fn resolve_prefix_expr() { - let src = r#" - fn main(x : Field) { - let _y = -x; - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_literal_expr() { + let src = r#" + fn main(x : Field) { + let y = 5; + assert(y == x); + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_for_expr() { - let src = r#" - fn main(x : u64) { - for i in 1..20 { - let _z = x + i; +#[test] +fn multiple_resolution_errors() { + let src = r#" + fn main(x : Field) { + let y = foo::bar(x); + let z = y + a; + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 3, "Expected 3 errors, got: {:?}", errors); + + // Errors are: + // `a` is undeclared + // `z` is unused + // `foo::bar` does not exist + for (compilation_error, _file_id) in errors { + match compilation_error { + CompilationError::ResolverError(err) => { + match err { + ResolverError::UnusedVariable { ident } => { + assert_eq!(&ident.0.contents, "z"); + } + ResolverError::VariableNotDeclared { name, .. 
} => { + assert_eq!(name, "a"); + } + ResolverError::PathResolutionError(PathResolutionError::Unresolved(name)) => { + assert_eq!(name.to_string(), "foo"); + } + _ => unimplemented!(), }; } - "#; - assert!(get_program_errors(src).is_empty()); + _ => unimplemented!(), + } } +} - #[test] - fn resolve_call_expr() { - let src = r#" - fn main(x : Field) { - let _z = foo(x); - } +#[test] +fn resolve_prefix_expr() { + let src = r#" + fn main(x : Field) { + let _y = -x; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } - - #[test] - fn resolve_shadowing() { - let src = r#" - fn main(x : Field) { - let x = foo(x); - let x = x; - let (x, x) = (x, x); - let _ = x; - } +#[test] +fn resolve_for_expr() { + let src = r#" + fn main(x : u64) { + for i in 1..20 { + let _z = x + i; + }; + } + "#; + assert!(get_program_errors(src).is_empty()); +} - fn foo(x : Field) -> Field { - x - } - "#; - assert!(get_program_errors(src).is_empty()); - } +#[test] +fn resolve_call_expr() { + let src = r#" + fn main(x : Field) { + let _z = foo(x); + } - #[test] - fn resolve_basic_closure() { - let src = r#" - fn main(x : Field) -> pub Field { - let closure = |y| y + x; - closure(x) - } - "#; - assert!(get_program_errors(src).is_empty()); - } + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} - #[test] - fn resolve_simplified_closure() { - // based on bug https://github.com/noir-lang/noir/issues/1088 +#[test] +fn resolve_shadowing() { + let src = r#" + fn main(x : Field) { + let x = foo(x); + let x = x; + let (x, x) = (x, x); + let _ = x; + } - let src = r#"fn do_closure(x: Field) -> Field { - let y = x; - let ret_capture = || { - y - }; - ret_capture() - } - - fn main(x: Field) { - assert(do_closure(x) == 100); - } - - "#; - let parsed_captures = get_program_captures(src); - let expected_captures = vec![vec!["y".to_string()]]; - 
assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_complex_closures() { - let src = r#" - fn main(x: Field) -> pub Field { - let closure_without_captures = |x: Field| -> Field { x + x }; - let a = closure_without_captures(1); - - let closure_capturing_a_param = |y: Field| -> Field { y + x }; - let b = closure_capturing_a_param(2); - - let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; - let c = closure_capturing_a_local_var(3); - - let closure_with_transitive_captures = |y: Field| -> Field { - let d = 5; - let nested_closure = |z: Field| -> Field { - let doubly_nested_closure = |w: Field| -> Field { w + x + b }; - a + z + y + d + x + doubly_nested_closure(4) + x + y - }; - let res = nested_closure(5); - res + fn foo(x : Field) -> Field { + x + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_basic_closure() { + let src = r#" + fn main(x : Field) -> pub Field { + let closure = |y| y + x; + closure(x) + } + "#; + assert!(get_program_errors(src).is_empty()); +} + +#[test] +fn resolve_simplified_closure() { + // based on bug https://github.com/noir-lang/noir/issues/1088 + + let src = r#"fn do_closure(x: Field) -> Field { + let y = x; + let ret_capture = || { + y + }; + ret_capture() + } + + fn main(x: Field) { + assert(do_closure(x) == 100); + } + + "#; + let parsed_captures = get_program_captures(src); + let expected_captures = vec![vec!["y".to_string()]]; + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_complex_closures() { + let src = r#" + fn main(x: Field) -> pub Field { + let closure_without_captures = |x: Field| -> Field { x + x }; + let a = closure_without_captures(1); + + let closure_capturing_a_param = |y: Field| -> Field { y + x }; + let b = closure_capturing_a_param(2); + + let closure_capturing_a_local_var = |y: Field| -> Field { y + b }; + let c = closure_capturing_a_local_var(3); + + let closure_with_transitive_captures = |y: Field| -> Field { + let d = 
5; + let nested_closure = |z: Field| -> Field { + let doubly_nested_closure = |w: Field| -> Field { w + x + b }; + a + z + y + d + x + doubly_nested_closure(4) + x + y }; + let res = nested_closure(5); + res + }; + + a + b + c + closure_with_transitive_captures(6) + } + "#; + assert!(get_program_errors(src).is_empty(), "there should be no errors"); + + let expected_captures = vec![ + vec![], + vec!["x".to_string()], + vec!["b".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string()], + vec!["x".to_string(), "b".to_string(), "a".to_string(), "y".to_string(), "d".to_string()], + vec!["x".to_string(), "b".to_string()], + ]; + + let parsed_captures = get_program_captures(src); + + assert_eq!(expected_captures, parsed_captures); +} + +#[test] +fn resolve_fmt_strings() { + let src = r#" + fn main() { + let string = f"this is i: {i}"; + println(string); + + println(f"I want to print {0}"); + + let new_val = 10; + println(f"random_string{new_val}{new_val}"); + } + fn println(x : T) -> T { + x + } + "#; + + let errors = get_program_errors(src); + assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - a + b + c + closure_with_transitive_captures(6) + for (err, _file_id) in errors { + match &err { + CompilationError::ResolverError(ResolverError::VariableNotDeclared { + name, .. 
+ }) => { + assert_eq!(name, "i"); } - "#; - assert!(get_program_errors(src).is_empty(), "there should be no errors"); - - let expected_captures = vec![ - vec![], - vec!["x".to_string()], - vec!["b".to_string()], - vec!["x".to_string(), "b".to_string(), "a".to_string()], - vec![ - "x".to_string(), - "b".to_string(), - "a".to_string(), - "y".to_string(), - "d".to_string(), - ], - vec!["x".to_string(), "b".to_string()], - ]; - - let parsed_captures = get_program_captures(src); - - assert_eq!(expected_captures, parsed_captures); - } - - #[test] - fn resolve_fmt_strings() { - let src = r#" - fn main() { - let string = f"this is i: {i}"; - println(string); - - println(f"I want to print {0}"); - - let new_val = 10; - println(f"random_string{new_val}{new_val}"); + CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { + name, + .. + }) => { + assert_eq!(name, "0"); } - fn println(x : T) -> T { - x + CompilationError::TypeError(TypeCheckError::UnusedResultError { + expr_type: _, + expr_span, + }) => { + let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); + assert!( + a == "println(string)" + || a == "println(f\"I want to print {0}\")" + || a == "println(f\"random_string{new_val}{new_val}\")" + ); } - "#; - - let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); - - for (err, _file_id) in errors { - match &err { - CompilationError::ResolverError(ResolverError::VariableNotDeclared { - name, - .. - }) => { - assert_eq!(name, "i"); - } - CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. 
- }) => { - assert_eq!(name, "0"); - } - CompilationError::TypeError(TypeCheckError::UnusedResultError { - expr_type: _, - expr_span, - }) => { - let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); - assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" - ); - } - _ => unimplemented!(), - }; - } + _ => unimplemented!(), + }; } +} - fn check_rewrite(src: &str, expected: &str) { - let (_program, mut context, _errors) = get_program(src); - let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); - assert!(format!("{}", program) == expected); - } +fn check_rewrite(src: &str, expected: &str) { + let (_program, mut context, _errors) = get_program(src); + let main_func_id = context.def_interner.find_function("main").unwrap(); + let program = monomorphize(main_func_id, &mut context.def_interner).unwrap(); + assert!(format!("{}", program) == expected); +} - #[test] - fn simple_closure_with_no_captured_variables() { - let src = r#" - fn main() -> pub Field { - let x = 1; - let closure = || x; - closure() - } - "#; +#[test] +fn simple_closure_with_no_captured_variables() { + let src = r#" + fn main() -> pub Field { + let x = 1; + let closure = || x; + closure() + } + "#; - let expected_rewrite = r#"fn main$f0() -> Field { + let expected_rewrite = r#"fn main$f0() -> Field { let x$0 = 1; let closure$3 = { let closure_variable$2 = { @@ -1248,167 +1227,154 @@ fn lambda$f1(mut env$l1: (Field)) -> Field { env$l1.0 } "#; - check_rewrite(src, expected_rewrite); - } - - #[test] - fn deny_mutually_recursive_structs() { - let src = r#" - struct Foo { bar: Bar } - struct Bar { foo: Foo } - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_globals() { - let src = r#" - global A = B; - global B = A; - fn main() {} - "#; - 
assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_cyclic_type_aliases() { - let src = r#" - type A = B; - type B = A; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn ensure_nested_type_aliases_type_check() { - let src = r#" - type A = B; - type B = u8; - fn main() { - let _a: A = 0 as u16; - } - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn type_aliases_in_entry_point() { - let src = r#" - type Foo = u8; - fn main(_x: Foo) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn operators_in_global_used_in_type() { - let src = r#" - global ONE = 1; - global COUNT = ONE + 2; - fn main() { - let _array: [Field; COUNT] = [1, 2, 3]; - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } + check_rewrite(src, expected_rewrite); +} - #[test] - fn break_and_continue_in_constrained_fn() { - let src = r#" - fn main() { - for i in 0 .. 10 { - if i == 2 { - continue; - } - if i == 5 { - break; - } +#[test] +fn deny_cyclic_globals() { + let src = r#" + global A = B; + global B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_cyclic_type_aliases() { + let src = r#" + type A = B; + type B = A; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn ensure_nested_type_aliases_type_check() { + let src = r#" + type A = B; + type B = u8; + fn main() { + let _a: A = 0 as u16; + } + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn type_aliases_in_entry_point() { + let src = r#" + type Foo = u8; + fn main(_x: Foo) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn operators_in_global_used_in_type() { + let src = r#" + global ONE = 1; + global COUNT = ONE + 2; + fn main() { + let _array: [Field; COUNT] = [1, 2, 3]; + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn break_and_continue_in_constrained_fn() { + let src = r#" + 
fn main() { + for i in 0 .. 10 { + if i == 2 { + continue; + } + if i == 5 { + break; } } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - #[test] - fn break_and_continue_outside_loop() { - let src = r#" - unconstrained fn main() { - continue; - break; - } - "#; - assert_eq!(get_program_errors(src).len(), 2); - } +#[test] +fn break_and_continue_outside_loop() { + let src = r#" + unconstrained fn main() { + continue; + break; + } + "#; + assert_eq!(get_program_errors(src).len(), 2); +} - // Regression for #2540 - #[test] - fn for_loop_over_array() { - let src = r#" - fn hello(_array: [u1; N]) { - for _ in 0..N {} - } +// Regression for #2540 +#[test] +fn for_loop_over_array() { + let src = r#" + fn hello(_array: [u1; N]) { + for _ in 0..N {} + } - fn main() { - let array: [u1; 2] = [0, 1]; - hello(array); - } - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - // Regression for #4545 - #[test] - fn type_aliases_in_main() { - let src = r#" - type Outer = [u8; N]; - fn main(_arg: Outer<1>) {} - "#; - assert_eq!(get_program_errors(src).len(), 0); - } - - #[test] - fn ban_mutable_globals() { - // Mutable globals are only allowed in a comptime context - let src = r#" - mut global FOO: Field = 0; - fn main() {} - "#; - assert_eq!(get_program_errors(src).len(), 1); - } - - #[test] - fn deny_inline_attribute_on_unconstrained() { - let src = r#" - #[no_predicates] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError( - ResolverError::NoPredicatesAttributeOnUnconstrained { .. 
} - ) - )); - } + fn main() { + let array: [u1; 2] = [0, 1]; + hello(array); + } + "#; + assert_eq!(get_program_errors(src).len(), 0); +} - #[test] - fn deny_fold_attribute_on_unconstrained() { - let src = r#" - #[fold] - unconstrained fn foo(x: Field, y: Field) { - assert(x != y); - } - "#; - let errors = get_program_errors(src); - assert_eq!(errors.len(), 1); - assert!(matches!( - errors[0].0, - CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. }) - )); - } +// Regression for #4545 +#[test] +fn type_aliases_in_main() { + let src = r#" + type Outer = [u8; N]; + fn main(_arg: Outer<1>) {} + "#; + assert_eq!(get_program_errors(src).len(), 0); +} + +#[test] +fn ban_mutable_globals() { + // Mutable globals are only allowed in a comptime context + let src = r#" + mut global FOO: Field = 0; + fn main() {} + "#; + assert_eq!(get_program_errors(src).len(), 1); +} + +#[test] +fn deny_inline_attribute_on_unconstrained() { + let src = r#" + #[no_predicates] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::NoPredicatesAttributeOnUnconstrained { .. }) + )); +} + +#[test] +fn deny_fold_attribute_on_unconstrained() { + let src = r#" + #[fold] + unconstrained fn foo(x: Field, y: Field) { + assert(x != y); + } + "#; + let errors = get_program_errors(src); + assert_eq!(errors.len(), 1); + assert!(matches!( + errors[0].0, + CompilationError::ResolverError(ResolverError::FoldAttributeOnUnconstrained { .. 
}) + )); } diff --git a/compiler/noirc_frontend/src/tests/name_shadowing.rs b/compiler/noirc_frontend/src/tests/name_shadowing.rs new file mode 100644 index 0000000000..b0d8351003 --- /dev/null +++ b/compiler/noirc_frontend/src/tests/name_shadowing.rs @@ -0,0 +1,419 @@ +#![cfg(test)] +use super::get_program_errors; +use std::collections::HashSet; + +#[test] +fn test_name_shadowing() { + let src = " + trait Default { + fn default() -> Self; + } + + impl Default for bool { + fn default() -> bool { + false + } + } + + impl Default for Field { + fn default() -> Field { + 0 + } + } + + impl Default for [T; N] where T: Default { + fn default() -> [T; N] { + [Default::default(); N] + } + } + + impl Default for (T, U) where T: Default, U: Default { + fn default() -> (T, U) { + (Default::default(), Default::default()) + } + } + + fn drop_var(_x: T, y: U) -> U { y } + + mod local_module { + use crate::{Default, drop_var}; + + global LOCAL_GLOBAL_N: Field = 0; + + global LOCAL_GLOBAL_M: Field = 1; + + struct LocalStruct { + field1: A, + field2: B, + field3: [A; N], + field4: ([A; N], [B; M]), + field5: &mut A, + } + + impl Default for LocalStruct where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + field1: Default::default(), + field2: Default::default(), + field3: Default::default(), + field4: Default::default(), + field5: mut_field, + } + } + } + + trait DefinedInLocalModule1 { + fn trait_fn1(self, x: A); + fn trait_fn2(self, y: B); + fn trait_fn3(&mut self, x: A, y: B); + fn trait_fn4(self, x: [A; 0], y: [B]); + fn trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl DefinedInLocalModule1 for LocalStruct { + fn trait_fn1(self, _x: A) { drop_var(self, ()) } + fn trait_fn2(self, _y: B) { drop_var(self, ()) } + fn trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + 
fn trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn local_fn4(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + let x: Field = 0; + assert(x == 0); + let x: Field = 1; + assert(x == 1); + [] + } + } + + mod library { + use crate::{Default, drop_var}; + + mod library2 { + use crate::{Default, drop_var}; + + global IMPORT_GLOBAL_N_2: Field = 4; + + global IMPORT_GLOBAL_M_2: Field = 5; + + // When we re-export this type from another library and then use it in + // main, we get a panic + struct ReExportMeFromAnotherLib1 { + x : Field, + } + + struct PubLibLocalStruct3 { + pub_field1: A, + pub_field2: B, + pub_field3: [A; N], + pub_field4: ([A; N], [B; M]), + pub_field5: &mut A, + } + + impl Default for PubLibLocalStruct3 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + pub_field1: Default::default(), + pub_field2: Default::default(), + pub_field3: Default::default(), + pub_field4: Default::default(), + pub_field5: mut_field, + } + } + } + + trait PubLibDefinedInLocalModule3 { + fn pub_trait_fn1(self, x: A); + fn pub_trait_fn2(self, y: B); + fn pub_trait_fn3(&mut self, x: A, y: B); + fn pub_trait_fn4(self, x: [A; 0], y: [B]); + fn pub_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn pub_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubLibDefinedInLocalModule3 for PubLibLocalStruct3 { + fn pub_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn pub_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn pub_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn pub_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn pub_trait_fn5(self, _x: [A; N], _y: [B; M]) 
-> [A; 0] { drop_var(self, []) } + fn pub_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn PubLiblocal_fn3(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + [] + } + } + + // Re-export + use library2::ReExportMeFromAnotherLib1; + + global IMPORT_GLOBAL_N_1: Field = 2; + + global IMPORT_GLOBAL_M_1: Field = 3; + + struct LibLocalStruct1 { + lib_field1: A, + lib_field2: B, + lib_field3: [A; N], + lib_field4: ([A; N], [B; M]), + lib_field5: &mut A, + } + + impl Default for LibLocalStruct1 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + lib_field1: Default::default(), + lib_field2: Default::default(), + lib_field3: Default::default(), + lib_field4: Default::default(), + lib_field5: mut_field, + } + } + } + + trait LibDefinedInLocalModule1 { + fn lib_trait_fn1(self, x: A); + fn lib_trait_fn2(self, y: B); + fn lib_trait_fn3(&mut self, x: A, y: B); + fn lib_trait_fn4(self, x: [A; 0], y: [B]); + fn lib_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn lib_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl LibDefinedInLocalModule1 for LibLocalStruct1 { + fn lib_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn lib_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn lib_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn lib_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn lib_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + fn lib_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, []) } + } + + pub fn Liblocal_fn1(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + [] + } + } + + mod library3 { + use crate::{Default, drop_var}; + + global IMPORT_GLOBAL_N_3: Field = 6; + + global 
IMPORT_GLOBAL_M_3: Field = 7; + + struct ReExportMeFromAnotherLib2 { + x : Field, + } + + struct PubCrateLibLocalStruct2 { + crate_field1: A, + crate_field2: B, + crate_field3: [A; N], + crate_field4: ([A; N], [B; M]), + crate_field5: &mut A, + } + + impl Default for PubCrateLibLocalStruct2 where A: Default, B: Default { + fn default() -> Self { + let mut mut_field = &mut Default::default(); + Self { + crate_field1: Default::default(), + crate_field2: Default::default(), + crate_field3: Default::default(), + crate_field4: Default::default(), + crate_field5: mut_field, + } + } + } + + trait PubCrateLibDefinedInLocalModule2 { + fn crate_trait_fn1(self, x: A); + fn crate_trait_fn2(self, y: B); + fn crate_trait_fn3(&mut self, x: A, y: B); + fn crate_trait_fn4(self, x: [A; 0], y: [B]); + fn crate_trait_fn5(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn6(self, x: [A; N], y: [B; M]) -> [A; 0]; + fn crate_trait_fn7(self, _x: fn([A; 0]) -> B) -> Field { + drop_var(self, N + M) + } + } + + impl PubCrateLibDefinedInLocalModule2 for PubCrateLibLocalStruct2 { + fn crate_trait_fn1(self, _x: A) { drop_var(self, ()) } + fn crate_trait_fn2(self, _y: B) { drop_var(self, ()) } + fn crate_trait_fn3(&mut self, _x: A, _y: B) { drop_var(self, ()) } + fn crate_trait_fn4(self, _x: [A; 0], _y: [B]) { drop_var(self, ()) } + fn crate_trait_fn5(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + fn crate_trait_fn6(self, _x: [A; N], _y: [B; M]) -> [A; 0] { drop_var(self, ()); [] } + } + + pub(crate) fn PubCrateLiblocal_fn2(_x: (A, B), _y: [Field; N], _z: [Field; M]) -> [A; 0] { + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + [] + } + } + + + use crate::local_module::{local_fn4, LocalStruct, DefinedInLocalModule1, LOCAL_GLOBAL_N, LOCAL_GLOBAL_M}; + + use library::{ReExportMeFromAnotherLib1, LibLocalStruct1, LibDefinedInLocalModule1, Liblocal_fn1, IMPORT_GLOBAL_N_1, IMPORT_GLOBAL_M_1}; + + // overlapping + // use library::library2::ReExportMeFromAnotherLib1; + use 
crate::library::library2::{PubLibLocalStruct3, PubLibDefinedInLocalModule3, PubLiblocal_fn3, IMPORT_GLOBAL_N_2, IMPORT_GLOBAL_M_2}; + + use library3::{ReExportMeFromAnotherLib2, PubCrateLibLocalStruct2, PubCrateLibDefinedInLocalModule2, PubCrateLiblocal_fn2, IMPORT_GLOBAL_N_3, IMPORT_GLOBAL_M_3}; + + + fn main(_x: ReExportMeFromAnotherLib1, _y: ReExportMeFromAnotherLib2) { + assert(LOCAL_GLOBAL_N != LOCAL_GLOBAL_M); + assert(IMPORT_GLOBAL_N_1 != IMPORT_GLOBAL_M_1); + assert(IMPORT_GLOBAL_N_2 != IMPORT_GLOBAL_M_2); + assert(IMPORT_GLOBAL_N_3 != IMPORT_GLOBAL_M_3); + + let x: LocalStruct = Default::default(); + assert(drop_var(x.trait_fn5([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + assert(drop_var(x.trait_fn6([0; LOCAL_GLOBAL_N], [false; LOCAL_GLOBAL_M]), true)); + + let x: LibLocalStruct1 = Default::default(); + assert(drop_var(x.lib_trait_fn5([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + assert(drop_var(x.lib_trait_fn6([0; IMPORT_GLOBAL_N_1], [false; IMPORT_GLOBAL_M_1]), true)); + + let x: PubLibLocalStruct3 = Default::default(); + assert(drop_var(x.pub_trait_fn5([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + assert(drop_var(x.pub_trait_fn6([0; IMPORT_GLOBAL_N_2], [false; IMPORT_GLOBAL_M_2]), true)); + + let x: PubCrateLibLocalStruct2 = Default::default(); + assert(drop_var(x.crate_trait_fn5([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + assert(drop_var(x.crate_trait_fn6([0; IMPORT_GLOBAL_N_3], [false; IMPORT_GLOBAL_M_3]), true)); + + assert(drop_var(local_fn2((0, 1), [], []), true)); + assert(drop_var(Liblocal_fn1((0, 1), [], []), true)); + assert(drop_var(PubLiblocal_fn4((0, 1), [], []), true)); + assert(drop_var(PubCrateLiblocal_fn3((0, 1), [], []), true)); + }"; + + // NOTE: these names must be "replacement-unique", i.e. 
+ // replacing one in a distinct name should do nothing + let names_to_collapse = [ + "DefinedInLocalModule1", + "IMPORT_GLOBAL_M_1", + "IMPORT_GLOBAL_M_2", + "IMPORT_GLOBAL_M_3", + "IMPORT_GLOBAL_N_1", + "IMPORT_GLOBAL_N_2", + "IMPORT_GLOBAL_N_3", + "LOCAL_GLOBAL_M", + "LOCAL_GLOBAL_N", + "LibDefinedInLocalModule1", + "LibLocalStruct1", + "Liblocal_fn1", + "LocalStruct", + "PubCrateLibDefinedInLocalModule2", + "PubCrateLibLocalStruct2", + "PubCrateLiblocal_fn2", + "PubLibDefinedInLocalModule3", + "PubLibLocalStruct3", + "PubLiblocal_fn3", + "ReExportMeFromAnotherLib1", + "ReExportMeFromAnotherLib2", + "local_fn4", + "crate_field1", + "crate_field2", + "crate_field3", + "crate_field4", + "crate_field5", + "crate_trait_fn1", + "crate_trait_fn2", + "crate_trait_fn3", + "crate_trait_fn4", + "crate_trait_fn5", + "crate_trait_fn6", + "crate_trait_fn7", + "field1", + "field2", + "field3", + "field4", + "field5", + "lib_field1", + "lib_field2", + "lib_field3", + "lib_field4", + "lib_field5", + "lib_trait_fn1", + "lib_trait_fn2", + "lib_trait_fn3", + "lib_trait_fn4", + "lib_trait_fn5", + "lib_trait_fn6", + "lib_trait_fn7", + "pub_field1", + "pub_field2", + "pub_field3", + "pub_field4", + "pub_field5", + "pub_trait_fn1", + "pub_trait_fn2", + "pub_trait_fn3", + "pub_trait_fn4", + "pub_trait_fn5", + "pub_trait_fn6", + "pub_trait_fn7", + "trait_fn1", + "trait_fn2", + "trait_fn3", + "trait_fn4", + "trait_fn5", + "trait_fn6", + "trait_fn7", + ]; + + // TODO(https://github.com/noir-lang/noir/issues/4973): + // Name resolution panic from name shadowing test + let cases_to_skip = [ + (1, 21), + (2, 11), + (2, 21), + (3, 11), + (3, 18), + (3, 21), + (4, 21), + (5, 11), + (5, 21), + (6, 11), + (6, 18), + (6, 21), + ]; + let cases_to_skip: HashSet<(usize, usize)> = cases_to_skip.into_iter().collect(); + + for (i, x) in names_to_collapse.iter().enumerate() { + for (j, y) in names_to_collapse.iter().enumerate().filter(|(j, _)| i < *j) { + if !cases_to_skip.contains(&(i, j)) { + dbg!((i, 
j)); + + let modified_src = src.replace(x, y); + let errors = get_program_errors(&modified_src); + assert!(!errors.is_empty(), "Expected errors, got: {:?}", errors); + } + } + } +} diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index de157a1fe2..57b17a6f79 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -1,3 +1,4 @@ +use acvm::acir::circuit::ExpressionWidth; use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; @@ -169,9 +170,10 @@ pub fn compile_program( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_program = noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) @@ -184,7 +186,8 @@ pub fn compile_program( })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -199,9 +202,10 @@ pub fn compile_contract( console_error_panic_hook::set_once(); let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; - let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter - let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + let compile_options = CompileOptions { + expression_width: ExpressionWidth::Bounded { width: 4 }, + ..CompileOptions::default() + }; let compiled_contract = noirc_driver::compile_contract(&mut context, crate_id, &compile_options) @@ -214,7 +218,8 @@ pub fn compile_contract( })? 
.0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs index c187fe7f3d..4f11cafb97 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -3,6 +3,7 @@ use crate::compile::{ PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use acvm::acir::circuit::ExpressionWidth; use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use nargo::parse_all; use noirc_driver::{ @@ -96,11 +97,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; let root_crate_id = *self.context.root_crate_id(); - let compiled_program = compile_main(&mut self.context, root_crate_id, &compile_options, None) .map_err(|errs| { @@ -112,7 +116,8 @@ impl CompilerContext { })? 
.0; - let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let optimized_program = + nargo::ops::transform_program(compiled_program, compile_options.expression_width); let warnings = optimized_program.warnings.clone(); Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) @@ -122,10 +127,14 @@ impl CompilerContext { mut self, program_width: usize, ) -> Result { - let compile_options = CompileOptions::default(); - let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; - let root_crate_id = *self.context.root_crate_id(); + let expression_width = if program_width == 0 { + ExpressionWidth::Unbounded + } else { + ExpressionWidth::Bounded { width: 4 } + }; + let compile_options = CompileOptions { expression_width, ..CompileOptions::default() }; + let root_crate_id = *self.context.root_crate_id(); let compiled_contract = compile_contract(&mut self.context, root_crate_id, &compile_options) .map_err(|errs| { @@ -137,7 +146,8 @@ impl CompilerContext { })? 
.0; - let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); + let optimized_contract = + nargo::ops::transform_contract(compiled_contract, compile_options.expression_width); let functions = optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); @@ -166,7 +176,7 @@ pub fn compile_program_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_program(program_width) } @@ -183,7 +193,7 @@ pub fn compile_contract_( let compiler_context = prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; - let program_width = 3; + let program_width = 4; compiler_context.compile_contract(program_width) } diff --git a/cspell.json b/cspell.json index bf3040265c..5fc9ad9d1b 100644 --- a/cspell.json +++ b/cspell.json @@ -62,6 +62,12 @@ "defunctionalization", "defunctionalize", "defunctionalized", + "demonomorphization", + "demonomorphize", + "demonomorphized", + "demonomorphizer", + "demonomorphizes", + "demonomorphizing", "deque", "desugared", "devcontainer", diff --git a/docs/docs/getting_started/barretenberg/_category_.json b/docs/docs/getting_started/barretenberg/_category_.json new file mode 100644 index 0000000000..27a8e89228 --- /dev/null +++ b/docs/docs/getting_started/barretenberg/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 1, + "label": "Install Barretenberg", + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/getting_started/barretenberg/index.md b/docs/docs/getting_started/barretenberg/index.md new file mode 100644 index 0000000000..f435ae151f --- /dev/null +++ b/docs/docs/getting_started/barretenberg/index.md @@ -0,0 +1,54 @@ +--- +title: Barretenberg Installation +description: + `bb` is a command line tool for interacting with Aztec's proving backend Barretenberg. 
This page is a quick guide on how to install `bb` +keywords: [ + Barretenberg + bb + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches +] +pagination_next: getting_started/hello_noir/index +--- + +`bb` is the CLI tool for generating and verifying proofs for Noir programs using the Barretenberg proving library. It also allows generating solidity verifier contracts for which you can verify contracts which were constructed using `bb`. + +## Installing `bb` + +Open a terminal on your machine, and write: + +##### macOS (Apple Silicon) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-aarch64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-aarch64-apple-darwin.tar.gz && \ +tar -xvf ./barretenberg-aarch64-apple-darwin.tar.gz -C $HOME/.barretenberg/ && \ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### macOS (Intel) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-x86_64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-apple-darwin.tar.gz && \ +tar -xvf ./barretenberg-x86_64-apple-darwin.tar.gz -C $HOME/.barretenberg/ && \ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.zshrc && \ +source ~/.zshrc +``` + +##### Linux (Bash) + +```bash +mkdir -p $HOME/.barretenberg && \ +curl -o ./barretenberg-x86_64-linux-gnu.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-x86_64-linux-gnu.tar.gz && \ +tar -xvf ./barretenberg-x86_64-linux-gnu.tar.gz -C $HOME/.barretenberg/ && \ +echo -e 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.bashrc && \ +source ~/.bashrc +``` + +Now we're ready to start working on [our first Noir program!](../hello_noir/index.md) diff --git a/docs/docs/getting_started/hello_noir/_category_.json 
b/docs/docs/getting_started/hello_noir/_category_.json index 23b560f610..976a2325de 100644 --- a/docs/docs/getting_started/hello_noir/_category_.json +++ b/docs/docs/getting_started/hello_noir/_category_.json @@ -1,5 +1,5 @@ { - "position": 1, + "position": 2, "collapsible": true, "collapsed": true } diff --git a/docs/docs/getting_started/hello_noir/index.md b/docs/docs/getting_started/hello_noir/index.md index 743c4d8d63..1ade3f09ae 100644 --- a/docs/docs/getting_started/hello_noir/index.md +++ b/docs/docs/getting_started/hello_noir/index.md @@ -90,13 +90,11 @@ cd hello_world nargo check ``` -Two additional files would be generated in your project directory: +A _Prover.toml_ file will be generated in your project directory, to allow specifying input values to the program. -_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. +## Execute Our Noir Program -## Prove Our Noir Program - -Now that the project is set up, we can create a proof of correct execution of our Noir program. +Now that the project is set up, we can execute our Noir program. Fill in input values for execution in the _Prover.toml_ file. For example: @@ -105,37 +103,42 @@ x = "1" y = "2" ``` -Prove the valid execution of your Noir program: +Execute your Noir program: ```sh -nargo prove +nargo execute witness-name ``` -A new folder _proofs_ would then be generated in your project directory, containing the proof file -`.proof`, where the project name is defined in Nargo.toml. +The witness corresponding to this execution will then be written to the file `./target/witness-name.gz`. -The _Verifier.toml_ file would also be updated with the public values computed from program -execution (in this case the value of `y`): +## Prove Our Noir Program -```toml -y = "0x0000000000000000000000000000000000000000000000000000000000000002" +:::info + +Nargo no longer handles communicating with backends in order to generate proofs. 
In order to prove/verify your Noir programs, you'll need an installation of [bb](../barretenberg/index.md). + +::: + +Prove the valid execution of your Noir program using `bb`: + +```sh +bb prove -b ./target/hello_world.json -w ./target/witness-name.gz -o ./proof ``` -> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. +A new file called `proof` will be generated in your project directory, containing the generated proof for your program. ## Verify Our Noir Program -Once a proof is generated, we can verify correct execution of our Noir program by verifying the -proof file. +Once a proof is generated, we can verify correct execution of our Noir program by verifying the proof file. Verify your proof by running: ```sh -nargo verify +bb write_vk -b ./target/hello_world.json -o ./target/vk +bb verify -k ./target/vk -p ./proof ``` -The verification will complete in silence if it is successful. If it fails, it will log the -corresponding error instead. +The verification will complete in silence if it is successful. If it fails, it will log the corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! diff --git a/docs/docs/getting_started/hello_noir/project_breakdown.md b/docs/docs/getting_started/hello_noir/project_breakdown.md index 6160a102c6..29688df148 100644 --- a/docs/docs/getting_started/hello_noir/project_breakdown.md +++ b/docs/docs/getting_started/hello_noir/project_breakdown.md @@ -1,10 +1,10 @@ --- title: Project Breakdown description: - Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML - files, and how to prove and verify your program. + Learn about the anatomy of a Nargo project, including the purpose of the Prover TOML + file, and how to prove and verify your program. 
keywords: - [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] + [Nargo, Nargo project, Prover.toml, proof verification, private asset transfer] sidebar_position: 2 --- @@ -18,7 +18,6 @@ commands, you would get a minimal Nargo project of the following structure: - src - Prover.toml - - Verifier.toml - Nargo.toml The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ @@ -28,10 +27,6 @@ file will be generated within it. _Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. -### Verifier.toml - -_Verifier.toml_ contains public in/output values computed when executing the Noir program. - ### Nargo.toml _Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section. @@ -92,20 +87,15 @@ fn main(x : Field, y : Field) { } ``` -The parameters `x` and `y` can be seen as the API for the program and must be supplied by the -prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when -verifying the proof. +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when verifying the proof. The prover supplies the values for `x` and `y` in the _Prover.toml_ file. -As for the program body, `assert` ensures that the condition to be satisfied (e.g. `x != y`) is -constrained by the proof of the execution of said program (i.e. if the condition was not met, the -verifier would reject the proof as an invalid proof). +As for the program body, `assert` ensures that the condition to be satisfied (e.g. 
`x != y`) is constrained by the proof of the execution of said program (i.e. if the condition was not met, the verifier would reject the proof as an invalid proof). ### Prover.toml -The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and -public). +The _Prover.toml_ file is a file which the prover uses to supply the inputs to the Noir program (both private and public). In our hello world program the _Prover.toml_ file looks like this: @@ -114,12 +104,9 @@ x = "1" y = "2" ``` -When the command `nargo prove` is executed, two processes happen: - -1. Noir creates a proof that `x`, which holds the value of `1`, and `y`, which holds the value of `2`, - is not equal. This inequality constraint is due to the line `assert(x != y)`. +When the command `nargo execute` is executed, nargo will execute the Noir program using the inputs specified in `Prover.toml`, aborting if it finds that these do not satisfy the constraints defined by `main`. In this example, `x` and `y` must satisfy the inequality constraint `assert(x != y)`. -2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. +If an output name is specified such as `nargo execute foo`, the witness generated by this execution will be written to `./target/foo.gz`. This can then be used to generate a proof of the execution. #### Arrays of Structs @@ -155,45 +142,18 @@ baz = 2 #### Custom toml files -You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags. +You can specify a `toml` file with a different name to use for execution by using the `--prover-name` or `-p` flags. 
-This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the default **Prover.toml** and generates the witness and saves it at `./target/foo.gz`: ```bash -nargo prove +nargo execute foo ``` -This command looks for proof inputs in the custom **OtherProver.toml** and generates proof and saves it at `./proofs/.proof`: +This command looks for proof inputs in the custom **OtherProver.toml** and generates the witness and saves it at `./target/bar.gz`: ```bash -nargo prove -p OtherProver +nargo execute -p OtherProver bar ``` -## Verifying a Proof - -When the command `nargo verify` is executed, two processes happen: - -1. Noir checks in the _proofs_ directory for a proof file with the project name (eg. test_project.proof) - -2. If that file is found, the proof's validity is checked - -> **Note:** The validity of the proof is linked to the current Noir program; if the program is -> changed and the verifier verifies the proof, it will fail because the proof is not valid for the -> _modified_ Noir program. - -In production, the prover and the verifier are usually two separate entities. A prover would -retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the -verifier. The verifier would then retrieve the public inputs, usually from external sources, and -verify the validity of the proof against it. - -Take a private asset transfer as an example: - -A person using a browser as the prover would retrieve private inputs locally (e.g. the user's private key) and -public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof -and submit it to the verifier smart contract. - -The verifier contract would then draw the user's encrypted balance directly from the blockchain and -verify the proof submitted against it. 
If the verification passes, additional functions in the -verifier contract could trigger (e.g. approve the asset transfer). - Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/docs/getting_started/tooling/noir_codegen.md b/docs/docs/getting_started/tooling/noir_codegen.md index d65151da0a..1c04058534 100644 --- a/docs/docs/getting_started/tooling/noir_codegen.md +++ b/docs/docs/getting_started/tooling/noir_codegen.md @@ -2,7 +2,7 @@ title: Noir Codegen for TypeScript description: Learn how to use Noir codegen to generate TypeScript bindings keywords: [Nargo, Noir, compile, TypeScript] -sidebar_position: 2 +sidebar_position: 3 --- When using TypeScript, it is extra work to interpret Noir program outputs in a type-safe way. Third party libraries may exist for popular Noir programs, but they are either hard to find or unmaintained. diff --git a/docs/docs/how_to/how-to-oracles.md b/docs/docs/how_to/how-to-oracles.md index 8cf8035a5c..5f427f1e23 100644 --- a/docs/docs/how_to/how-to-oracles.md +++ b/docs/docs/how_to/how-to-oracles.md @@ -177,7 +177,7 @@ interface ForeignCallResult { ## Step 3 - Usage with Nargo -Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test` and `nargo execute` commands by passing a value to `--oracle-resolver`. For example: ```bash nargo test --oracle-resolver http://localhost:5555 @@ -203,7 +203,7 @@ As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_j Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? -You don't technically have to, but then how would you run `nargo test` or `nargo prove`? 
To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. +You don't technically have to, but then how would you run `nargo test`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. ::: diff --git a/docs/docs/how_to/how-to-solidity-verifier.md b/docs/docs/how_to/how-to-solidity-verifier.md index e3c7c1065d..7c96e22b8d 100644 --- a/docs/docs/how_to/how-to-solidity-verifier.md +++ b/docs/docs/how_to/how-to-solidity-verifier.md @@ -43,11 +43,19 @@ Generating a Solidity Verifier contract is actually a one-command process. Howev This is by far the most straight-forward step. Just run: ```sh -nargo codegen-verifier +nargo compile ``` -A new `contract` folder would then be generated in your project directory, containing the Solidity -file `plonk_vk.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. +This will compile your source code into a Noir build artifact to be stored in the `./target` directory, you can then generate the smart contract using the commands: + +```sh +# Here we pass the path to the newly generated Noir artifact. +bb write_vk -b ./target/.json +bb contract +``` + +replacing `` with the name of your Noir project. A new `contract` folder would then be generated in your project directory, containing the Solidity +file `contract.sol`. It can be deployed to any EVM blockchain acting as a verifier smart contract. :::info @@ -123,11 +131,25 @@ To verify a proof using the Solidity verifier contract, we call the `verify` fun function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) ``` -When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. 
For `_proof`, run `nargo prove` and use the string in `proof/.proof` (adding the hex `0x` prefix). We can also copy the public input from `Verifier.toml`, as it will be properly formatted as 32-byte strings: +When using the default example in the [Hello Noir](../getting_started/hello_noir/index.md) guide, the easiest way to confirm that the verifier contract is doing its job is by calling the `verify` function via remix with the required parameters. Note that the public inputs must be passed in separately to the rest of the proof so we must split the proof as returned from `bb`. +First generate a proof with `bb` at the location `./proof` using the steps in [get started](../getting_started/hello_noir/index.md), this proof is in a binary format but we want to convert it into a hex string to pass into Remix, this can be done with the following commands: + +```bash +# This value must be changed to match the number of public inputs (including return values!) in your program. +NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=32*NUM_PUBLIC_INPUTS +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES ./proof | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) ./proof | od -An -v -t x1 | tr -d $' \n') + +echo "Public inputs:" +echo $HEX_PUBLIC_INPUTS + +echo "Proof:" +echo "0x$HEX_PROOF" ``` -0x...... , [0x0000.....02] -``` + +Remix expects that the public inputs will be split into an array of `bytes32` values so `HEX_PUBLIC_INPUTS` needs to be split up into 32 byte chunks which are prefixed with `0x` accordingly. A programmatic example of how the `verify` function is called can be seen in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): @@ -144,11 +166,9 @@ function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 null :::info[Return Values] -A circuit doesn't have the concept of a return value. 
Return values are just syntactic sugar in -Noir. +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in Noir. -Under the hood, the return value is passed as an input to the circuit and is checked at the end of -the circuit program. +Under the hood, the return value is passed as an input to the circuit and is checked at the end of the circuit program. For example, if you have Noir program like this: @@ -162,11 +182,11 @@ fn main( ) -> pub Field ``` -the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Like before, these values are populated in Verifier.toml after running `nargo prove`. +the `verify` function will expect the public inputs array (second function parameter) to be of length 3, the two inputs and the return value. Passing only two inputs will result in an error such as `PUBLIC_INPUT_COUNT_INVALID(3, 2)`. -In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. +In this case, the inputs parameter to `verify` would be an array ordered as `[pubkey_x, pubkey_y, return]`. ::: diff --git a/docs/docs/noir/concepts/data_types/booleans.md b/docs/docs/noir/concepts/data_types/booleans.md index 69826fcd72..3dcfa83681 100644 --- a/docs/docs/noir/concepts/data_types/booleans.md +++ b/docs/docs/noir/concepts/data_types/booleans.md @@ -23,9 +23,6 @@ fn main() { } ``` -> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for -> `false` in _Verifier.toml_. - The boolean type is most commonly used in conditionals like `if` expressions and `assert` statements. More about conditionals is covered in the [Control Flow](../control_flow) and [Assert Function](../assert) sections.
diff --git a/docs/docs/noir/concepts/data_types/integers.md b/docs/docs/noir/concepts/data_types/integers.md index 6b2d377391..c14fffa717 100644 --- a/docs/docs/noir/concepts/data_types/integers.md +++ b/docs/docs/noir/concepts/data_types/integers.md @@ -115,7 +115,7 @@ y = "1" Would result in: ``` -$ nargo prove +$ nargo execute error: Assertion failed: 'attempt to add with overflow' ┌─ ~/src/main.nr:9:13 │ diff --git a/docs/docs/noir/standard_library/traits.md b/docs/docs/noir/standard_library/traits.md index 08370dde9e..96a7b8e2f2 100644 --- a/docs/docs/noir/standard_library/traits.md +++ b/docs/docs/noir/standard_library/traits.md @@ -241,6 +241,15 @@ impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } Implementations: #include_code neg-trait-impls noir_stdlib/src/ops/arith.nr rust +### `std::ops::Not` + +#include_code not-trait noir_stdlib/src/ops/bit.nr rust + +`Not::not` is equivalent to the unary bitwise NOT operator `!`. + +Implementations: +#include_code not-trait-impls noir_stdlib/src/ops/bit.nr rust + ### `std::ops::{ BitOr, BitAnd, BitXor }` #include_code bitor-trait noir_stdlib/src/ops/bit.nr rust diff --git a/examples/codegen-verifier/.gitignore b/examples/codegen-verifier/.gitignore new file mode 100644 index 0000000000..c0d62c447d --- /dev/null +++ b/examples/codegen-verifier/.gitignore @@ -0,0 +1,4 @@ +out +cache +target +src/contract.sol \ No newline at end of file diff --git a/examples/codegen-verifier/Nargo.toml b/examples/codegen-verifier/Nargo.toml new file mode 100644 index 0000000000..2b367f30db --- /dev/null +++ b/examples/codegen-verifier/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "hello_world" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/examples/codegen-verifier/Prover.toml b/examples/codegen-verifier/Prover.toml new file mode 100644 index 0000000000..2c1854573a --- /dev/null +++ b/examples/codegen-verifier/Prover.toml @@ -0,0 +1,2 @@ +x = 1 
+y = 2 diff --git a/examples/codegen-verifier/codegen_verifier.sh b/examples/codegen-verifier/codegen_verifier.sh new file mode 100755 index 0000000000..fabd6235a6 --- /dev/null +++ b/examples/codegen-verifier/codegen_verifier.sh @@ -0,0 +1,38 @@ +#!/bin/bash +set -eu + +BACKEND=${BACKEND:-bb} + +nargo compile + +# TODO: backend should automatically generate vk if necessary. +$BACKEND write_vk -b ./target/hello_world.json +$BACKEND contract -o ./src/contract.sol + +# We now generate a proof and check whether the verifier contract will verify it. + +nargo execute witness + +PROOF_PATH=./target/proof +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz -o $PROOF_PATH + +NUM_PUBLIC_INPUTS=1 +PUBLIC_INPUT_BYTES=$((32 * $NUM_PUBLIC_INPUTS)) +HEX_PUBLIC_INPUTS=$(head -c $PUBLIC_INPUT_BYTES $PROOF_PATH | od -An -v -t x1 | tr -d $' \n') +HEX_PROOF=$(tail -c +$(($PUBLIC_INPUT_BYTES + 1)) $PROOF_PATH | od -An -v -t x1 | tr -d $' \n') + +# Spin up an anvil node to deploy the contract to +anvil & + +DEPLOY_INFO=$(forge create UltraVerifier \ + --rpc-url "127.0.0.1:8545" \ + --private-key "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" \ + --json) +VERIFIER_ADDRESS=$(echo $DEPLOY_INFO | jq -r '.deployedTo') + +# Call the verifier contract with our proof. 
+# Note that we haven't needed to split up `HEX_PUBLIC_INPUTS` as there's only a single public input +cast call $VERIFIER_ADDRESS "verify(bytes, bytes32[])(bool)" "0x$HEX_PROOF" "[0x$HEX_PUBLIC_INPUTS]" + +# Stop anvil node again +kill %- \ No newline at end of file diff --git a/examples/codegen-verifier/foundry.toml b/examples/codegen-verifier/foundry.toml new file mode 100644 index 0000000000..25b918f9c9 --- /dev/null +++ b/examples/codegen-verifier/foundry.toml @@ -0,0 +1,6 @@ +[profile.default] +src = "src" +out = "out" +libs = ["lib"] + +# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options diff --git a/examples/codegen-verifier/src/main.nr b/examples/codegen-verifier/src/main.nr new file mode 100644 index 0000000000..baef0c3786 --- /dev/null +++ b/examples/codegen-verifier/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); +} \ No newline at end of file diff --git a/examples/codegen-verifier/test.sh b/examples/codegen-verifier/test.sh new file mode 100755 index 0000000000..93c2f6edf5 --- /dev/null +++ b/examples/codegen-verifier/test.sh @@ -0,0 +1,15 @@ +#!/bin/bash +set -eu + +# This file is used for Noir CI and is not required. + +BACKEND=${BACKEND:-bb} + +rm -f ./src/contract.sol + +./codegen_verifier.sh + +if ! 
[ -f ./src/contract.sol ]; then + printf '%s\n' "Contract not written to file" >&2 + exit 1 +fi \ No newline at end of file diff --git a/examples/prove_and_verify/Nargo.toml b/examples/prove_and_verify/Nargo.toml new file mode 100644 index 0000000000..2b367f30db --- /dev/null +++ b/examples/prove_and_verify/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "hello_world" +type = "bin" +authors = [""] +compiler_version = ">=0.29.0" + +[dependencies] \ No newline at end of file diff --git a/examples/prove_and_verify/Prover.toml b/examples/prove_and_verify/Prover.toml new file mode 100644 index 0000000000..8c12ebba6c --- /dev/null +++ b/examples/prove_and_verify/Prover.toml @@ -0,0 +1,2 @@ +x = "1" +y = "2" diff --git a/examples/prove_and_verify/proofs/proof b/examples/prove_and_verify/proofs/proof new file mode 100644 index 0000000000000000000000000000000000000000..01d5ad276865ced1d94c7e1be9a84e09e0d6cb50 GIT binary patch literal 2176 zcmV-`2!Hng000000000000000000000000000000000000006J0S8cdAa>aDI1er` zD|x|5vMZ?MQTBMo$;s!NqkKRF(GdPwXbO6bcBHP4(CLcGZ{3r7QLt zaWO0Zhf%!!=``aT_Ngbft_>T+6XBD6%_RL2=Nb|XZKh8$=o?+MNSMEld2+}qL|YqH zLv37TBiqvfn7o4__bhJ|M<8>FmMP<3Sen_18u;RyI*DdwZ_hvTws2jpcP8bGBZ^e3)Jn6=g9>4a&jMY3YvROW7@1uygo z;5@Y@!u0*#UO#;c$)lIBLj70Kdl)i$`9uWor`4HW4^9?RD2x4cYsS-=KY2m(_GRbXxLd`oyO*Z)%l*L|x zv-CZ8>9svulPJHe7Fxx>8C!H#UbE;2(0fM-gegT@y=e-H?)xt_#D9X$wMVOf>DFaK z0%FYk?xNKLWKuV8By z9xl$MPJ!&4KjFIO(;;Z(vbJ*pWgkp2C< &3u_voM;b zOtM%%_fQ{^&wAJfZ;GiN0p9qKBout>sXkPL)2W#Jx`<}s)UUqDwXxO`peHgLbtXEi zA}QV2A5Y-kA}enoMoM3VRp`w6Y)0a8ZZH@O!Ts}z*}woW#imU_!aB{?Y1w4 zd~iKsf)fzvxs8$Gq6iEr7lL!+cc7iQiE*bBVf0JiH2S;)RXw_%l4}P~Q)PHJu_p4{*Ru_1m9Dp8)*zYD$!A&?i2~NJxFh-eka_ z5&@JLG^IL)kaSba{Rl?wehG5T^4158geGzaxi$V!E+Cd58w*SZEJDPnAuDv|2SQ2v zD6U)1Jf#xchKQNmnPi0fe{GsY(T|4-SxmAGRYR z!;|O<+&dyFTcx>!{NBfR7#ofvKxi;V`O!!mxJ=Y6>I+}AZGrHbn%qc>U|YtSq617FqhC6+)VddC>J;Cq)J1og5Luo@<-Q12xSbLa zp4H7G58tL4;|ZpwE*GX6ePf%~(6m1I<@^({r+s^J%rvkGB@5*3ayuiQ!Fasr&xbP) 
zK^aV+8fDOXvjj{o^OVgaZxq-wLO0uf;Uit)e{PD7iO70nerlMaHatF2o1>d1co%5j zw_sTKrNcmFmNe2Vd}^mZ>`6X298oXo2{Ic8E_?0a`h_++3wIRNg(+kP zb-A39b^ZaJVA~p0gnqqRfL<9O+WIHv)7i5Wx6|)VbSDHDJAecE!UA?Iom+~24j~r_ z0o~7H^Q};cpVA~TpTksRgysp2c=}a06gZg`WJGKS3~JRg-RURs=LMFuX7>T#u|qq@ zqW7q6c{6l)ehj4=`-R~qJstJS6W?>-Ks(wKT*z4TN_7gwYUa}5Y-@}e^2-u#-0`N8 zhrUI9DFl3KIvt{UlC9G^Sb2>`aAU|Hah48r%uVBMRlEXl>FO?W8eBWSIGd$9k32Gi zv#M(<)#fWe=Xb3LG@0zy4oWefBzX1!I!VPV*gt~=P#t9w^~deLGAGQZA07Pf|Mif} zddk93$K#e}$j;HZ1L|la>frfIP|MwU9zkTl7mWdCoVt=%Lg1jKVgDRgntNg{1!LxB z5V7Ku@3Sc}26&B|D4S8BB)h7jOK-f&6j@IS-K*efFN6G$90iBbvClb1UX_J6;X{d) z4b5cs8uKU`K;Eh0C;UB?yTPS{8b-r#20xH2mHtQTX?08lgJ97bm3F-#stFrS(n0h9 z4CA}n2txgxH+HlnLO#AneCmV&#-b9mZ@nAn!i#}(}^jL#=8#xmc C5)-Ka literal 0 HcmV?d00001 diff --git a/examples/prove_and_verify/prove_and_verify.sh b/examples/prove_and_verify/prove_and_verify.sh new file mode 100755 index 0000000000..01ee6c7073 --- /dev/null +++ b/examples/prove_and_verify/prove_and_verify.sh @@ -0,0 +1,14 @@ +#!/bin/bash +set -eu + +BACKEND=${BACKEND:-bb} + +nargo execute witness + +# TODO: `bb` should create `proofs` directory if it doesn't exist. +mkdir -p proofs +$BACKEND prove -b ./target/hello_world.json -w ./target/witness.gz + +# TODO: backend should automatically generate vk if necessary. +$BACKEND write_vk -b ./target/hello_world.json +$BACKEND verify -v ./target/vk -p ./proofs/proof \ No newline at end of file diff --git a/examples/prove_and_verify/src/main.nr b/examples/prove_and_verify/src/main.nr new file mode 100644 index 0000000000..baef0c3786 --- /dev/null +++ b/examples/prove_and_verify/src/main.nr @@ -0,0 +1,3 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); +} \ No newline at end of file diff --git a/examples/prove_and_verify/test.sh b/examples/prove_and_verify/test.sh new file mode 100755 index 0000000000..a8ae3cca13 --- /dev/null +++ b/examples/prove_and_verify/test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -eu + +# This file is used for Noir CI and is not required. 
+ +BACKEND=${BACKEND:-bb} + +rm -rf ./target ./proofs + +./prove_and_verify.sh \ No newline at end of file diff --git a/noir_stdlib/src/aes128.nr b/noir_stdlib/src/aes128.nr index e6e2a5e499..cd61021a95 100644 --- a/noir_stdlib/src/aes128.nr +++ b/noir_stdlib/src/aes128.nr @@ -2,3 +2,6 @@ // docs:start:aes128 pub fn aes128_encrypt(input: [u8; N], iv: [u8; 16], key: [u8; 16]) -> [u8] {} // docs:end:aes128 + +#[foreign(aes128_encrypt)] +pub fn aes128_encrypt_slice(input: [u8], iv: [u8; 16], key: [u8; 16]) -> [u8] {} diff --git a/noir_stdlib/src/embedded_curve_ops.nr b/noir_stdlib/src/embedded_curve_ops.nr index 21d658db61..2aec90e0a5 100644 --- a/noir_stdlib/src/embedded_curve_ops.nr +++ b/noir_stdlib/src/embedded_curve_ops.nr @@ -1,15 +1,20 @@ use crate::ops::arith::{Add, Sub, Neg}; - +use crate::cmp::Eq; // TODO(https://github.com/noir-lang/noir/issues/4931) struct EmbeddedCurvePoint { x: Field, y: Field, + is_infinite: bool } impl EmbeddedCurvePoint { fn double(self) -> EmbeddedCurvePoint { embedded_curve_add(self, self) } + + fn point_at_infinity() -> EmbeddedCurvePoint { + EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true } + } } impl Add for EmbeddedCurvePoint { @@ -28,11 +33,24 @@ impl Neg for EmbeddedCurvePoint { fn neg(self) -> EmbeddedCurvePoint { EmbeddedCurvePoint { x: self.x, - y: -self.y + y: -self.y, + is_infinite: self.is_infinite } } } +impl Eq for EmbeddedCurvePoint { + fn eq(self: Self, b: EmbeddedCurvePoint) -> bool { + (self.is_infinite & b.is_infinite) | ((self.is_infinite == b.is_infinite) & (self.x == b.x) & (self.y == b.y)) + } +} + +// Scalar represented as low and high limbs +struct EmbeddedCurveScalar { + lo: Field, + hi: Field, +} + // Computes a multi scalar multiplication over the embedded curve. // For bn254, We have Grumpkin and Baby JubJub. // For bls12-381, we have JubJub and Bandersnatch. 
@@ -42,9 +60,9 @@ impl Neg for EmbeddedCurvePoint { #[foreign(multi_scalar_mul)] // docs:start:multi_scalar_mul pub fn multi_scalar_mul( - points: [Field; N], // points represented as x and y coordinates [x1, y1, x2, y2, ...] - scalars: [Field; N] // scalars represented as low and high limbs [low1, high1, low2, high2, ...] -) -> [Field; 2] + points: [EmbeddedCurvePoint; N], + scalars: [EmbeddedCurveScalar; N] +) -> [Field; 3] // docs:end:multi_scalar_mul {} @@ -52,12 +70,12 @@ pub fn multi_scalar_mul( pub fn fixed_base_scalar_mul( scalar_low: Field, scalar_high: Field -) -> [Field; 2] +) -> [Field; 3] // docs:end:fixed_base_scalar_mul { - let g1_x = 1; - let g1_y = 17631683881184975370165255887551781615748388533673675138860; - multi_scalar_mul([g1_x, g1_y], [scalar_low, scalar_high]) + let g1 = EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false }; + let scalar = EmbeddedCurveScalar { lo: scalar_low, hi: scalar_high }; + multi_scalar_mul([g1], [scalar]) } // This is a hack as returning an `EmbeddedCurvePoint` from a foreign function in brillig returns a [BrilligVariable::SingleAddr; 2] rather than BrilligVariable::BrilligArray @@ -72,8 +90,8 @@ fn embedded_curve_add( let point_array = embedded_curve_add_array_return(point1, point2); let x = point_array[0]; let y = point_array[1]; - EmbeddedCurvePoint { x, y } + EmbeddedCurvePoint { x, y, is_infinite: point_array[2] == 1 } } #[foreign(embedded_curve_add)] -fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 3] {} diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr index d70310be39..2e82d9e7c2 100644 --- a/noir_stdlib/src/field/bn254.nr +++ b/noir_stdlib/src/field/bn254.nr @@ -25,7 +25,7 @@ unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { fn assert_gt_limbs(a: 
(Field, Field), b: (Field, Field)) { let (alo, ahi) = a; let (blo, bhi) = b; - let borrow = lte_unsafe(alo, blo, 16); + let borrow = lte_unsafe_16(alo, blo); let rlo = alo - blo - 1 + (borrow as Field) * TWO_POW_128; let rhi = ahi - bhi - (borrow as Field); @@ -51,9 +51,9 @@ pub fn decompose(x: Field) -> (Field, Field) { (xlo, xhi) } -unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { - let x_bytes = x.__to_le_radix(256, num_bytes); - let y_bytes = y.__to_le_radix(256, num_bytes); +fn lt_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { + let x_bytes = x.to_le_radix(256, num_bytes); + let y_bytes = y.to_le_radix(256, num_bytes); let mut x_is_lt = false; let mut done = false; for i in 0..num_bytes { @@ -70,8 +70,20 @@ unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { x_is_lt } -unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { - lt_unsafe(x, y, num_bytes) | (x == y) +fn lte_unsafe_internal(x: Field, y: Field, num_bytes: u32) -> bool { + if x == y { + true + } else { + lt_unsafe_internal(x, y, num_bytes) + } +} + +unconstrained fn lt_unsafe_32(x: Field, y: Field) -> bool { + lt_unsafe_internal(x, y, 32) +} + +unconstrained fn lte_unsafe_16(x: Field, y: Field) -> bool { + lte_unsafe_internal(x, y, 16) } pub fn assert_gt(a: Field, b: Field) { @@ -90,7 +102,7 @@ pub fn assert_lt(a: Field, b: Field) { pub fn gt(a: Field, b: Field) -> bool { if a == b { false - } else if lt_unsafe(a, b, 32) { + } else if lt_unsafe_32(a, b) { assert_gt(b, a); false } else { @@ -105,7 +117,10 @@ pub fn lt(a: Field, b: Field) -> bool { mod tests { // TODO: Allow imports from "super" - use crate::field::bn254::{decompose_unsafe, decompose, lt_unsafe, assert_gt, gt, lt, TWO_POW_128, lte_unsafe, PLO, PHI}; + use crate::field::bn254::{ + decompose_unsafe, decompose, lt_unsafe_internal, assert_gt, gt, lt, TWO_POW_128, + lte_unsafe_internal, PLO, PHI + }; #[test] fn check_decompose_unsafe() { @@ -123,23 +138,23 @@ mod 
tests { #[test] fn check_lt_unsafe() { - assert(lt_unsafe(0, 1, 16)); - assert(lt_unsafe(0, 0x100, 16)); - assert(lt_unsafe(0x100, TWO_POW_128 - 1, 16)); - assert(!lt_unsafe(0, TWO_POW_128, 16)); + assert(lt_unsafe_internal(0, 1, 16)); + assert(lt_unsafe_internal(0, 0x100, 16)); + assert(lt_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); + assert(!lt_unsafe_internal(0, TWO_POW_128, 16)); } #[test] fn check_lte_unsafe() { - assert(lte_unsafe(0, 1, 16)); - assert(lte_unsafe(0, 0x100, 16)); - assert(lte_unsafe(0x100, TWO_POW_128 - 1, 16)); - assert(!lte_unsafe(0, TWO_POW_128, 16)); - - assert(lte_unsafe(0, 0, 16)); - assert(lte_unsafe(0x100, 0x100, 16)); - assert(lte_unsafe(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); - assert(lte_unsafe(TWO_POW_128, TWO_POW_128, 16)); + assert(lte_unsafe_internal(0, 1, 16)); + assert(lte_unsafe_internal(0, 0x100, 16)); + assert(lte_unsafe_internal(0x100, TWO_POW_128 - 1, 16)); + assert(!lte_unsafe_internal(0, TWO_POW_128, 16)); + + assert(lte_unsafe_internal(0, 0, 16)); + assert(lte_unsafe_internal(0x100, 0x100, 16)); + assert(lte_unsafe_internal(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); + assert(lte_unsafe_internal(TWO_POW_128, TWO_POW_128, 16)); } #[test] diff --git a/noir_stdlib/src/ops.nr b/noir_stdlib/src/ops.nr index d7ea1dfd48..8b1903cff0 100644 --- a/noir_stdlib/src/ops.nr +++ b/noir_stdlib/src/ops.nr @@ -2,4 +2,4 @@ mod arith; mod bit; use arith::{Add, Sub, Mul, Div, Rem, Neg}; -use bit::{BitOr, BitAnd, BitXor, Shl, Shr}; +use bit::{Not, BitOr, BitAnd, BitXor, Shl, Shr}; diff --git a/noir_stdlib/src/ops/arith.nr b/noir_stdlib/src/ops/arith.nr index ea7d633a45..df0ff978a7 100644 --- a/noir_stdlib/src/ops/arith.nr +++ b/noir_stdlib/src/ops/arith.nr @@ -8,9 +8,11 @@ impl Add for Field { fn add(self, other: Field) -> Field { self + other } } impl Add for u64 { fn add(self, other: u64) -> u64 { self + other } } impl Add for u32 { fn add(self, other: u32) -> u32 { self + other } } +impl Add for u16 { fn add(self, other: u16) -> u16 { self 
+ other } } impl Add for u8 { fn add(self, other: u8) -> u8 { self + other } } impl Add for i8 { fn add(self, other: i8) -> i8 { self + other } } +impl Add for i16 { fn add(self, other: i16) -> i16 { self + other } } impl Add for i32 { fn add(self, other: i32) -> i32 { self + other } } impl Add for i64 { fn add(self, other: i64) -> i64 { self + other } } @@ -24,9 +26,11 @@ impl Sub for Field { fn sub(self, other: Field) -> Field { self - other } } impl Sub for u64 { fn sub(self, other: u64) -> u64 { self - other } } impl Sub for u32 { fn sub(self, other: u32) -> u32 { self - other } } +impl Sub for u16 { fn sub(self, other: u16) -> u16 { self - other } } impl Sub for u8 { fn sub(self, other: u8) -> u8 { self - other } } impl Sub for i8 { fn sub(self, other: i8) -> i8 { self - other } } +impl Sub for i16 { fn sub(self, other: i16) -> i16 { self - other } } impl Sub for i32 { fn sub(self, other: i32) -> i32 { self - other } } impl Sub for i64 { fn sub(self, other: i64) -> i64 { self - other } } @@ -40,9 +44,11 @@ impl Mul for Field { fn mul(self, other: Field) -> Field { self * other } } impl Mul for u64 { fn mul(self, other: u64) -> u64 { self * other } } impl Mul for u32 { fn mul(self, other: u32) -> u32 { self * other } } +impl Mul for u16 { fn mul(self, other: u16) -> u16 { self * other } } impl Mul for u8 { fn mul(self, other: u8) -> u8 { self * other } } impl Mul for i8 { fn mul(self, other: i8) -> i8 { self * other } } +impl Mul for i16 { fn mul(self, other: i16) -> i16 { self * other } } impl Mul for i32 { fn mul(self, other: i32) -> i32 { self * other } } impl Mul for i64 { fn mul(self, other: i64) -> i64 { self * other } } @@ -56,9 +62,11 @@ impl Div for Field { fn div(self, other: Field) -> Field { self / other } } impl Div for u64 { fn div(self, other: u64) -> u64 { self / other } } impl Div for u32 { fn div(self, other: u32) -> u32 { self / other } } +impl Div for u16 { fn div(self, other: u16) -> u16 { self / other } } impl Div for u8 { fn div(self, 
other: u8) -> u8 { self / other } } impl Div for i8 { fn div(self, other: i8) -> i8 { self / other } } +impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } @@ -70,9 +78,11 @@ trait Rem{ impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } @@ -86,6 +96,7 @@ trait Neg { impl Neg for Field { fn neg(self) -> Field { -self } } impl Neg for i8 { fn neg(self) -> i8 { -self } } +impl Neg for i16 { fn neg(self) -> i16 { -self } } impl Neg for i32 { fn neg(self) -> i32 { -self } } impl Neg for i64 { fn neg(self) -> i64 { -self } } // docs:end:neg-trait-impls diff --git a/noir_stdlib/src/ops/bit.nr b/noir_stdlib/src/ops/bit.nr index 8a3d20b190..a31cfee878 100644 --- a/noir_stdlib/src/ops/bit.nr +++ b/noir_stdlib/src/ops/bit.nr @@ -1,3 +1,24 @@ +// docs:start:not-trait +trait Not { + fn not(self: Self) -> Self; +} +// docs:end:not-trait + +// docs:start:not-trait-impls +impl Not for bool { fn not(self) -> bool { !self } } + +impl Not for u64 { fn not(self) -> u64 { !self } } +impl Not for u32 { fn not(self) -> u32 { !self } } +impl Not for u16 { fn not(self) -> u16 { !self } } +impl Not for u8 { fn not(self) -> u8 { !self } } +impl Not for u1 { fn not(self) -> u1 { !self } } + +impl Not for i8 { fn not(self) -> i8 { !self } } +impl Not for i16 { fn not(self) -> i16 { !self } } +impl Not for i32 { fn not(self) -> i32 { !self } } 
+impl Not for i64 { fn not(self) -> i64 { !self } } +// docs:end:not-trait-impls + // docs:start:bitor-trait trait BitOr { fn bitor(self, other: Self) -> Self; @@ -8,9 +29,11 @@ impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } @@ -24,9 +47,11 @@ impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } @@ -40,9 +65,11 @@ impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 
{ self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } @@ -54,10 +81,12 @@ trait Shl { impl Shl for u32 { fn shl(self, other: u8) -> u32 { self << other } } impl Shl for u64 { fn shl(self, other: u8) -> u64 { self << other } } +impl Shl for u16 { fn shl(self, other: u8) -> u16 { self << other } } impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } impl Shl for u1 { fn shl(self, other: u8) -> u1 { self << other } } impl Shl for i8 { fn shl(self, other: u8) -> i8 { self << other } } +impl Shl for i16 { fn shl(self, other: u8) -> i16 { self << other } } impl Shl for i32 { fn shl(self, other: u8) -> i32 { self << other } } impl Shl for i64 { fn shl(self, other: u8) -> i64 { self << other } } @@ -69,10 +98,12 @@ trait Shr { impl Shr for u64 { fn shr(self, other: u8) -> u64 { self >> other } } impl Shr for u32 { fn shr(self, other: u8) -> u32 { self >> other } } +impl Shr for u16 { fn shr(self, other: u8) -> u16 { self >> other } } impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } impl Shr for u1 { fn shr(self, other: u8) -> u1 { self >> other } } impl Shr for i8 { fn shr(self, other: u8) -> i8 { self >> other } } +impl Shr for i16 { fn shr(self, other: u8) -> i16 { self >> other } } impl Shr for i32 { fn shr(self, other: u8) -> i32 { self >> other } } impl Shr for i64 { fn shr(self, other: u8) -> i64 { self >> other } } diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index d0f38079e6..0332c8ac86 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -1,8 +1,9 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::ops::{Add, Sub, Mul, Div, Rem, Not, BitOr, BitAnd, BitXor, Shl, Shr}; use crate::cmp::{Eq, Ord, Ordering}; +use crate::println; global pow64 : Field = 
18446744073709551616; //2^64; - +global pow63 : Field = 9223372036854775808; // 2^63; struct U128 { lo: Field, hi: Field, @@ -20,6 +21,13 @@ impl U128 { U128::from_u64s_le(lo, hi) } + pub fn zero() -> U128 { + U128 { lo: 0, hi: 0 } + } + + pub fn one() -> U128 { + U128 { lo: 1, hi: 0 } + } pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { let mut lo = 0; let mut base = 1; @@ -87,27 +95,44 @@ impl U128 { U128 { lo: lo as Field, hi: hi as Field } } + unconstrained fn uconstrained_check_is_upper_ascii(ascii: u8) -> bool { + ((ascii >= 65) & (ascii <= 90)) // Between 'A' and 'Z' + } + fn decode_ascii(ascii: u8) -> Field { if ascii < 58 { ascii - 48 - } else if ascii < 71 { - ascii - 55 } else { + let ascii = ascii + 32 * (U128::uconstrained_check_is_upper_ascii(ascii) as u8); + assert(ascii >= 97); // enforce >= 'a' + assert(ascii <= 102); // enforce <= 'f' ascii - 87 } as Field } + // TODO: Replace with a faster version. + // A circuit that uses this function can be slow to compute + // (we're doing up to 127 calls to compute the quotient) unconstrained fn unconstrained_div(self: Self, b: U128) -> (U128, U128) { - if self < b { - (U128::from_u64s_le(0, 0), self) + if b == U128::zero() { + // Return 0,0 to avoid eternal loop + (U128::zero(), U128::zero()) + } else if self < b { + (U128::zero(), self) + } else if self == b { + (U128::one(), U128::zero()) } else { - //TODO check if this can overflow? 
- let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2, 0)); + let (q,r) = if b.hi as u64 >= pow63 as u64 { + // The result of multiplication by 2 would overflow + (U128::zero(), self) + } else { + self.unconstrained_div(b * U128::from_u64s_le(2, 0)) + }; let q_mul_2 = q * U128::from_u64s_le(2, 0); if r < b { (q_mul_2, r) } else { - (q_mul_2 + U128::from_u64s_le(1, 0), r - b) + (q_mul_2 + U128::one(), r - b) } } } @@ -129,11 +154,7 @@ impl U128 { let low = self.lo * b.lo; let lo = low as u64 as Field; let carry = (low - lo) / pow64; - let high = if crate::field::modulus_num_bits() as u32 > 196 { - (self.lo + self.hi) * (b.lo + b.hi) - low + carry - } else { - self.lo * b.hi + self.hi * b.lo + carry - }; + let high = self.lo * b.hi + self.hi * b.lo + carry; let hi = high as u64 as Field; U128 { lo, hi } } @@ -228,11 +249,20 @@ impl Ord for U128 { } } +impl Not for U128 { + fn not(self) -> U128 { + U128 { + lo: (!(self.lo as u64)) as Field, + hi: (!(self.hi as u64)) as Field + } + } +} + impl BitOr for U128 { fn bitor(self, other: U128) -> U128 { U128 { lo: ((self.lo as u64) | (other.lo as u64)) as Field, - hi: ((self.hi as u64) | (other.hi as u64))as Field + hi: ((self.hi as u64) | (other.hi as u64)) as Field } } } @@ -284,3 +314,221 @@ impl Shr for U128 { self / U128::from_integer(y) } } + +mod tests { + use crate::uint128::{U128, pow64, pow63}; + + #[test] + fn test_not() { + let num = U128::from_u64s_le(0, 0); + let not_num = num.not(); + + let max_u64: Field = pow64 - 1; + assert_eq(not_num.hi, max_u64); + assert_eq(not_num.lo, max_u64); + + let not_not_num = not_num.not(); + assert_eq(num, not_not_num); + } + + #[test] + fn test_construction() { + // Check little-endian u64 is inversed with big-endian u64 construction + let a = U128::from_u64s_le(2, 1); + let b = U128::from_u64s_be(1, 2); + assert_eq(a, b); + // Check byte construction is equivalent + let c = U128::from_le_bytes([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); + let d = 
U128::from_u64s_le(0x0706050403020100, 0x0f0e0d0c0b0a0908); + assert_eq(c, d); + } + + #[test] + fn test_byte_decomposition() { + let a = U128::from_u64s_le(0x0706050403020100, 0x0f0e0d0c0b0a0908); + // Get big-endian and little-endian byte decompostions + let le_bytes_a= a.to_le_bytes(); + let be_bytes_a= a.to_be_bytes(); + + // Check equivalence + for i in 0..16 { + assert_eq(le_bytes_a[i], be_bytes_a[15 - i]); + } + // Reconstruct U128 from byte decomposition + let b= U128::from_le_bytes(le_bytes_a); + // Check that it's the same element + assert_eq(a, b); + } + + #[test] + fn test_hex_constuction() { + let a = U128::from_u64s_le(0x1, 0x2); + let b = U128::from_hex("0x20000000000000001"); + assert_eq(a, b); + + let c= U128::from_hex("0xffffffffffffffffffffffffffffffff"); + let d= U128::from_u64s_le(0xffffffffffffffff, 0xffffffffffffffff); + assert_eq(c, d); + + let e= U128::from_hex("0x00000000000000000000000000000000"); + let f= U128::from_u64s_le(0, 0); + assert_eq(e, f); + } + + // Ascii decode tests + + #[test] + fn test_ascii_decode_correct_range() { + // '0'..'9' range + for i in 0..10 { + let decoded= U128::decode_ascii(48 + i); + assert_eq(decoded, i as Field); + } + // 'A'..'F' range + for i in 0..6 { + let decoded = U128::decode_ascii(65 + i); + assert_eq(decoded, (i + 10) as Field); + } + // 'a'..'f' range + for i in 0..6 { + let decoded = U128::decode_ascii(97 + i); + assert_eq(decoded, (i + 10) as Field); + } + } + + #[test(should_fail)] + fn test_ascii_decode_range_less_than_48_fails_0() { + crate::println(U128::decode_ascii(0)); + } + + #[test(should_fail)] + fn test_ascii_decode_range_less_than_48_fails_1() { + crate::println(U128::decode_ascii(47)); + } + + #[test(should_fail)] + fn test_ascii_decode_range_58_64_fails_0() { + let _ = U128::decode_ascii(58); + } + + #[test(should_fail)] + fn test_ascii_decode_range_58_64_fails_1() { + let _ = U128::decode_ascii(64); + } + + #[test(should_fail)] + fn test_ascii_decode_range_71_96_fails_0() { + let 
_ = U128::decode_ascii(71); + } + + #[test(should_fail)] + fn test_ascii_decode_range_71_96_fails_1() { + let _ = U128::decode_ascii(96); + } + + #[test(should_fail)] + fn test_ascii_decode_range_greater_than_102_fails() { + let _ = U128::decode_ascii(103); + } + + #[test(should_fail)] + fn test_ascii_decode_regression() { + // This code will actually fail because of ascii_decode, + // but in the past it was possible to create a value > (1<<128) + let a = U128::from_hex("0x~fffffffffffffffffffffffffffffff"); + let b:Field= a.to_integer(); + let c= b.to_le_bytes(17); + assert(c[16] != 0); + } + + #[test] + fn test_unconstrained_div() { + // Test the potential overflow case + let a= U128::from_u64s_le(0x0, 0xffffffffffffffff); + let b= U128::from_u64s_le(0x0, 0xfffffffffffffffe); + let c= U128::one(); + let d= U128::from_u64s_le(0x0, 0x1); + let (q,r) = a.unconstrained_div(b); + assert_eq(q, c); + assert_eq(r, d); + + let a = U128::from_u64s_le(2, 0); + let b = U128::one(); + // Check the case where a is a multiple of b + let (c,d ) = a.unconstrained_div(b); + assert_eq((c, d), (a, U128::zero())); + + // Check where b is a multiple of a + let (c,d) = b.unconstrained_div(a); + assert_eq((c, d), (U128::zero(), b)); + + // Dividing by zero returns 0,0 + let a = U128::from_u64s_le(0x1, 0x0); + let b = U128::zero(); + let (c,d)= a.unconstrained_div(b); + assert_eq((c, d), (U128::zero(), U128::zero())); + + // Dividing 1<<127 by 1<<127 (special case) + let a = U128::from_u64s_le(0x0, pow63 as u64); + let b = U128::from_u64s_le(0x0, pow63 as u64); + let (c,d )= a.unconstrained_div(b); + assert_eq((c, d), (U128::one(), U128::zero())); + } + + #[test] + fn integer_conversions() { + // Maximum + let start:Field = 0xffffffffffffffffffffffffffffffff; + let a = U128::from_integer(start); + let end = a.to_integer(); + assert_eq(start, end); + + // Minimum + let start:Field = 0x0; + let a = U128::from_integer(start); + let end = a.to_integer(); + assert_eq(start, end); + + // Low 
limb + let start:Field = 0xffffffffffffffff; + let a = U128::from_integer(start); + let end = a.to_integer(); + assert_eq(start, end); + + // High limb + let start:Field = 0xffffffffffffffff0000000000000000; + let a = U128::from_integer(start); + let end = a.to_integer(); + assert_eq(start, end); + } + #[test] + fn test_wrapping_mul() { + // 1*0==0 + assert_eq(U128::zero(), U128::zero().wrapping_mul(U128::one())); + + // 0*1==0 + assert_eq(U128::zero(), U128::one().wrapping_mul(U128::zero())); + + // 1*1==1 + assert_eq(U128::one(), U128::one().wrapping_mul(U128::one())); + + // 0 * ( 1 << 64 ) == 0 + assert_eq(U128::zero(), U128::zero().wrapping_mul(U128::from_u64s_le(0, 1))); + + // ( 1 << 64 ) * 0 == 0 + assert_eq(U128::zero(), U128::from_u64s_le(0, 1).wrapping_mul(U128::zero())); + + // 1 * ( 1 << 64 ) == 1 << 64 + assert_eq(U128::from_u64s_le(0, 1), U128::from_u64s_le(0, 1).wrapping_mul(U128::one())); + + // ( 1 << 64 ) * 1 == 1 << 64 + assert_eq(U128::from_u64s_le(0, 1), U128::one().wrapping_mul(U128::from_u64s_le(0, 1))); + + // ( 1 << 64 ) * ( 1 << 64 ) == 1 << 64 + assert_eq(U128::zero(), U128::from_u64s_le(0, 1).wrapping_mul(U128::from_u64s_le(0, 1))); + // -1 * -1 == 1 + assert_eq( + U128::one(), U128::from_u64s_le(0xffffffffffffffff, 0xffffffffffffffff).wrapping_mul(U128::from_u64s_le(0xffffffffffffffff, 0xffffffffffffffff)) + ); + } +} diff --git a/scripts/count_loc.sh b/scripts/count_loc.sh new file mode 100755 index 0000000000..91565aa6c4 --- /dev/null +++ b/scripts/count_loc.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +set -eu + +# Run relative to repo root +cd $(dirname "$0")/../ + +if ! command -v "tokei" >/dev/null 2>&1; then + echo "Error: tokei is required but not installed." 
>&2 + echo "Error: Run \`cargo install --git https://github.com/TomAFrench/tokei --branch tf/add-noir-support tokei\`" >&2 + + exit 1 +fi + +echo "" +echo "Total:" + +tokei ./ --sort code + +echo "" +echo "ACIR/ACVM:" +tokei ./acvm-repo --sort code + +echo "" +echo "Compiler:" +tokei ./compiler --sort code + +echo "" +echo "Tooling:" +tokei ./tooling --sort code + +echo "" +echo "Standard Library:" +tokei ./noir_stdlib --sort code diff --git a/scripts/install_bb.sh b/scripts/install_bb.sh new file mode 100755 index 0000000000..4ee5bbbbe4 --- /dev/null +++ b/scripts/install_bb.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# We use this script just for CI so we assume we're running on x86 linux + +mkdir -p $HOME/.barretenberg +curl -o ./barretenberg-aarch64-apple-darwin.tar.gz -L https://github.com/AztecProtocol/aztec-packages/releases/download/aztec-packages-v0.38.0/barretenberg-aarch64-apple-darwin.tar.gz +tar -xvf ./barretenberg-aarch64-apple-darwin.tar.gz -C $HOME/.barretenberg/ +echo 'export PATH=$PATH:$HOME/.barretenberg/' >> ~/.bashrc +source ~/.bashrc diff --git a/security/insectarium/noir_stdlib.md b/security/insectarium/noir_stdlib.md new file mode 100644 index 0000000000..5ec4eb5f6c --- /dev/null +++ b/security/insectarium/noir_stdlib.md @@ -0,0 +1,61 @@ +# Bugs found in Noir stdlib + +## U128 + +### decode_ascii +Old **decode_ascii** function didn't check that the values of individual bytes in the string were just in the range of [0-9a-f-A-F]. +```rust +fn decode_ascii(ascii: u8) -> Field { + if ascii < 58 { + ascii - 48 + } else if ascii < 71 { + ascii - 55 + } else { + ascii - 87 + } as Field +} +``` +Since the function used the assumption that decode_ascii returns values in range [0,15] to construct **lo** and **hi** it was possible to overflow these 64-bit limbs. 
+ +### unconstrained_div +```rust + unconstrained fn unconstrained_div(self: Self, b: U128) -> (U128, U128) { + if self < b { + (U128::from_u64s_le(0, 0), self) + } else { + //TODO check if this can overflow? + let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2, 0)); + let q_mul_2 = q * U128::from_u64s_le(2, 0); + if r < b { + (q_mul_2, r) + } else { + (q_mul_2 + U128::from_u64s_le(1, 0), r - b) + } + } + } +``` +There were 2 issues in unconstrained_div: +1) Attempting to divide by zero resulted in an infinite loop, because there was no check. +2) $a >= 2^{127}$ cause the function to multiply b to such power of 2 that the result would be more than $2^{128}$ and lead to assertion failure even though it was a legitimate input + +N.B. initial fix by Rumata888 also had an edgecase missing for when a==b and b >= (1<<127). + +### wrapping_mul +```rust +fn wrapping_mul(self: Self, b: U128) -> U128 { + let low = self.lo * b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo + self.hi) * (b.lo + b.hi) - low + carry // Bug + } else { + self.lo * b.hi + self.hi * b.lo + carry + }; + let hi = high as u64 as Field; + U128 { lo, hi } + } +``` +Wrapping mul had the code copied from regular mul barring the assertion that the product of high limbs is zero. 
Because that check was removed, the optimized path for moduli > 196 bits was incorrect, since it included their product (as at least one of them was supposed to be zero originally, but not for wrapping multiplication) + + + diff --git a/test_programs/compile_success_empty/intrinsic_die/src/main.nr b/test_programs/compile_success_empty/intrinsic_die/src/main.nr index 9ce17f72c0..a6c6d3df9a 100644 --- a/test_programs/compile_success_empty/intrinsic_die/src/main.nr +++ b/test_programs/compile_success_empty/intrinsic_die/src/main.nr @@ -4,5 +4,7 @@ fn main(x: Field) { let hash = std::hash::pedersen_commitment([x]); let g1_x = 0x0000000000000000000000000000000000000000000000000000000000000001; let g1_y = 0x0000000000000002cf135e7506a45d632d270d45f1181294833fc48d823f272c; - let _p1 = std::embedded_curve_ops::multi_scalar_mul([g1_x, g1_y], [x, 0]); + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: g1_x, y: g1_y, is_infinite: false }; + let scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: x, hi: 0 }; + let _p1 = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); } diff --git a/test_programs/execution_success/brillig_embedded_curve/src/main.nr b/test_programs/execution_success/brillig_embedded_curve/src/main.nr index 8a1a7f0897..89a699448d 100644 --- a/test_programs/execution_success/brillig_embedded_curve/src/main.nr +++ b/test_programs/execution_success/brillig_embedded_curve/src/main.nr @@ -2,22 +2,22 @@ use dep::std; unconstrained fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let g1_y = 17631683881184975370165255887551781615748388533673675138860; - let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; - + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y, is_infinite: false }; + let scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Test that multi_scalar_mul correctly derives the public key - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y], 
[priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); assert(res[0] == pub_x); assert(res[1] == pub_y); // Test that double function calling embedded_curve_add works as expected - let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y, is_infinite: false }; let res = pub_point.double(); let double = g1.add(g1); assert(double.x == res.x); // Test calling multi_scalar_mul with multiple points and scalars - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y, g1.x, g1.y], [priv_key, 0, priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1, g1], [scalar, scalar]); // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res[0]); diff --git a/test_programs/execution_success/embedded_curve_ops/src/main.nr b/test_programs/execution_success/embedded_curve_ops/src/main.nr index 3cb27d8c18..46f919e947 100644 --- a/test_programs/execution_success/embedded_curve_ops/src/main.nr +++ b/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -2,22 +2,22 @@ use dep::std; fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { let g1_y = 17631683881184975370165255887551781615748388533673675138860; - let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y }; - + let g1 = std::embedded_curve_ops::EmbeddedCurvePoint { x: 1, y: g1_y, is_infinite: false }; + let scalar = std::embedded_curve_ops::EmbeddedCurveScalar { lo: priv_key, hi: 0 }; // Test that multi_scalar_mul correctly derives the public key - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y], [priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1], [scalar]); assert(res[0] == pub_x); assert(res[1] == pub_y); // Test that double function calling embedded_curve_add works as expected - let pub_point = 
std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let pub_point = std::embedded_curve_ops::EmbeddedCurvePoint { x: pub_x, y: pub_y, is_infinite: false }; let res = pub_point.double(); let double = g1.add(g1); assert(double.x == res.x); // Test calling multi_scalar_mul with multiple points and scalars - let res = std::embedded_curve_ops::multi_scalar_mul([g1.x, g1.y, g1.x, g1.y], [priv_key, 0, priv_key, 0]); + let res = std::embedded_curve_ops::multi_scalar_mul([g1, g1], [scalar, scalar]); // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res[0]); diff --git a/test_programs/execution_success/no_predicates_brillig/Nargo.toml b/test_programs/execution_success/no_predicates_brillig/Nargo.toml new file mode 100644 index 0000000000..328d78c8f9 --- /dev/null +++ b/test_programs/execution_success/no_predicates_brillig/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "no_predicates_brillig" +type = "bin" +authors = [""] +compiler_version = ">=0.27.0" + +[dependencies] diff --git a/test_programs/execution_success/no_predicates_brillig/Prover.toml b/test_programs/execution_success/no_predicates_brillig/Prover.toml new file mode 100644 index 0000000000..93a825f609 --- /dev/null +++ b/test_programs/execution_success/no_predicates_brillig/Prover.toml @@ -0,0 +1,2 @@ +x = "10" +y = "20" diff --git a/test_programs/execution_success/no_predicates_brillig/src/main.nr b/test_programs/execution_success/no_predicates_brillig/src/main.nr new file mode 100644 index 0000000000..65e2e5d61f --- /dev/null +++ b/test_programs/execution_success/no_predicates_brillig/src/main.nr @@ -0,0 +1,16 @@ +unconstrained fn main(x: u32, y: pub u32) { + intermediate_function(x, y); +} + +fn intermediate_function(x: u32, y: u32) { + basic_checks(x, y); +} + +#[no_predicates] +fn basic_checks(x: u32, y: u32) { + if x > y { + assert(x == 10); + } else { + assert(y == 20); + } +} diff --git 
a/test_programs/execution_success/unit_value/Nargo.toml b/test_programs/execution_success/unit_value/Nargo.toml index f7e3697a7c..1f9c4524ec 100644 --- a/test_programs/execution_success/unit_value/Nargo.toml +++ b/test_programs/execution_success/unit_value/Nargo.toml @@ -1,7 +1,7 @@ [package] -name = "short" +name = "unit_value" type = "bin" authors = [""] compiler_version = ">=0.23.0" -[dependencies] \ No newline at end of file +[dependencies] diff --git a/test_programs/noir_test_success/embedded_curve_ops/Nargo.toml b/test_programs/noir_test_success/embedded_curve_ops/Nargo.toml new file mode 100644 index 0000000000..65e6efea53 --- /dev/null +++ b/test_programs/noir_test_success/embedded_curve_ops/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "embedded_curve_ops" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/noir_test_success/embedded_curve_ops/src/main.nr b/test_programs/noir_test_success/embedded_curve_ops/src/main.nr new file mode 100644 index 0000000000..9e3c5d8787 --- /dev/null +++ b/test_programs/noir_test_success/embedded_curve_ops/src/main.nr @@ -0,0 +1,37 @@ +use dep::std::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar, multi_scalar_mul}; + +#[test] + + fn test_infinite_point() { + let zero = EmbeddedCurvePoint::point_at_infinity(); + let zero = EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true }; + let g1 = EmbeddedCurvePoint { x: 1, y: 17631683881184975370165255887551781615748388533673675138860, is_infinite: false }; + let g2 = g1 + g1; + + let s1 = EmbeddedCurveScalar { lo: 1, hi: 0 }; + let a = multi_scalar_mul([g1], [s1]); + assert(a[2] == 0); + assert(g1 + zero == g1); + assert(g1 - g1 == zero); + assert(g1 - zero == g1); + assert(zero + zero == zero); + assert( + multi_scalar_mul([g1], [s1]) + == [1, 17631683881184975370165255887551781615748388533673675138860, 0] + ); + assert(multi_scalar_mul([g1, g1], [s1, s1]) == [g2.x, g2.y, 0]); + assert( + 
multi_scalar_mul( + [g1, zero], + [EmbeddedCurveScalar { lo: 2, hi: 0 }, EmbeddedCurveScalar { lo: 42, hi: 25 }] + ) + == [g2.x, g2.y, 0] + ); + assert( + multi_scalar_mul( + [g1, g1, zero], + [s1, s1, EmbeddedCurveScalar { lo: 42, hi: 25 }] + ) + == [g2.x, g2.y, 0] + ); +} diff --git a/test_programs/rebuild.sh b/test_programs/rebuild.sh index 51e9727828..4733bad10c 100755 --- a/test_programs/rebuild.sh +++ b/test_programs/rebuild.sh @@ -16,13 +16,14 @@ process_dir() { if [ -d ./target/ ]; then rm -r ./target/ fi - nargo compile --only-acir && nargo execute witness + nargo execute witness if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then rm -r "$current_dir/acir_artifacts/$dir_name/target" fi mkdir $current_dir/acir_artifacts/$dir_name/target + mv ./target/$dir_name.json $current_dir/acir_artifacts/$dir_name/target/program.json mv ./target/*.gz $current_dir/acir_artifacts/$dir_name/target/ cd $current_dir @@ -70,4 +71,4 @@ if [ ! -z "$exit_status" ]; then echo "Rebuild failed!" exit $exit_status fi -echo "Rebuild Succeeded!" \ No newline at end of file +echo "Rebuild Succeeded!" 
diff --git a/tooling/backend_interface/CHANGELOG.md b/tooling/backend_interface/CHANGELOG.md deleted file mode 100644 index 9ebde989ad..0000000000 --- a/tooling/backend_interface/CHANGELOG.md +++ /dev/null @@ -1,233 +0,0 @@ -# Changelog - -## [0.11.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.10.1...v0.11.0) (2023-08-18) - - -### âš  BREAKING CHANGES - -* Update `acvm` to 0.22.0 ([#240](https://github.com/noir-lang/acvm-backend-barretenberg/issues/240)) - -### Features - -* Update `acvm` to 0.22.0 ([#240](https://github.com/noir-lang/acvm-backend-barretenberg/issues/240)) ([d8342fd](https://github.com/noir-lang/acvm-backend-barretenberg/commit/d8342fd6da605ac3bbd889edf89cd122bc4689ce)) - -## [0.10.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.10.0...v0.10.1) (2023-08-18) - - -### Features - -* Migrate to `wasmer` 3.3.0 ([#236](https://github.com/noir-lang/acvm-backend-barretenberg/issues/236)) ([e115e38](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e115e38856887c6b1eeead3534534ac7e6327ea9)) - -## [0.10.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.9.1...v0.10.0) (2023-07-26) - - -### âš  BREAKING CHANGES - -* Migrate to ACVM 0.21.0 ([#234](https://github.com/noir-lang/acvm-backend-barretenberg/issues/234)) - -### Features - -* Migrate to ACVM 0.21.0 ([#234](https://github.com/noir-lang/acvm-backend-barretenberg/issues/234)) ([15c8676](https://github.com/noir-lang/acvm-backend-barretenberg/commit/15c86768685d2946a767c350f6ef5972c86677eb)) - -## [0.9.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.9.0...v0.9.1) (2023-07-21) - - -### Features - -* add support for atomic memory opcodes ([#232](https://github.com/noir-lang/acvm-backend-barretenberg/issues/232)) ([a7aa6e9](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a7aa6e9505bb402c1b3db0a990845ed26928e7aa)) - -## 
[0.9.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.8.0...v0.9.0) (2023-07-17) - - -### âš  BREAKING CHANGES - -* update to ACVM 0.19.0 ([#230](https://github.com/noir-lang/acvm-backend-barretenberg/issues/230)) - -### Miscellaneous Chores - -* update to ACVM 0.19.0 ([#230](https://github.com/noir-lang/acvm-backend-barretenberg/issues/230)) ([3f1d967](https://github.com/noir-lang/acvm-backend-barretenberg/commit/3f1d9674b904acb02c2a3e52481be8a6104c3a9d)) - -## [0.8.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.7.0...v0.8.0) (2023-07-12) - - -### âš  BREAKING CHANGES - -* Update to acvm 0.18.1 ([#228](https://github.com/noir-lang/acvm-backend-barretenberg/issues/228)) - -### Features - -* Update to acvm 0.18.1 ([#228](https://github.com/noir-lang/acvm-backend-barretenberg/issues/228)) ([397098b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/397098b239efbe16785b1c9af108ca9fc4e24497)) - -## [0.7.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.6.1...v0.7.0) (2023-07-08) - - -### âš  BREAKING CHANGES - -* **bberg:** add secp256r1 builtin to barretenberg ([#223](https://github.com/noir-lang/acvm-backend-barretenberg/issues/223)) - -### Features - -* **bberg:** add secp256r1 builtin to barretenberg ([#223](https://github.com/noir-lang/acvm-backend-barretenberg/issues/223)) ([ceb4770](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ceb47705a492fcdcea1f3c098aaab42ea8edbf2e)) - -## [0.6.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.6.0...v0.6.1) (2023-07-06) - - -### Features - -* switch RecursiveAggregation support to true ([#225](https://github.com/noir-lang/acvm-backend-barretenberg/issues/225)) ([e9462ae](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e9462ae015ec0dfb0a23ccbb89562071f87940f5)) - -## [0.6.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.5.1...v0.6.0) (2023-07-06) - - -### âš  BREAKING CHANGES - -* Update 
to ACVM 0.16.0 ([#221](https://github.com/noir-lang/acvm-backend-barretenberg/issues/221)) - -### Features - -* Update to ACVM 0.16.0 ([#221](https://github.com/noir-lang/acvm-backend-barretenberg/issues/221)) ([062d5ed](https://github.com/noir-lang/acvm-backend-barretenberg/commit/062d5ed9b476fab8ac8d3ca13371699fb2aac332)) - -## [0.5.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.5.0...v0.5.1) (2023-06-20) - - -### Bug Fixes - -* Remove wasm32 target ([#219](https://github.com/noir-lang/acvm-backend-barretenberg/issues/219)) ([e4cbb6d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e4cbb6d476e8746de33c38506e2fcb970f1c866a)) - -## [0.5.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.4.0...v0.5.0) (2023-06-15) - - -### âš  BREAKING CHANGES - -* Update to target ACVM 0.15.0 ([#217](https://github.com/noir-lang/acvm-backend-barretenberg/issues/217)) - -### Features - -* Update to target ACVM 0.15.0 ([#217](https://github.com/noir-lang/acvm-backend-barretenberg/issues/217)) ([9331898](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9331898f161321c8b6a82d5ea850f197952b2ed2)) - -## [0.4.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.3.0...v0.4.0) (2023-06-07) - - -### âš  BREAKING CHANGES - -* Recursion ([#207](https://github.com/noir-lang/acvm-backend-barretenberg/issues/207)) - -### Features - -* Recursion ([#207](https://github.com/noir-lang/acvm-backend-barretenberg/issues/207)) ([6fc479b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6fc479b9ae99d59bbfeb1b895d63cdbea469dcaa)) - -## [0.3.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.2.0...v0.3.0) (2023-06-01) - - -### âš  BREAKING CHANGES - -* Update to ACVM 0.13.0 ([#205](https://github.com/noir-lang/acvm-backend-barretenberg/issues/205)) -* added keccakvar constraints ([#213](https://github.com/noir-lang/acvm-backend-barretenberg/issues/213)) -* update pedersen hashes for new 
implementation ([#212](https://github.com/noir-lang/acvm-backend-barretenberg/issues/212)) - -### Features - -* added keccakvar constraints ([91ea65f](https://github.com/noir-lang/acvm-backend-barretenberg/commit/91ea65f6af7039095c7a3af7bc1e4ce302a68a8d)) -* added keccakvar constraints ([#213](https://github.com/noir-lang/acvm-backend-barretenberg/issues/213)) ([91ea65f](https://github.com/noir-lang/acvm-backend-barretenberg/commit/91ea65f6af7039095c7a3af7bc1e4ce302a68a8d)) -* Update to ACVM 0.13.0 ([#205](https://github.com/noir-lang/acvm-backend-barretenberg/issues/205)) ([298446e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/298446ef8b69f528b6e2fd2abb2298d7b0a8118e)) - - -### Bug Fixes - -* Add or cleanup implementations for JS target ([#199](https://github.com/noir-lang/acvm-backend-barretenberg/issues/199)) ([f6134b7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/f6134b7b502cb74882300b0046ab91ab000daf3c)) -* update pedersen hashes for new impl ([9a233ce](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9a233ce8db9984b29b9cce0603f758d5281c89c9)) -* update pedersen hashes for new implementation ([#212](https://github.com/noir-lang/acvm-backend-barretenberg/issues/212)) ([9a233ce](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9a233ce8db9984b29b9cce0603f758d5281c89c9)) - -## [0.2.0](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.2...v0.2.0) (2023-05-22) - - -### âš  BREAKING CHANGES - -* Update to acvm 0.12.0 ([#165](https://github.com/noir-lang/acvm-backend-barretenberg/issues/165)) -* Add serialization logic for RAM and ROM opcodes ([#153](https://github.com/noir-lang/acvm-backend-barretenberg/issues/153)) - -### Features - -* Add serde to `ConstraintSystem` types ([#196](https://github.com/noir-lang/acvm-backend-barretenberg/issues/196)) ([4c04a79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/4c04a79e6d2b0115f3b4526c60f9f7dae8b464ae)) -* Add serialization logic 
for RAM and ROM opcodes ([#153](https://github.com/noir-lang/acvm-backend-barretenberg/issues/153)) ([3d3847d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/3d3847de70e74a8f65c64e165ad15ae3d31f5350)) -* Update to acvm 0.12.0 ([#165](https://github.com/noir-lang/acvm-backend-barretenberg/issues/165)) ([d613c79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/d613c79584a599f4adbd11d2ce3b61403c185b73)) - -## [0.1.2](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.1...v0.1.2) (2023-05-11) - - -### Bug Fixes - -* Remove star dependencies to allow publishing ([#182](https://github.com/noir-lang/acvm-backend-barretenberg/issues/182)) ([1727a79](https://github.com/noir-lang/acvm-backend-barretenberg/commit/1727a79ce7e66d95528f70c445cb4ec1b1ece636)) - -## [0.1.1](https://github.com/noir-lang/acvm-backend-barretenberg/compare/v0.1.0...v0.1.1) (2023-05-11) - - -### Bug Fixes - -* Add description so crate can be published ([#180](https://github.com/noir-lang/acvm-backend-barretenberg/issues/180)) ([caabf94](https://github.com/noir-lang/acvm-backend-barretenberg/commit/caabf9434031c6023a5e3a436c87fba0a1072539)) - -## 0.1.0 (2023-05-10) - - -### âš  BREAKING CHANGES - -* Update to ACVM v0.11.0 ([#151](https://github.com/noir-lang/acvm-backend-barretenberg/issues/151)) -* Add Keccak constraints ([#150](https://github.com/noir-lang/acvm-backend-barretenberg/issues/150)) -* migrate to ACVM 0.10.3 ([#148](https://github.com/noir-lang/acvm-backend-barretenberg/issues/148)) -* remove all crates other than `acvm-backend-barretenberg` and remove workspace ([#147](https://github.com/noir-lang/acvm-backend-barretenberg/issues/147)) -* merge `barretenberg_static_lib` and `barretenberg_wasm` ([#117](https://github.com/noir-lang/acvm-backend-barretenberg/issues/117)) -* remove dead blake2 code ([#137](https://github.com/noir-lang/acvm-backend-barretenberg/issues/137)) -* Implement pseudo-builder pattern for ConstraintSystem & hide struct 
fields ([#120](https://github.com/noir-lang/acvm-backend-barretenberg/issues/120)) -* return boolean rather than `FieldElement` from `verify_signature` ([#123](https://github.com/noir-lang/acvm-backend-barretenberg/issues/123)) -* avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) -* update to acvm 0.9.0 ([#106](https://github.com/noir-lang/acvm-backend-barretenberg/issues/106)) -* Depend upon upstream barretenberg & switch to UltraPlonk ([#84](https://github.com/noir-lang/acvm-backend-barretenberg/issues/84)) -* update to ACVM 0.7.0 ([#90](https://github.com/noir-lang/acvm-backend-barretenberg/issues/90)) -* Remove create_proof and verify functions ([#82](https://github.com/noir-lang/acvm-backend-barretenberg/issues/82)) -* update to acvm v0.5.0 ([#60](https://github.com/noir-lang/acvm-backend-barretenberg/issues/60)) - -### Features - -* **acvm_interop:** Updates to reflect new acvm methods using pk/vk ([#50](https://github.com/noir-lang/acvm-backend-barretenberg/issues/50)) ([cff757d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/cff757dca7971161e4bd25e7a744d910c37c22be)) -* Add Keccak constraints ([#150](https://github.com/noir-lang/acvm-backend-barretenberg/issues/150)) ([ce2b9ed](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ce2b9ed456bd8d2ad8357c15736d62c2a5812add)) -* allow overriding transcript location with BARRETENBERG_TRANSCRIPT env var ([#86](https://github.com/noir-lang/acvm-backend-barretenberg/issues/86)) ([af92b99](https://github.com/noir-lang/acvm-backend-barretenberg/commit/af92b99c7b5f37e9659931af378a851b3658a80b)) -* **ci:** add concurrency group for rust workflow ([#63](https://github.com/noir-lang/acvm-backend-barretenberg/issues/63)) ([5c936bc](https://github.com/noir-lang/acvm-backend-barretenberg/commit/5c936bc63cc3adcf9d43c9c4ce69053566089ad9)) -* Depend upon upstream barretenberg & switch to UltraPlonk 
([#84](https://github.com/noir-lang/acvm-backend-barretenberg/issues/84)) ([8437bf7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8437bf7e08acadf43b55b307545336596a9fe766)) -* Implement pseudo-builder pattern for ConstraintSystem & hide struct fields ([#120](https://github.com/noir-lang/acvm-backend-barretenberg/issues/120)) ([8ed67d6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8ed67d68c71d655e1a6a5c38fa9ea1c3566f771d)) -* Leverage rustls when using downloader crate ([#46](https://github.com/noir-lang/acvm-backend-barretenberg/issues/46)) ([9de36b6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9de36b642d125d1fb4facd1bf60db67946be70ae)) -* merge `barretenberg_static_lib` and `barretenberg_wasm` ([#117](https://github.com/noir-lang/acvm-backend-barretenberg/issues/117)) ([ba1d0d6](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ba1d0d61b94de91b15044d97608907c21bfb5299)) -* migrate to ACVM 0.10.3 ([#148](https://github.com/noir-lang/acvm-backend-barretenberg/issues/148)) ([c9fb9e8](https://github.com/noir-lang/acvm-backend-barretenberg/commit/c9fb9e806f1400a2ff7594a0669bec56025220bb)) -* remove all crates other than `acvm-backend-barretenberg` and remove workspace ([#147](https://github.com/noir-lang/acvm-backend-barretenberg/issues/147)) ([8fe7111](https://github.com/noir-lang/acvm-backend-barretenberg/commit/8fe7111ebdcb043764a83436744662e8c3ca5abc)) -* remove dead blake2 code ([#137](https://github.com/noir-lang/acvm-backend-barretenberg/issues/137)) ([14d8a5b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/14d8a5b893eb1cb91d5bde908643b487b41809d6)) -* replace `downloader` dependency with `reqwest` ([#114](https://github.com/noir-lang/acvm-backend-barretenberg/issues/114)) ([dd62231](https://github.com/noir-lang/acvm-backend-barretenberg/commit/dd62231b8bfcee32e1029d31a07895b16159339c)) -* return boolean from `verify_signature` 
([e560602](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e560602ebbd547386ca4cab35735ffa92e98ac4b)) -* return boolean rather than `FieldElement` from `check_membership` ([#124](https://github.com/noir-lang/acvm-backend-barretenberg/issues/124)) ([a0a338e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a0a338e2295635a07f6b9e497c029160a5f323bc)) -* return boolean rather than `FieldElement` from `verify_signature` ([#123](https://github.com/noir-lang/acvm-backend-barretenberg/issues/123)) ([e560602](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e560602ebbd547386ca4cab35735ffa92e98ac4b)) -* store transcript in `.nargo/backends` directory ([#91](https://github.com/noir-lang/acvm-backend-barretenberg/issues/91)) ([c6b5023](https://github.com/noir-lang/acvm-backend-barretenberg/commit/c6b50231da065e7550bfe8bddf8e46f4cd8002d7)) -* update `aztec_backend_wasm` to use new serialization ([#94](https://github.com/noir-lang/acvm-backend-barretenberg/issues/94)) ([28014d8](https://github.com/noir-lang/acvm-backend-barretenberg/commit/28014d803d052a7f459e03dbd7b5b9210449b1d0)) -* update to acvm 0.9.0 ([#106](https://github.com/noir-lang/acvm-backend-barretenberg/issues/106)) ([ff350fb](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ff350fb111043964b8a14fc0df62508c87506423)) -* Update to ACVM v0.11.0 ([#151](https://github.com/noir-lang/acvm-backend-barretenberg/issues/151)) ([9202415](https://github.com/noir-lang/acvm-backend-barretenberg/commit/92024155532e15f25acb2f3ed8d5ca78da0fddd9)) -* update to acvm v0.5.0 ([#60](https://github.com/noir-lang/acvm-backend-barretenberg/issues/60)) ([74b4d8d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/74b4d8d8b118e4477880c04149e5e9d93d388384)) - - -### Bug Fixes - -* Avoid exposing internals of Assignments type ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* avoid exposing internals of 
Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* fix serialization of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) -* Implement random_get for wasm backend ([#102](https://github.com/noir-lang/acvm-backend-barretenberg/issues/102)) ([9c0f06e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9c0f06ef56f23e2b5794e810f433e36ff2c5d6b5)) -* rename gates to opcodes ([#59](https://github.com/noir-lang/acvm-backend-barretenberg/issues/59)) ([6e05307](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6e053072d8b9c5d93c296f10782251ccb597f902)) -* reorganize and ensure contracts can be compiled in Remix ([#112](https://github.com/noir-lang/acvm-backend-barretenberg/issues/112)) ([7ec5693](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7ec5693f194a79c379ae2952bc17a31ee63a42b9)) -* replace `serialize_circuit` function with `from<&Circuit>` ([#118](https://github.com/noir-lang/acvm-backend-barretenberg/issues/118)) ([94f83a7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/94f83a78e32d91dfb7ae9824923695d9b4c425b0)) -* Replace serialize_circuit function with `from<&Circuit>` ([94f83a7](https://github.com/noir-lang/acvm-backend-barretenberg/commit/94f83a78e32d91dfb7ae9824923695d9b4c425b0)) -* Update bb-sys to resolve bugs in some environments ([#129](https://github.com/noir-lang/acvm-backend-barretenberg/issues/129)) ([e3d4504](https://github.com/noir-lang/acvm-backend-barretenberg/commit/e3d4504f15e1295e637c4da80b1d08c87c267c45)) -* Update dependency containing pk write fix for large general circuits ([#78](https://github.com/noir-lang/acvm-backend-barretenberg/issues/78)) 
([2cb523d](https://github.com/noir-lang/acvm-backend-barretenberg/commit/2cb523d2ab95249157b22e198d9dcd6841c3eed8)) -* Update to bb-sys 0.1.1 and update bb in lockfile ([00bb157](https://github.com/noir-lang/acvm-backend-barretenberg/commit/00bb15779dfb64539eeb3f3bb4c4deeba106f2fe)) -* update to bb-sys 0.1.1 and update bb in lockfile ([#111](https://github.com/noir-lang/acvm-backend-barretenberg/issues/111)) ([00bb157](https://github.com/noir-lang/acvm-backend-barretenberg/commit/00bb15779dfb64539eeb3f3bb4c4deeba106f2fe)) -* use `Barretenberg.call` to query circuit size from wasm ([#121](https://github.com/noir-lang/acvm-backend-barretenberg/issues/121)) ([a775af1](https://github.com/noir-lang/acvm-backend-barretenberg/commit/a775af14137cc7bc2e9d8a063fa718a5a9abe6cb)) - - -### Miscellaneous Chores - -* Remove create_proof and verify functions ([#82](https://github.com/noir-lang/acvm-backend-barretenberg/issues/82)) ([ad0c422](https://github.com/noir-lang/acvm-backend-barretenberg/commit/ad0c4228488457bd155ff381186ecf583f18bfac)) -* update to ACVM 0.7.0 ([#90](https://github.com/noir-lang/acvm-backend-barretenberg/issues/90)) ([6c03687](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6c036870a6a8e26612ab8b4f90a162f7540b42e2)) diff --git a/tooling/backend_interface/Cargo.toml b/tooling/backend_interface/Cargo.toml deleted file mode 100644 index f6b5d5d013..0000000000 --- a/tooling/backend_interface/Cargo.toml +++ /dev/null @@ -1,35 +0,0 @@ -[package] -name = "backend-interface" -description = "The definition of the backend CLI interface which Nargo uses for proving/verifying ACIR circuits." 
-version.workspace = true -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acvm.workspace = true -dirs.workspace = true -thiserror.workspace = true -serde_json.workspace = true -bb_abstraction_leaks.workspace = true -tracing.workspace = true - -tempfile.workspace = true - -## bb binary downloading -tar = "~0.4.15" -flate2 = "~1.0.1" -reqwest = { version = "0.11.20", default-features = false, features = [ - "rustls-tls", - "blocking", -] } - -[dev-dependencies] -test-binary = "3.0.1" - -[build-dependencies] -build-target = "0.4.0" -const_format.workspace = true diff --git a/tooling/backend_interface/src/cli/contract.rs b/tooling/backend_interface/src/cli/contract.rs deleted file mode 100644 index e83fc1909b..0000000000 --- a/tooling/backend_interface/src/cli/contract.rs +++ /dev/null @@ -1,71 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VerifyCommand will call the barretenberg binary -/// to return a solidity library with the verification key -/// that can be used to verify proofs on-chain. -/// -/// This does not return a Solidity file that is able -/// to verify a proof. See acvm_interop/contract.sol for the -/// remaining logic that is missing. 
-pub(crate) struct ContractCommand { - pub(crate) crs_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ContractCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("contract") - .arg("-c") - .arg(self.crs_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - - if output.status.success() { - String::from_utf8(output.stdout) - .map_err(|error| BackendError::InvalidUTF8Vector(error.into_bytes())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn contract_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let vk_path = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - - let write_vk_command = super::WriteVkCommand { - bytecode_path, - vk_path_output: vk_path.clone(), - crs_path: crs_path.clone(), - }; - write_vk_command.run(backend.binary_path())?; - - let contract_command = ContractCommand { vk_path, crs_path }; - contract_command.run(backend.binary_path())?; - - drop(temp_directory); - - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/gates.rs b/tooling/backend_interface/src/cli/gates.rs deleted file mode 100644 index aca05f0232..0000000000 --- a/tooling/backend_interface/src/cli/gates.rs +++ /dev/null @@ -1,64 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// GatesCommand will call the barretenberg binary -/// to return the number of gates needed to create a proof -/// for the given bytecode. 
-pub(crate) struct GatesCommand { - pub(crate) crs_path: PathBuf, - pub(crate) bytecode_path: PathBuf, -} - -impl GatesCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let output = std::process::Command::new(binary_path) - .arg("gates") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.bytecode_path) - .output()?; - - if !output.status.success() { - return Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))); - } - // Note: barretenberg includes the newline, so that subsequent prints to stdout - // are not on the same line as the gates output. - - const EXPECTED_BYTES: usize = 8; - let gates_bytes: [u8; EXPECTED_BYTES] = - output.stdout.as_slice().try_into().map_err(|_| { - BackendError::UnexpectedNumberOfBytes(EXPECTED_BYTES, output.stdout.clone()) - })?; - - // Convert bytes to u64 in little-endian format - let value = u64::from_le_bytes(gates_bytes); - - Ok(value as u32) - } -} - -#[test] -fn gate_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - - let gate_command = GatesCommand { crs_path, bytecode_path }; - - let output = gate_command.run(backend.binary_path())?; - // Mock backend always returns zero gates. 
- assert_eq!(output, 0); - - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/mod.rs b/tooling/backend_interface/src/cli/mod.rs deleted file mode 100644 index df43bd5cc2..0000000000 --- a/tooling/backend_interface/src/cli/mod.rs +++ /dev/null @@ -1,39 +0,0 @@ -// Reference: https://github.com/AztecProtocol/aztec-packages/blob/master/barretenberg/cpp/src/barretenberg/bb/main.cpp - -mod contract; -mod gates; -mod proof_as_fields; -mod prove; -mod verify; -mod version; -mod vk_as_fields; -mod write_vk; - -pub(crate) use contract::ContractCommand; -pub(crate) use gates::GatesCommand; -pub(crate) use proof_as_fields::ProofAsFieldsCommand; -pub(crate) use prove::ProveCommand; -pub(crate) use verify::VerifyCommand; -pub(crate) use version::VersionCommand; -pub(crate) use vk_as_fields::VkAsFieldsCommand; -pub(crate) use write_vk::WriteVkCommand; - -#[test] -fn no_command_provided_works() -> Result<(), crate::BackendError> { - // This is a simple test to check that the binaries work - - let backend = crate::get_mock_backend()?; - - let output = std::process::Command::new(backend.binary_path()).output()?; - - let stderr = string_from_stderr(&output.stderr); - // Assert help message is printed due to no command being provided. 
- assert!(stderr.contains("Usage: mock_backend ")); - - Ok(()) -} - -// Converts a stderr byte array to a string (including invalid characters) -fn string_from_stderr(stderr: &[u8]) -> String { - String::from_utf8_lossy(stderr).to_string() -} diff --git a/tooling/backend_interface/src/cli/proof_as_fields.rs b/tooling/backend_interface/src/cli/proof_as_fields.rs deleted file mode 100644 index 7eb1c1ef35..0000000000 --- a/tooling/backend_interface/src/cli/proof_as_fields.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// `ProofAsFieldsCommand` will call the barretenberg binary -/// to split a proof into a representation as [`FieldElement`]s. -pub(crate) struct ProofAsFieldsCommand { - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl ProofAsFieldsCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("proof_as_fields") - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string())) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/tooling/backend_interface/src/cli/prove.rs b/tooling/backend_interface/src/cli/prove.rs deleted file mode 100644 index c63d8afab5..0000000000 --- a/tooling/backend_interface/src/cli/prove.rs +++ /dev/null @@ -1,66 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -use super::string_from_stderr; - -/// ProveCommand will call the barretenberg binary -/// to create a proof, given the witness and the bytecode. 
-/// -/// Note:Internally barretenberg will create and discard the -/// proving key, so this is not returned. -/// -/// The proof will be written to the specified output file. -pub(crate) struct ProveCommand { - pub(crate) crs_path: PathBuf, - pub(crate) bytecode_path: PathBuf, - pub(crate) witness_path: PathBuf, -} - -impl ProveCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result, BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("prove") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.bytecode_path) - .arg("-w") - .arg(self.witness_path) - .arg("-o") - .arg("-"); - - let output = command.output()?; - if output.status.success() { - Ok(output.stdout) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] -fn prove_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let witness_path = temp_directory_path.join("witness.tr"); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let crs_path = backend.backend_directory(); - let prove_command = ProveCommand { crs_path, bytecode_path, witness_path }; - - let proof = prove_command.run(backend.binary_path())?; - assert_eq!(proof, "proof".as_bytes()); - drop(temp_directory); - - Ok(()) -} diff --git a/tooling/backend_interface/src/cli/verify.rs b/tooling/backend_interface/src/cli/verify.rs deleted file mode 100644 index 1a4ba50b7d..0000000000 --- a/tooling/backend_interface/src/cli/verify.rs +++ /dev/null @@ -1,74 +0,0 @@ -use std::path::{Path, PathBuf}; - -use crate::BackendError; - -/// VerifyCommand will call the barretenberg binary -/// to verify a 
proof -pub(crate) struct VerifyCommand { - pub(crate) crs_path: PathBuf, - pub(crate) proof_path: PathBuf, - pub(crate) vk_path: PathBuf, -} - -impl VerifyCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command - .arg("verify") - .arg("-c") - .arg(self.crs_path) - .arg("-p") - .arg(self.proof_path) - .arg("-k") - .arg(self.vk_path); - - let output = command.output()?; - - // We currently do not distinguish between an invalid proof and an error inside the backend. - Ok(output.status.success()) - } -} - -#[test] -fn verify_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - use super::{ProveCommand, WriteVkCommand}; - use crate::proof_system::write_to_file; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let witness_path = temp_directory_path.join("witness.tr"); - let proof_path = temp_directory_path.join("1_mul.proof"); - let vk_path_output = temp_directory_path.join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - std::fs::File::create(&witness_path).expect("file should be created"); - - let write_vk_command = WriteVkCommand { - bytecode_path: bytecode_path.clone(), - crs_path: crs_path.clone(), - vk_path_output: vk_path_output.clone(), - }; - - write_vk_command.run(backend.binary_path())?; - - let prove_command = ProveCommand { crs_path: crs_path.clone(), bytecode_path, witness_path }; - let proof = prove_command.run(backend.binary_path())?; - - write_to_file(&proof, &proof_path); - - let verify_command = VerifyCommand { crs_path, proof_path, vk_path: vk_path_output }; - - let verified = verify_command.run(backend.binary_path())?; - assert!(verified); - - drop(temp_directory); - Ok(()) -} 
diff --git a/tooling/backend_interface/src/cli/version.rs b/tooling/backend_interface/src/cli/version.rs deleted file mode 100644 index 83ab72a870..0000000000 --- a/tooling/backend_interface/src/cli/version.rs +++ /dev/null @@ -1,29 +0,0 @@ -use std::path::Path; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VersionCommand will call the backend binary -/// to query installed version. -pub(crate) struct VersionCommand; - -impl VersionCommand { - pub(crate) fn run(self, binary_path: &Path) -> Result { - let mut command = std::process::Command::new(binary_path); - - command.arg("--version"); - - let output = command.output()?; - if output.status.success() { - match String::from_utf8(output.stdout) { - Ok(result) => Ok(result), - Err(_) => Err(BackendError::CommandFailed( - "Unexpected output from --version check.".to_owned(), - )), - } - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/tooling/backend_interface/src/cli/vk_as_fields.rs b/tooling/backend_interface/src/cli/vk_as_fields.rs deleted file mode 100644 index 1b0212241c..0000000000 --- a/tooling/backend_interface/src/cli/vk_as_fields.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::{Path, PathBuf}; - -use acvm::FieldElement; - -use crate::BackendError; - -use super::string_from_stderr; - -/// VkAsFieldsCommand will call the barretenberg binary -/// to split a verification key into a representation as [`FieldElement`]s. -/// -/// The hash of the verification key will also be returned. 
-pub(crate) struct VkAsFieldsCommand { - pub(crate) vk_path: PathBuf, -} - -impl VkAsFieldsCommand { - pub(crate) fn run( - self, - binary_path: &Path, - ) -> Result<(FieldElement, Vec), BackendError> { - let mut command = std::process::Command::new(binary_path); - - command.arg("vk_as_fields").arg("-k").arg(self.vk_path).arg("-o").arg("-"); - - let output = command.output()?; - if output.status.success() { - let string_output = String::from_utf8(output.stdout).unwrap(); - let mut fields: Vec = serde_json::from_str(&string_output) - .map_err(|err| BackendError::CommandFailed(err.to_string()))?; - - // The first element of this vector is the hash of the verification key, we want to split that off. - let hash = fields.remove(0); - Ok((hash, fields)) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} diff --git a/tooling/backend_interface/src/cli/write_vk.rs b/tooling/backend_interface/src/cli/write_vk.rs deleted file mode 100644 index da9fc04cbe..0000000000 --- a/tooling/backend_interface/src/cli/write_vk.rs +++ /dev/null @@ -1,58 +0,0 @@ -use std::path::{Path, PathBuf}; - -use super::string_from_stderr; -use crate::BackendError; - -/// WriteCommand will call the barretenberg binary -/// to write a verification key to a file -pub(crate) struct WriteVkCommand { - pub(crate) crs_path: PathBuf, - pub(crate) bytecode_path: PathBuf, - pub(crate) vk_path_output: PathBuf, -} - -impl WriteVkCommand { - #[tracing::instrument(level = "trace", name = "vk_generation", skip_all)] - pub(crate) fn run(self, binary_path: &Path) -> Result<(), BackendError> { - let mut command = std::process::Command::new(binary_path); - - command - .arg("write_vk") - .arg("-c") - .arg(self.crs_path) - .arg("-b") - .arg(self.bytecode_path) - .arg("-o") - .arg(self.vk_path_output); - - let output = command.output()?; - if output.status.success() { - Ok(()) - } else { - Err(BackendError::CommandFailed(string_from_stderr(&output.stderr))) - } - } -} - -#[test] 
-fn write_vk_command() -> Result<(), BackendError> { - use tempfile::tempdir; - - let backend = crate::get_mock_backend()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path(); - let bytecode_path = temp_directory_path.join("acir.gz"); - let vk_path_output = temp_directory.path().join("vk"); - - let crs_path = backend.backend_directory(); - - std::fs::File::create(&bytecode_path).expect("file should be created"); - - let write_vk_command = WriteVkCommand { bytecode_path, crs_path, vk_path_output }; - - write_vk_command.run(backend.binary_path())?; - drop(temp_directory); - - Ok(()) -} diff --git a/tooling/backend_interface/src/download.rs b/tooling/backend_interface/src/download.rs deleted file mode 100644 index 60ecb14e64..0000000000 --- a/tooling/backend_interface/src/download.rs +++ /dev/null @@ -1,58 +0,0 @@ -use std::{ - io::{Cursor, ErrorKind}, - path::Path, -}; - -/// Downloads a zipped archive and unpacks the backend binary to `destination_path`. -/// -/// # Backend Requirements -/// -/// In order for a backend to be compatible with this function: -/// - `backend_url` must serve a gzipped tarball. -/// - The tarball must only contain the backend's binary. -/// - The binary file must be located at the archive root. 
-pub fn download_backend(backend_url: &str, destination_path: &Path) -> std::io::Result<()> { - use flate2::read::GzDecoder; - use tar::Archive; - use tempfile::tempdir; - - // Download sources - let compressed_file: Cursor> = download_binary_from_url(backend_url).map_err(|_| { - std::io::Error::new( - ErrorKind::Other, - format!("Could not download backend from install url: {backend_url}"), - ) - })?; - - // Unpack the tarball - let gz_decoder = GzDecoder::new(compressed_file); - let mut archive = Archive::new(gz_decoder); - - let temp_directory = tempdir()?; - archive.unpack(&temp_directory)?; - - // Assume that the archive contains a single file which is the backend binary. - let mut archive_files = std::fs::read_dir(&temp_directory)?; - let temp_binary_path = archive_files.next().unwrap()?.path(); - - // Create directory to place binary in. - std::fs::create_dir_all(destination_path.parent().unwrap())?; - - // Rename the binary to the desired name - std::fs::copy(temp_binary_path, destination_path)?; - - drop(temp_directory); - - Ok(()) -} - -/// Try to download the specified URL into a buffer which is returned. 
-fn download_binary_from_url(url: &str) -> Result>, reqwest::Error> { - let response = reqwest::blocking::get(url)?; - - let bytes = response.bytes()?; - - // TODO: Check SHA of downloaded binary - - Ok(Cursor::new(bytes.to_vec())) -} diff --git a/tooling/backend_interface/src/lib.rs b/tooling/backend_interface/src/lib.rs deleted file mode 100644 index eab9885255..0000000000 --- a/tooling/backend_interface/src/lib.rs +++ /dev/null @@ -1,150 +0,0 @@ -#![warn(unused_crate_dependencies, unused_extern_crates)] -#![warn(unreachable_pub)] - -use std::path::PathBuf; - -mod cli; -mod download; -mod proof_system; -mod smart_contract; - -pub use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; -use bb_abstraction_leaks::BB_VERSION; -use cli::VersionCommand; -pub use download::download_backend; -use tracing::warn; - -const BACKENDS_DIR: &str = ".nargo/backends"; - -pub fn backends_directory() -> PathBuf { - let home_directory = dirs::home_dir().unwrap(); - home_directory.join(BACKENDS_DIR) -} - -#[cfg(test)] -test_binary::build_test_binary_once!(mock_backend, "test-binaries"); - -#[cfg(test)] -fn get_mock_backend() -> Result { - std::env::set_var("NARGO_BACKEND_PATH", path_to_mock_backend()); - - let mock_backend = Backend::new("mock_backend".to_string()); - mock_backend.assert_binary_exists()?; - - Ok(mock_backend) -} - -#[derive(Debug, thiserror::Error)] -pub enum BackendError { - #[error(transparent)] - IoError(#[from] std::io::Error), - - #[error("Backend binary does not exist")] - MissingBinary, - - #[error("The backend responded with a malformed UTF8 byte vector: {0:?}")] - InvalidUTF8Vector(Vec), - - #[error( - "The backend responded with a unexpected number of bytes. 
Expected: {0} but got {} ({1:?})", .1.len() - )] - UnexpectedNumberOfBytes(usize, Vec), - - #[error("The backend encountered an error: {0:?}")] - CommandFailed(String), -} - -#[derive(Debug)] -pub struct Backend { - name: String, - binary_path: PathBuf, -} - -impl Backend { - pub fn new(name: String) -> Backend { - let binary_path = if let Some(binary_path) = std::env::var_os("NARGO_BACKEND_PATH") { - PathBuf::from(binary_path) - } else { - const BINARY_NAME: &str = "backend_binary"; - - backends_directory().join(&name).join(BINARY_NAME) - }; - Backend { name, binary_path } - } - - pub fn name(&self) -> &str { - &self.name - } - - fn binary_path(&self) -> &PathBuf { - &self.binary_path - } - - fn assert_binary_exists(&self) -> Result<&PathBuf, BackendError> { - let binary_path = self.binary_path(); - if binary_path.is_file() { - Ok(binary_path) - } else { - if self.name == ACVM_BACKEND_BARRETENBERG { - // If we're trying to use barretenberg, automatically go and install it. - let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); - download_backend(&bb_url, binary_path)?; - return Ok(binary_path); - } - Err(BackendError::MissingBinary) - } - } - - fn backend_directory(&self) -> PathBuf { - self.binary_path() - .parent() - .expect("backend binary should have a parent directory") - .to_path_buf() - } - - fn crs_directory(&self) -> PathBuf { - self.backend_directory().join("crs") - } - - fn assert_correct_version(&self) -> Result<&PathBuf, BackendError> { - let binary_path = self.binary_path(); - if binary_path.to_string_lossy().contains(ACVM_BACKEND_BARRETENBERG) { - match VersionCommand.run(binary_path) { - // If version matches then do nothing. - Ok(version_string) if version_string == BB_VERSION => (), - - // If version doesn't match then download the correct version. - Ok(version_string) => { - warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. 
Downloading expected version..."); - let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); - download_backend(&bb_url, binary_path)?; - } - - // If `bb` fails to report its version, then attempt to fix it by re-downloading the binary. - Err(_) => { - warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version..."); - let bb_url = std::env::var("BB_BINARY_URL") - .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); - download_backend(&bb_url, binary_path)?; - } - } - } - Ok(binary_path) - } -} - -#[cfg(test)] -mod backend { - use crate::{Backend, BackendError}; - - #[test] - fn raises_error_on_missing_binary() { - let bad_backend = Backend::new("i_don't_exist".to_string()); - - let binary_path = bad_backend.assert_binary_exists(); - - assert!(matches!(binary_path, Err(BackendError::MissingBinary))); - } -} diff --git a/tooling/backend_interface/src/proof_system.rs b/tooling/backend_interface/src/proof_system.rs deleted file mode 100644 index 20a6dcf70f..0000000000 --- a/tooling/backend_interface/src/proof_system.rs +++ /dev/null @@ -1,169 +0,0 @@ -use std::fs::File; -use std::io::Write; -use std::path::Path; - -use acvm::acir::{ - circuit::Program, - native_types::{WitnessMap, WitnessStack}, -}; -use acvm::FieldElement; -use tempfile::tempdir; -use tracing::warn; - -use crate::cli::{ - GatesCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, - WriteVkCommand, -}; -use crate::{Backend, BackendError}; - -impl Backend { - pub fn get_exact_circuit_size(&self, program: &Program) -> Result { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the circuit - let circuit_path = 
temp_directory.join("circuit").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &circuit_path); - - GatesCommand { crs_path: self.crs_directory(), bytecode_path: circuit_path } - .run(binary_path) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn prove( - &self, - program: &Program, - witness_stack: WitnessStack, - ) -> Result, BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the witness - let serialized_witnesses: Vec = - witness_stack.try_into().expect("could not serialize witness map"); - let witness_path = temp_directory.join("witness").with_extension("tr"); - write_to_file(&serialized_witnesses, &witness_path); - - // Create a temporary file for the circuit - // - let bytecode_path = temp_directory.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create proof and store it in the specified path - let proof_with_public_inputs = - ProveCommand { crs_path: self.crs_directory(), bytecode_path, witness_path } - .run(binary_path)?; - - let proof = bb_abstraction_leaks::remove_public_inputs( - // TODO(https://github.com/noir-lang/noir/issues/4428) - program.functions[0].public_inputs().0.len(), - &proof_with_public_inputs, - ); - Ok(proof) - } - - #[tracing::instrument(level = "trace", skip_all)] - pub fn verify( - &self, - proof: &[u8], - public_inputs: WitnessMap, - program: &Program, - ) -> Result { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - 
- // Create a temporary file for the proof - let proof_with_public_inputs = - bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs); - let proof_path = temp_directory.join("proof").with_extension("proof"); - write_to_file(&proof_with_public_inputs, &proof_path); - - // Create a temporary file for the circuit - let bytecode_path = temp_directory.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - bytecode_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - // Verify the proof - VerifyCommand { crs_path: self.crs_directory(), proof_path, vk_path }.run(binary_path) - } - - pub fn get_intermediate_proof_artifacts( - &self, - program: &Program, - proof: &[u8], - public_inputs: WitnessMap, - ) -> Result<(Vec, FieldElement, Vec), BackendError> { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory = temp_directory.path().to_path_buf(); - - // Create a temporary file for the circuit - // - let bytecode_path = temp_directory.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - bytecode_path, - vk_path_output: vk_path.clone(), - } - .run(binary_path)?; - - // Create a temporary file for the proof - - let proof_with_public_inputs = - bb_abstraction_leaks::prepend_public_inputs(proof.to_vec(), public_inputs); - let proof_path = 
temp_directory.join("proof").with_extension("proof"); - write_to_file(&proof_with_public_inputs, &proof_path); - - // Now ready to generate intermediate artifacts. - - let proof_as_fields = - ProofAsFieldsCommand { proof_path, vk_path: vk_path.clone() }.run(binary_path)?; - - let (vk_hash, vk_as_fields) = VkAsFieldsCommand { vk_path }.run(binary_path)?; - - Ok((proof_as_fields, vk_hash, vk_as_fields)) - } -} - -pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { - let display = path.display(); - - let mut file = match File::create(path) { - Err(why) => panic!("couldn't create {display}: {why}"), - Ok(file) => file, - }; - - match file.write_all(bytes) { - Err(why) => panic!("couldn't write to {display}: {why}"), - Ok(_) => display.to_string(), - } -} diff --git a/tooling/backend_interface/src/smart_contract.rs b/tooling/backend_interface/src/smart_contract.rs deleted file mode 100644 index 153ab52c83..0000000000 --- a/tooling/backend_interface/src/smart_contract.rs +++ /dev/null @@ -1,70 +0,0 @@ -use super::proof_system::write_to_file; -use crate::{ - cli::{ContractCommand, WriteVkCommand}, - Backend, BackendError, -}; -use acvm::acir::circuit::Program; -use tempfile::tempdir; - -impl Backend { - pub fn eth_contract(&self, program: &Program) -> Result { - let binary_path = self.assert_binary_exists()?; - self.assert_correct_version()?; - - let temp_directory = tempdir().expect("could not create a temporary directory"); - let temp_directory_path = temp_directory.path().to_path_buf(); - - // Create a temporary file for the circuit - let bytecode_path = temp_directory_path.join("program").with_extension("bytecode"); - let serialized_program = Program::serialize_program(program); - write_to_file(&serialized_program, &bytecode_path); - - // Create the verification key and write it to the specified path - let vk_path = temp_directory_path.join("vk"); - - WriteVkCommand { - crs_path: self.crs_directory(), - bytecode_path, - vk_path_output: vk_path.clone(), 
- } - .run(binary_path)?; - - ContractCommand { crs_path: self.crs_directory(), vk_path }.run(binary_path) - } -} - -#[cfg(test)] -mod tests { - use std::collections::BTreeSet; - - use acvm::acir::{ - circuit::{Circuit, ExpressionWidth, Opcode, Program, PublicInputs}, - native_types::{Expression, Witness}, - }; - - use crate::{get_mock_backend, BackendError}; - - #[test] - fn test_smart_contract() -> Result<(), BackendError> { - let expression = &(Witness(1) + Witness(2)) - &Expression::from(Witness(3)); - let constraint = Opcode::AssertZero(expression); - - let circuit = Circuit { - current_witness_index: 4, - expression_width: ExpressionWidth::Bounded { width: 4 }, - opcodes: vec![constraint], - private_parameters: BTreeSet::from([Witness(1), Witness(2)]), - public_parameters: PublicInputs::default(), - return_values: PublicInputs::default(), - assert_messages: Default::default(), - recursive: false, - }; - let program = Program { functions: vec![circuit], unconstrained_functions: Vec::new() }; - - let contract = get_mock_backend()?.eth_contract(&program)?; - - assert!(contract.contains("contract VerifierContract")); - - Ok(()) - } -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock b/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock deleted file mode 100644 index 3c14a93690..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock +++ /dev/null @@ -1,223 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "anstream" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" - -[[package]] -name = "anstyle-parse" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" -dependencies = [ - "windows-sys", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" -dependencies = [ - "anstyle", - "windows-sys", -] - -[[package]] -name = "clap" -version = "4.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.4.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" 
-dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "clap_lex" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" - -[[package]] -name = "colorchoice" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - -[[package]] -name = "mock_backend" -version = "0.1.0" -dependencies = [ - "clap", -] - -[[package]] -name = "proc-macro2" -version = "1.0.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "syn" -version = "2.0.48" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "utf8parse" -version = "0.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" diff --git a/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml b/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml deleted file mode 100644 index f527b03a7b..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[workspace] - -[package] -name = "mock_backend" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -clap = { version = "4.3.19", features = ["derive"] } diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs deleted file mode 100644 index 7ee41121d6..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/contract_cmd.rs +++ /dev/null @@ -1,21 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct ContractCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'k')] - pub(crate) vk_path: PathBuf, - - #[clap(short = 'o')] - pub(crate) contract_path: PathBuf, -} - -pub(crate) fn run(args: ContractCommand) { - assert!(args.vk_path.is_file(), "Could not find vk file at provided path"); - - std::io::stdout().write_all(b"contract VerifierContract {}").unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs deleted file mode 100644 index 3cc397d329..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/gates_cmd.rs +++ /dev/null @@ -1,18 +0,0 @@ -use 
clap::Args; -use std::io::Write; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct GatesCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'b')] - pub(crate) bytecode_path: PathBuf, -} - -pub(crate) fn run(args: GatesCommand) { - assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - - std::io::stdout().write_all(&0u64.to_le_bytes()).unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/main.rs b/tooling/backend_interface/test-binaries/mock_backend/src/main.rs deleted file mode 100644 index 74ea82d28f..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/main.rs +++ /dev/null @@ -1,41 +0,0 @@ -#![forbid(unsafe_code)] -#![warn(unreachable_pub)] -#![warn(clippy::semicolon_if_nothing_returned)] -#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] - -use clap::{Parser, Subcommand}; - -mod contract_cmd; -mod gates_cmd; -mod prove_cmd; -mod verify_cmd; -mod write_vk_cmd; - -#[derive(Parser, Debug)] -#[command(name = "mock_backend")] -struct BackendCli { - #[command(subcommand)] - command: BackendCommand, -} - -#[derive(Subcommand, Clone, Debug)] -enum BackendCommand { - Contract(contract_cmd::ContractCommand), - Gates(gates_cmd::GatesCommand), - Prove(prove_cmd::ProveCommand), - Verify(verify_cmd::VerifyCommand), - #[command(name = "write_vk")] - WriteVk(write_vk_cmd::WriteVkCommand), -} - -fn main() { - let BackendCli { command } = BackendCli::parse(); - - match command { - BackendCommand::Contract(args) => contract_cmd::run(args), - BackendCommand::Gates(args) => gates_cmd::run(args), - BackendCommand::Prove(args) => prove_cmd::run(args), - BackendCommand::Verify(args) => verify_cmd::run(args), - BackendCommand::WriteVk(args) => write_vk_cmd::run(args), - }; -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs 
b/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs deleted file mode 100644 index 3967778d4e..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/prove_cmd.rs +++ /dev/null @@ -1,25 +0,0 @@ -use clap::Args; -use std::io::Write; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct ProveCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'b')] - pub(crate) bytecode_path: PathBuf, - - #[clap(short = 'w')] - pub(crate) witness_path: PathBuf, - - #[clap(short = 'o')] - pub(crate) proof_path: PathBuf, -} - -pub(crate) fn run(args: ProveCommand) { - assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - assert!(args.witness_path.is_file(), "Could not find witness file at provided path"); - - std::io::stdout().write_all(b"proof").unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs deleted file mode 100644 index 1a715eea88..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/verify_cmd.rs +++ /dev/null @@ -1,24 +0,0 @@ -use clap::Args; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct VerifyCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'p')] - pub(crate) proof_path: PathBuf, - - #[clap(short = 'k')] - pub(crate) vk_path: PathBuf, - - #[clap(short = 'r')] - pub(crate) is_recursive: bool, -} - -pub(crate) fn run(args: VerifyCommand) { - assert!(args.vk_path.is_file(), "Could not find verification key file at provided path"); - assert!(args.proof_path.is_file(), "Could not find proof file at provided path"); - - std::fs::write(args.proof_path, "proof").unwrap(); -} diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs deleted file mode 
100644 index fcee224e85..0000000000 --- a/tooling/backend_interface/test-binaries/mock_backend/src/write_vk_cmd.rs +++ /dev/null @@ -1,20 +0,0 @@ -use clap::Args; -use std::path::PathBuf; - -#[derive(Debug, Clone, Args)] -pub(crate) struct WriteVkCommand { - #[clap(short = 'c')] - pub(crate) crs_path: Option, - - #[clap(short = 'b')] - pub(crate) bytecode_path: PathBuf, - - #[clap(short = 'o')] - pub(crate) vk_path: PathBuf, -} - -pub(crate) fn run(args: WriteVkCommand) { - assert!(args.bytecode_path.is_file(), "Could not find bytecode file at provided path"); - - std::fs::write(args.vk_path, "vk").unwrap(); -} diff --git a/tooling/bb_abstraction_leaks/Cargo.toml b/tooling/bb_abstraction_leaks/Cargo.toml deleted file mode 100644 index 972c78831a..0000000000 --- a/tooling/bb_abstraction_leaks/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "bb_abstraction_leaks" -description = "A crate which encapsulates knowledge about Barretenberg which is currently leaking into Nargo" -version = "0.11.0" -authors.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acvm.workspace = true - -[build-dependencies] -build-target = "0.4.0" -const_format.workspace = true diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs deleted file mode 100644 index 45da7f9d00..0000000000 --- a/tooling/bb_abstraction_leaks/build.rs +++ /dev/null @@ -1,58 +0,0 @@ -use build_target::{Arch, Os}; -use const_format::formatcp; - -// Useful for printing debugging messages during the build -// macro_rules! 
p { -// ($($tokens: tt)*) => { -// println!("cargo:warning={}", format!($($tokens)*)) -// } -// } - -const USERNAME: &str = "AztecProtocol"; -const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.38.0"; -const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); - -const API_URL: &str = - formatcp!("https://github.com/{}/{}/releases/download/{}", USERNAME, REPO, TAG); - -fn main() -> Result<(), String> { - // We need to inject which OS we're building for so that we can download the correct barretenberg binary. - let os = match build_target::target_os().unwrap() { - os @ (Os::Linux | Os::MacOs) => os, - Os::Windows => todo!("Windows is not currently supported"), - os_name => panic!("Unsupported OS {os_name}"), - }; - - let arch = match build_target::target_arch().unwrap() { - arch @ (Arch::X86_64 | Arch::AARCH64) => arch, - arch_name => panic!("Unsupported Architecture {arch_name}"), - }; - - // Arm builds of linux are not supported - // We do not panic because we allow users to run nargo without a backend. 
- if let (Os::Linux, Arch::AARCH64) = (&os, &arch) { - println!( - "cargo:warning=ARM64 builds of linux are not supported for the barretenberg binary" - ); - }; - - println!("cargo:rustc-env=BB_BINARY_URL={}", get_bb_download_url(arch, os)); - println!("cargo:rustc-env=BB_VERSION={}", VERSION); - - Ok(()) -} - -fn get_bb_download_url(target_arch: Arch, target_os: Os) -> String { - let archive_name = match target_os { - Os::Linux => "barretenberg-x86_64-linux-gnu.tar.gz", - Os::MacOs => match target_arch { - Arch::AARCH64 => "barretenberg-aarch64-apple-darwin.tar.gz", - Arch::X86_64 => "barretenberg-x86_64-apple-darwin.tar.gz", - arch => panic!("unsupported arch {arch}"), - }, - os => panic!("Unsupported OS {os}"), - }; - - format!("{API_URL}/{archive_name}") -} diff --git a/tooling/bb_abstraction_leaks/src/lib.rs b/tooling/bb_abstraction_leaks/src/lib.rs deleted file mode 100644 index 56a4f58cd2..0000000000 --- a/tooling/bb_abstraction_leaks/src/lib.rs +++ /dev/null @@ -1,26 +0,0 @@ -#![warn(unused_crate_dependencies, unused_extern_crates)] -#![warn(unreachable_pub)] - -use acvm::{acir::native_types::WitnessMap, FieldElement}; - -pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; -pub const BB_DOWNLOAD_URL: &str = env!("BB_BINARY_URL"); -pub const BB_VERSION: &str = env!("BB_VERSION"); - -/// Removes the public inputs which are prepended to a proof by Barretenberg. -pub fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec { - // Barretenberg prepends the public inputs onto the proof so we need to remove - // the first `num_pub_inputs` field elements. - let num_bytes_to_remove = num_pub_inputs * (FieldElement::max_num_bytes() as usize); - proof[num_bytes_to_remove..].to_vec() -} - -/// Prepends a set of public inputs to a proof. -pub fn prepend_public_inputs(proof: Vec, public_inputs: WitnessMap) -> Vec { - // We omit any unassigned witnesses. 
- // Witness values should be ordered by their index but we skip over any indices without an assignment. - let public_inputs_bytes = - public_inputs.into_iter().flat_map(|(_, assignment)| assignment.to_be_bytes()); - - public_inputs_bytes.chain(proof).collect() -} diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index ea32c864a0..646beaf009 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -862,7 +862,11 @@ mod tests { let opcodes = vec![ Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, - Opcode::MemoryInit { block_id: BlockId(0), init: vec![] }, + Opcode::MemoryInit { + block_id: BlockId(0), + init: vec![], + block_type: acvm::acir::circuit::opcodes::BlockType::Memory, + }, Opcode::BrilligCall { id: 0, inputs: vec![], outputs: vec![], predicate: None }, Opcode::AssertZero(Expression::default()), ]; diff --git a/tooling/debugger/tests/debug.rs b/tooling/debugger/tests/debug.rs index b104a2c84a..313b6b3059 100644 --- a/tooling/debugger/tests/debug.rs +++ b/tooling/debugger/tests/debug.rs @@ -16,12 +16,6 @@ mod tests { let mut dbg_session = spawn_bash(Some(timeout_seconds * 1000)).expect("Could not start bash session"); - // Set backend to `/dev/null` to force an error if nargo tries to speak to a backend. - dbg_session - .send_line("export NARGO_BACKEND_PATH=/dev/null") - .expect("Could not export NARGO_BACKEND_PATH."); - dbg_session.wait_for_prompt().expect("Could not export NARGO_BACKEND_PATH."); - // Start debugger and test that it loads for the given program. 
dbg_session .execute( diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index be9b83e02f..05345b96c8 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -345,7 +345,7 @@ fn prepare_package_from_source_string() { let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver); let (mut context, crate_id) = crate::prepare_source(source.to_string(), &mut state); - let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false, false); let main_func_id = context.get_main_function(&crate_id); assert!(main_func_id.is_some()); } diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index 355bb7832c..3856bdc79e 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -56,7 +56,7 @@ pub(super) fn on_did_change_text_document( state.input_files.insert(params.text_document.uri.to_string(), text.clone()); let (mut context, crate_id) = prepare_source(text, state); - let _ = check_crate(&mut context, crate_id, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false); let workspace = match resolve_workspace_for_source_path( params.text_document.uri.to_file_path().unwrap().as_path(), @@ -139,7 +139,7 @@ fn process_noir_document( let (mut context, crate_id) = prepare_package(&workspace_file_manager, &parsed_files, package); - let file_diagnostics = match check_crate(&mut context, crate_id, false, false) { + let file_diagnostics = match check_crate(&mut context, crate_id, false, false, false) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; diff --git a/tooling/lsp/src/requests/code_lens_request.rs b/tooling/lsp/src/requests/code_lens_request.rs index 893ba33d84..744bddedd9 100644 --- a/tooling/lsp/src/requests/code_lens_request.rs +++ b/tooling/lsp/src/requests/code_lens_request.rs @@ -67,7 +67,7 
@@ fn on_code_lens_request_inner( let (mut context, crate_id) = prepare_source(source_string, state); // We ignore the warnings and errors produced by compilation for producing code lenses // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false); let collected_lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); diff --git a/tooling/lsp/src/requests/goto_declaration.rs b/tooling/lsp/src/requests/goto_declaration.rs index 8e6d519b89..5cff16b234 100644 --- a/tooling/lsp/src/requests/goto_declaration.rs +++ b/tooling/lsp/src/requests/goto_declaration.rs @@ -46,7 +46,7 @@ fn on_goto_definition_inner( interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false); interner = &context.def_interner; } diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs index 88bb667f2e..32e13ce00f 100644 --- a/tooling/lsp/src/requests/goto_definition.rs +++ b/tooling/lsp/src/requests/goto_definition.rs @@ -54,7 +54,7 @@ fn on_goto_definition_inner( interner = def_interner; } else { // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false, false); interner = &context.def_interner; } diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs index 1844a3d9bf..83b05ba06a 100644 --- a/tooling/lsp/src/requests/test_run.rs +++ b/tooling/lsp/src/requests/test_run.rs @@ -60,7 +60,7 @@ fn on_test_run_request_inner( Some(package) => { let (mut context, 
crate_id) = prepare_package(&workspace_file_manager, &parsed_files, package); - if check_crate(&mut context, crate_id, false, false).is_err() { + if check_crate(&mut context, crate_id, false, false, false).is_err() { let result = NargoTestRunResult { id: params.id.clone(), result: "error".to_string(), diff --git a/tooling/lsp/src/requests/tests.rs b/tooling/lsp/src/requests/tests.rs index 5b78fcc65c..cdf4ad338c 100644 --- a/tooling/lsp/src/requests/tests.rs +++ b/tooling/lsp/src/requests/tests.rs @@ -61,7 +61,7 @@ fn on_tests_request_inner( prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails - let _ = check_crate(&mut context, crate_id, false, false); + let _ = check_crate(&mut context, crate_id, false, false, false); // We don't add test headings for a package if it contains no `#[test]` functions get_package_tests_in_crate(&context, &crate_id, &package.name) diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index 249406effa..87327b01e3 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -27,9 +27,13 @@ impl BlackBoxFunctionSolver for WrapperSolver { fn multi_scalar_mul( &self, points: &[acvm::FieldElement], - scalars: &[acvm::FieldElement], - ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.multi_scalar_mul(points, scalars) + scalars_lo: &[acvm::FieldElement], + scalars_hi: &[acvm::FieldElement], + ) -> Result< + (acvm::FieldElement, acvm::FieldElement, acvm::FieldElement), + acvm::BlackBoxResolutionError, + > { + self.0.multi_scalar_mul(points, scalars_lo, scalars_hi) } fn pedersen_hash( @@ -44,10 +48,15 @@ impl BlackBoxFunctionSolver for WrapperSolver { &self, input1_x: &acvm::FieldElement, input1_y: &acvm::FieldElement, + input1_infinite: &acvm::FieldElement, input2_x: &acvm::FieldElement, input2_y: 
&acvm::FieldElement, - ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - self.0.ec_add(input1_x, input1_y, input2_x, input2_y) + input2_infinite: &acvm::FieldElement, + ) -> Result< + (acvm::FieldElement, acvm::FieldElement, acvm::FieldElement), + acvm::BlackBoxResolutionError, + > { + self.0.ec_add(input1_x, input1_y, input1_infinite, input2_x, input2_y, input2_infinite) } fn poseidon2_permutation( diff --git a/tooling/nargo/src/artifacts/contract.rs b/tooling/nargo/src/artifacts/contract.rs index 83bb4b94f8..a864da7c33 100644 --- a/tooling/nargo/src/artifacts/contract.rs +++ b/tooling/nargo/src/artifacts/contract.rs @@ -9,7 +9,7 @@ use std::collections::{BTreeMap, HashMap}; use fm::FileId; -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractOutputsArtifact { pub structs: HashMap>, pub globals: HashMap>, @@ -21,7 +21,7 @@ impl From for ContractOutputsArtifact { } } -#[derive(Serialize, Deserialize)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContractArtifact { /// Version of noir used to compile this contract pub noir_version: String, @@ -51,7 +51,7 @@ impl From for ContractArtifact { /// /// A contract function unlike a regular Noir program however can have additional properties. /// One of these being a function type. 
-#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize)] pub struct ContractFunctionArtifact { pub name: String, diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index 496896468c..2570c3f5c9 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -9,6 +9,7 @@ use std::{ }; pub use super::debug_vars::{DebugVars, StackFrame}; +use super::{contract::ContractArtifact, program::ProgramArtifact}; use fm::{FileId, FileManager, PathString}; /// A Debug Artifact stores, for a given program, the debug info for every function @@ -128,6 +129,16 @@ impl From for DebugArtifact { } } +impl From for DebugArtifact { + fn from(program_artifact: ProgramArtifact) -> Self { + DebugArtifact { + debug_symbols: program_artifact.debug_symbols.debug_infos, + file_map: program_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl From for DebugArtifact { fn from(compiled_artifact: CompiledContract) -> Self { let all_functions_debug: Vec = compiled_artifact @@ -144,6 +155,22 @@ impl From for DebugArtifact { } } +impl From for DebugArtifact { + fn from(compiled_artifact: ContractArtifact) -> Self { + let all_functions_debug: Vec = compiled_artifact + .functions + .into_iter() + .flat_map(|contract_function| contract_function.debug_symbols.debug_infos) + .collect(); + + DebugArtifact { + debug_symbols: all_functions_debug, + file_map: compiled_artifact.file_map, + warnings: Vec::new(), + } + } +} + impl<'a> Files<'a> for DebugArtifact { type FileId = FileId; type Name = PathString; diff --git a/tooling/nargo/src/artifacts/program.rs b/tooling/nargo/src/artifacts/program.rs index 67ac9f53ec..3c25b9e334 100644 --- a/tooling/nargo/src/artifacts/program.rs +++ b/tooling/nargo/src/artifacts/program.rs @@ -8,7 +8,7 @@ use noirc_driver::DebugFile; use noirc_errors::debug_info::ProgramDebugInfo; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize, Debug)] 
+#[derive(Clone, Serialize, Deserialize, Debug)] pub struct ProgramArtifact { pub noir_version: String, diff --git a/tooling/nargo/src/constants.rs b/tooling/nargo/src/constants.rs index 0b50d61fe3..1048d86fcd 100644 --- a/tooling/nargo/src/constants.rs +++ b/tooling/nargo/src/constants.rs @@ -13,8 +13,6 @@ pub const EXPORT_DIR: &str = "export"; // Files /// The file from which Nargo pulls prover inputs pub const PROVER_INPUT_FILE: &str = "Prover"; -/// The file from which Nargo pulls verifier inputs -pub const VERIFIER_INPUT_FILE: &str = "Verifier"; /// The package definition file for a Noir project. pub const PKG_FILE: &str = "Nargo.toml"; diff --git a/tooling/nargo/src/package.rs b/tooling/nargo/src/package.rs index ecbf358521..44f0a3504f 100644 --- a/tooling/nargo/src/package.rs +++ b/tooling/nargo/src/package.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, fmt::Display, path::PathBuf}; use noirc_frontend::graph::CrateName; -use crate::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; +use crate::constants::PROVER_INPUT_FILE; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PackageType { @@ -59,11 +59,6 @@ impl Package { // For now it is hard-coded to be toml. self.root_dir.join(format!("{PROVER_INPUT_FILE}.toml")) } - pub fn verifier_input_path(&self) -> PathBuf { - // TODO: This should be configurable, such as if we are looking for .json or .toml or custom paths - // For now it is hard-coded to be toml. 
- self.root_dir.join(format!("{VERIFIER_INPUT_FILE}.toml")) - } pub fn is_binary(&self) -> bool { self.package_type == PackageType::Binary diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index c20be037e6..d10dd6a22f 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -43,11 +43,10 @@ thiserror.workspace = true tower.workspace = true async-lsp = { workspace = true, features = ["client-monitor", "stdio", "tracing", "tokio"] } const_format.workspace = true -hex.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" -tokio = { version = "1.0", features = ["io-std"] } +tokio = { version = "1.0", features = ["io-std", "rt"] } dap.workspace = true clap-markdown = { git = "https://github.com/noir-lang/clap-markdown", rev = "450d759532c88f0dba70891ceecdbc9ff8f25d2b", optional = true } @@ -55,9 +54,6 @@ notify = "6.1.1" notify-debouncer-full = "0.3.1" termion = "3.0.0" -# Backends -backend-interface = { path = "../backend_interface" } - # Logs tracing-subscriber.workspace = true tracing-appender = "0.2.3" diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 0ed2d4c07f..74042cf4e4 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -63,7 +63,6 @@ fn execution_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute").arg("--force"); @@ -101,7 +100,6 @@ fn execution_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("execute").arg("--force"); @@ -139,7 +137,6 @@ fn noir_test_success_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = 
Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("test"); @@ -177,7 +174,6 @@ fn noir_test_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("test"); @@ -218,7 +214,6 @@ fn compile_success_empty_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("info"); cmd.arg("--json"); @@ -269,7 +264,6 @@ fn compile_success_contract_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile").arg("--force"); @@ -307,7 +301,6 @@ fn compile_failure_{test_name}() {{ let test_program_dir = PathBuf::from("{test_dir}"); let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("compile").arg("--force"); diff --git a/tooling/nargo_cli/src/backends.rs b/tooling/nargo_cli/src/backends.rs deleted file mode 100644 index 2b3e9d8861..0000000000 --- a/tooling/nargo_cli/src/backends.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::path::PathBuf; - -use backend_interface::backends_directory; -pub(crate) use backend_interface::Backend; - -fn active_backend_file_path() -> PathBuf { - backends_directory().join(".selected_backend") -} - -pub(crate) use backend_interface::ACVM_BACKEND_BARRETENBERG; - -pub(crate) fn clear_active_backend() { - let active_backend_file = active_backend_file_path(); - if active_backend_file.is_file() { - 
std::fs::remove_file(active_backend_file_path()) - .expect("should delete active backend file"); - } -} - -pub(crate) fn set_active_backend(backend_name: &str) { - let active_backend_file = active_backend_file_path(); - let backends_directory = - active_backend_file.parent().expect("active backend file should have parent"); - - std::fs::create_dir_all(backends_directory).expect("Could not create backends directory"); - std::fs::write(active_backend_file, backend_name.as_bytes()) - .expect("Could not write to active backend file"); -} - -pub(crate) fn get_active_backend() -> String { - let active_backend_file = active_backend_file_path(); - - if !active_backend_file.is_file() { - set_active_backend(ACVM_BACKEND_BARRETENBERG); - return ACVM_BACKEND_BARRETENBERG.to_string(); - } - - std::fs::read_to_string(active_backend_file).expect("Could not read active backend file") -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs deleted file mode 100644 index 5aba00764d..0000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/current_cmd.rs +++ /dev/null @@ -1,13 +0,0 @@ -use clap::Args; - -use crate::{backends::get_active_backend, errors::CliError}; - -/// Prints the name of the currently active backend -#[derive(Debug, Clone, Args)] -pub(crate) struct CurrentCommand; - -pub(crate) fn run(_args: CurrentCommand) -> Result<(), CliError> { - println!("{}", get_active_backend()); - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs deleted file mode 100644 index 974db9ff7f..0000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/install_cmd.rs +++ /dev/null @@ -1,30 +0,0 @@ -use clap::Args; - -use backend_interface::{backends_directory, download_backend}; - -use crate::errors::{BackendError, CliError}; - -use super::ls_cmd::get_available_backends; - -/// Install a new backend from a URL. 
-#[derive(Debug, Clone, Args)] -pub(crate) struct InstallCommand { - /// The name of the backend to install. - backend: String, - - /// The URL from which to download the backend. - url: String, -} - -pub(crate) fn run(args: InstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if installed_backends.contains(&args.backend) { - return Err(BackendError::AlreadyInstalled(args.backend).into()); - } - - download_backend(&args.url, &backends_directory().join(args.backend).join("backend_binary")) - .map_err(BackendError::from)?; - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs deleted file mode 100644 index da37b104d6..0000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/ls_cmd.rs +++ /dev/null @@ -1,34 +0,0 @@ -use backend_interface::backends_directory; -use clap::Args; - -use crate::errors::CliError; - -/// Prints the list of currently installed backends -#[derive(Debug, Clone, Args)] -pub(crate) struct LsCommand; - -pub(crate) fn run(_args: LsCommand) -> Result<(), CliError> { - for backend in get_available_backends() { - println!("{backend}"); - } - - Ok(()) -} - -pub(super) fn get_available_backends() -> Vec { - let backend_directory_contents = std::fs::read_dir(backends_directory()) - .expect("Could not read backends directory contents"); - - // TODO: Highlight the currently active backend. 
- backend_directory_contents - .into_iter() - .filter_map(|entry| { - let path = entry.ok()?.path(); - if path.is_dir() { - path.file_name().map(|name| name.to_string_lossy().to_string()) - } else { - None - } - }) - .collect() -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/mod.rs b/tooling/nargo_cli/src/cli/backend_cmd/mod.rs deleted file mode 100644 index 985dbbdb93..0000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/mod.rs +++ /dev/null @@ -1,41 +0,0 @@ -use clap::{Args, Subcommand}; - -use crate::errors::CliError; - -mod current_cmd; -mod install_cmd; -mod ls_cmd; -mod uninstall_cmd; -mod use_cmd; - -#[non_exhaustive] -#[derive(Args, Clone, Debug)] -/// Install and select custom backends used to generate and verify proofs. -pub(crate) struct BackendCommand { - #[command(subcommand)] - command: BackendCommands, -} - -#[non_exhaustive] -#[derive(Subcommand, Clone, Debug)] -pub(crate) enum BackendCommands { - Current(current_cmd::CurrentCommand), - Ls(ls_cmd::LsCommand), - Use(use_cmd::UseCommand), - Install(install_cmd::InstallCommand), - Uninstall(uninstall_cmd::UninstallCommand), -} - -pub(crate) fn run(cmd: BackendCommand) -> Result<(), CliError> { - let BackendCommand { command } = cmd; - - match command { - BackendCommands::Current(args) => current_cmd::run(args), - BackendCommands::Ls(args) => ls_cmd::run(args), - BackendCommands::Use(args) => use_cmd::run(args), - BackendCommands::Install(args) => install_cmd::run(args), - BackendCommands::Uninstall(args) => uninstall_cmd::run(args), - }?; - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs deleted file mode 100644 index 7497f1bc2f..0000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/uninstall_cmd.rs +++ /dev/null @@ -1,59 +0,0 @@ -use clap::Args; - -use backend_interface::backends_directory; - -use crate::{ - backends::{ - clear_active_backend, get_active_backend, set_active_backend, 
ACVM_BACKEND_BARRETENBERG, - }, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Uninstalls a backend -#[derive(Debug, Clone, Args)] -pub(crate) struct UninstallCommand { - /// The name of the backend to uninstall. - backend: String, -} - -pub(crate) fn run(args: UninstallCommand) -> Result<(), CliError> { - let installed_backends = get_available_backends(); - - if !installed_backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - let active_backend = get_active_backend(); - - // Handle the case where we're uninstalling the currently active backend. - if active_backend == args.backend { - let barretenberg_is_installed = - installed_backends.iter().any(|backend_name| backend_name == ACVM_BACKEND_BARRETENBERG); - - let new_active_backend = - if args.backend != ACVM_BACKEND_BARRETENBERG && barretenberg_is_installed { - // Prefer switching to barretenberg if possible. - Some(ACVM_BACKEND_BARRETENBERG) - } else { - // Otherwise pick the first backend which isn't being uninstalled. - installed_backends - .iter() - .find(|&backend_name| backend_name != &args.backend) - .map(|name| name.as_str()) - }; - - if let Some(backend) = new_active_backend { - set_active_backend(backend); - } else { - // We've deleted the last backend. Clear the active backend file to be recreated once we install a new one. 
- clear_active_backend(); - } - } - - std::fs::remove_dir_all(backends_directory().join(args.backend)) - .expect("backend directory should be deleted"); - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs b/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs deleted file mode 100644 index 66a129c214..0000000000 --- a/tooling/nargo_cli/src/cli/backend_cmd/use_cmd.rs +++ /dev/null @@ -1,26 +0,0 @@ -use clap::Args; - -use crate::{ - backends::set_active_backend, - errors::{BackendError, CliError}, -}; - -use super::ls_cmd::get_available_backends; - -/// Select the backend to use -#[derive(Debug, Clone, Args)] -pub(crate) struct UseCommand { - backend: String, -} - -pub(crate) fn run(args: UseCommand) -> Result<(), CliError> { - let backends = get_available_backends(); - - if !backends.contains(&args.backend) { - return Err(BackendError::UnknownBackend(args.backend).into()); - } - - set_active_backend(&args.backend); - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index 208379b098..e2e1f147b9 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -87,6 +87,7 @@ fn check_package( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, + compile_options.use_elaborator, )?; if package.is_library() || package.is_contract() { @@ -94,13 +95,11 @@ fn check_package( Ok(false) } else { // XXX: We can have a --overwrite flag to determine if you want to overwrite the Prover/Verifier.toml files - if let Some((parameters, return_type)) = compute_function_abi(&context, &crate_id) { + if let Some((parameters, _)) = compute_function_abi(&context, &crate_id) { let path_to_prover_input = package.prover_input_path(); - let path_to_verifier_input = package.verifier_input_path(); // Before writing the file, check if it exists and whether overwrite is set let should_write_prover = !path_to_prover_input.exists() || 
allow_overwrite; - let should_write_verifier = !path_to_verifier_input.exists() || allow_overwrite; if should_write_prover { let prover_toml = create_input_toml_template(parameters.clone(), None); @@ -109,19 +108,7 @@ fn check_package( eprintln!("Note: Prover.toml already exists. Use --overwrite to force overwrite."); } - if should_write_verifier { - let public_inputs = - parameters.into_iter().filter(|param| param.is_public()).collect(); - - let verifier_toml = create_input_toml_template(public_inputs, return_type); - write_to_file(verifier_toml.as_bytes(), &path_to_verifier_input); - } else { - eprintln!( - "Note: Verifier.toml already exists. Use --overwrite to force overwrite." - ); - } - - let any_file_written = should_write_prover || should_write_verifier; + let any_file_written = should_write_prover; Ok(any_file_written) } else { @@ -173,8 +160,9 @@ pub(crate) fn check_crate_and_report_errors( deny_warnings: bool, disable_macros: bool, silence_warnings: bool, + use_elaborator: bool, ) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings, disable_macros); + let result = check_crate(context, crate_id, deny_warnings, disable_macros, use_elaborator); report_errors(result, &context.file_manager, deny_warnings, silence_warnings) } diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs deleted file mode 100644 index 8c64d9cd93..0000000000 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ /dev/null @@ -1,83 +0,0 @@ -use super::fs::{create_named_dir, write_to_file}; -use super::NargoConfig; -use crate::backends::Backend; -use crate::errors::CliError; - -use clap::Args; -use nargo::ops::{compile_program, report_errors}; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{file_manager_with_stdlib, CompileOptions, 
NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::graph::CrateName; - -/// Generates a Solidity verifier smart contract for the program -#[derive(Debug, Clone, Args)] -pub(crate) struct CodegenVerifierCommand { - /// The name of the package to codegen - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Codegen all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, -} - -pub(crate) fn run( - backend: &Backend, - args: CodegenVerifierCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let program = nargo::ops::transform_program(program, args.compile_options.expression_width); - - // TODO(https://github.com/noir-lang/noir/issues/4428): - // We do not expect to have a smart contract verifier for a foldable program with multiple circuits. 
- // However, in the future we can expect to possibly have non-inlined ACIR functions during compilation - // that will be inlined at a later step such as by the ACVM compiler or by the backend. - // Add appropriate handling here once the compiler enables multiple ACIR functions. - assert_eq!(program.program.functions.len(), 1); - let smart_contract_string = backend.eth_contract(&program.program)?; - - let contract_dir = workspace.contracts_directory_path(package); - create_named_dir(&contract_dir, "contract"); - let contract_path = contract_dir.join("plonk_vk").with_extension("sol"); - - let path = write_to_file(smart_contract_string.as_bytes(), &contract_path); - println!("[{}] Contract successfully created and located at {path}", package.name); - } - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 2f87840693..ecf2e2e9f5 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -22,7 +22,6 @@ use notify_debouncer_full::new_debouncer; use crate::errors::CliError; -use super::fs::program::only_acir; use super::fs::program::{read_program_from_file, save_contract_to_file, save_program_to_file}; use super::NargoConfig; use rayon::prelude::*; @@ -111,7 +110,7 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } -fn compile_workspace_full( +pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { @@ -136,10 +135,9 @@ fn compile_workspace_full( .partition(|package| package.is_binary()); // Save build artifacts to disk. 
- let only_acir = compile_options.only_acir; for (package, program) in binary_packages.into_iter().zip(compiled_programs) { let program = nargo::ops::transform_program(program, compile_options.expression_width); - save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir); + save_program(program.clone(), &package, &workspace.target_directory_path()); } let circuit_dir = workspace.target_directory_path(); for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { @@ -197,18 +195,9 @@ pub(super) fn compile_workspace( } } -pub(super) fn save_program( - program: CompiledProgram, - package: &Package, - circuit_dir: &Path, - only_acir_opt: bool, -) { - if only_acir_opt { - only_acir(program.program, circuit_dir); - } else { - let program_artifact = ProgramArtifact::from(program.clone()); - save_program_to_file(&program_artifact, &package.name, circuit_dir); - } +pub(super) fn save_program(program: CompiledProgram, package: &Package, circuit_dir: &Path) { + let program_artifact = ProgramArtifact::from(program.clone()); + save_program_to_file(&program_artifact, &package.name, circuit_dir); } fn save_contract(contract: CompiledContract, package: &Package, circuit_dir: &Path) { diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index d06d39f3b6..124e30069a 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -28,7 +28,7 @@ use noir_debugger::errors::{DapError, LoadError}; #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { /// Override the expression width requested by the backend. 
- #[arg(long, value_parser = parse_expression_width, default_value = "3")] + #[arg(long, value_parser = parse_expression_width, default_value = "4")] expression_width: ExpressionWidth, #[clap(long)] diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index 68f902dfe3..862a46884e 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -5,19 +5,18 @@ use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::{compile_program, report_errors, DefaultForeignCallExecutor}; +use nargo::ops::DefaultForeignCallExecutor; use nargo::package::Package; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; +use super::compile_cmd::compile_workspace_full; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; +use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -59,32 +58,16 @@ pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliEr )?; let target_dir = &workspace.target_directory_path(); - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + // Compile the full workspace in order to generate any build artifacts. 
+ compile_workspace_full(&workspace, &args.compile_options)?; let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); + let program_artifact_path = workspace.package_build_path(package); + let program: CompiledProgram = read_program_from_file(program_artifact_path)?.into(); let (return_value, witness_stack) = execute_program_and_decode( - compiled_program, + program, package, &args.prover_name, args.oracle_resolver.as_deref(), diff --git a/tooling/nargo_cli/src/cli/export_cmd.rs b/tooling/nargo_cli/src/cli/export_cmd.rs index a61f3ccfc0..324eed340a 100644 --- a/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/tooling/nargo_cli/src/cli/export_cmd.rs @@ -89,6 +89,7 @@ fn compile_exported_functions( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, + compile_options.use_elaborator, )?; let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); diff --git a/tooling/nargo_cli/src/cli/fs/inputs.rs b/tooling/nargo_cli/src/cli/fs/inputs.rs index bd038c51ad..dee9a00507 100644 --- a/tooling/nargo_cli/src/cli/fs/inputs.rs +++ b/tooling/nargo_cli/src/cli/fs/inputs.rs @@ -6,8 +6,6 @@ use std::{collections::BTreeMap, path::Path}; use crate::errors::FilesystemError; -use super::write_to_file; - /// Returns the circuit's parameters and its return value, if one exists. 
/// # Examples /// @@ -36,99 +34,3 @@ pub(crate) fn read_inputs_from_file>( Ok((input_map, return_value)) } - -pub(crate) fn write_inputs_to_file>( - input_map: &InputMap, - return_value: &Option, - abi: &Abi, - path: P, - file_name: &str, - format: Format, -) -> Result<(), FilesystemError> { - let file_path = path.as_ref().join(file_name).with_extension(format.ext()); - - // We must insert the return value into the `InputMap` in order for it to be written to file. - let serialized_output = match return_value { - // Parameters and return values are kept separate except for when they're being written to file. - // As a result, we don't want to modify the original map and must clone it before insertion. - Some(return_value) => { - let mut input_map = input_map.clone(); - input_map.insert(MAIN_RETURN_NAME.to_owned(), return_value.clone()); - format.serialize(&input_map, abi)? - } - // If no return value exists, then we can serialize the original map directly. - None => format.serialize(input_map, abi)?, - }; - - write_to_file(serialized_output.as_bytes(), &file_path); - - Ok(()) -} - -#[cfg(test)] -mod tests { - use std::{collections::BTreeMap, vec}; - - use acvm::FieldElement; - use nargo::constants::VERIFIER_INPUT_FILE; - use noirc_abi::{ - input_parser::{Format, InputValue}, - Abi, AbiParameter, AbiReturnType, AbiType, AbiVisibility, - }; - use tempfile::TempDir; - - use super::{read_inputs_from_file, write_inputs_to_file}; - - #[test] - fn write_and_read_recovers_inputs_and_return_value() { - let input_dir = TempDir::new().unwrap().into_path(); - - // We purposefully test a simple ABI here as we're focussing on `fs`. - // Tests for serializing complex types should exist in `noirc_abi`. 
- let abi = Abi { - parameters: vec![ - AbiParameter { - name: "foo".into(), - typ: AbiType::Field, - visibility: AbiVisibility::Public, - }, - AbiParameter { - name: "bar".into(), - typ: AbiType::String { length: 11 }, - visibility: AbiVisibility::Private, - }, - ], - return_type: Some(AbiReturnType { - abi_type: AbiType::Field, - visibility: AbiVisibility::Public, - }), - - // Input serialization is only dependent on types, not position in witness map. - // Neither of these should be relevant so we leave them empty. - param_witnesses: BTreeMap::new(), - return_witnesses: Vec::new(), - error_types: BTreeMap::new(), - }; - let input_map = BTreeMap::from([ - ("foo".to_owned(), InputValue::Field(42u128.into())), - ("bar".to_owned(), InputValue::String("hello world".to_owned())), - ]); - let return_value = Some(InputValue::Field(FieldElement::zero())); - - write_inputs_to_file( - &input_map, - &return_value, - &abi, - &input_dir, - VERIFIER_INPUT_FILE, - Format::Toml, - ) - .unwrap(); - - let (loaded_inputs, loaded_return_value) = - read_inputs_from_file(input_dir, VERIFIER_INPUT_FILE, Format::Toml, &abi).unwrap(); - - assert_eq!(loaded_inputs, input_map); - assert_eq!(loaded_return_value, return_value); - } -} diff --git a/tooling/nargo_cli/src/cli/fs/mod.rs b/tooling/nargo_cli/src/cli/fs/mod.rs index 4ebce3b332..8658bd5b24 100644 --- a/tooling/nargo_cli/src/cli/fs/mod.rs +++ b/tooling/nargo_cli/src/cli/fs/mod.rs @@ -4,11 +4,8 @@ use std::{ path::{Path, PathBuf}, }; -use crate::errors::FilesystemError; - pub(super) mod inputs; pub(super) mod program; -pub(super) mod proof; pub(super) mod witness; pub(super) fn create_named_dir(named_dir: &Path, name: &str) -> PathBuf { @@ -31,12 +28,3 @@ pub(super) fn write_to_file(bytes: &[u8], path: &Path) -> String { Ok(_) => display.to_string(), } } - -pub(super) fn load_hex_data>(path: P) -> Result, FilesystemError> { - let hex_data: Vec<_> = std::fs::read(&path) - .map_err(|_| 
FilesystemError::PathNotValid(path.as_ref().to_path_buf()))?; - - let raw_bytes = hex::decode(hex_data).map_err(FilesystemError::HexArtifactNotValid)?; - - Ok(raw_bytes) -} diff --git a/tooling/nargo_cli/src/cli/fs/program.rs b/tooling/nargo_cli/src/cli/fs/program.rs index 77005e8d5a..ba01765166 100644 --- a/tooling/nargo_cli/src/cli/fs/program.rs +++ b/tooling/nargo_cli/src/cli/fs/program.rs @@ -1,6 +1,5 @@ use std::path::{Path, PathBuf}; -use acvm::acir::circuit::Program; use nargo::artifacts::{contract::ContractArtifact, program::ProgramArtifact}; use noirc_frontend::graph::CrateName; @@ -17,16 +16,6 @@ pub(crate) fn save_program_to_file>( save_build_artifact_to_file(program_artifact, &circuit_name, circuit_dir) } -/// Writes the bytecode as acir.gz -pub(crate) fn only_acir>(program: Program, circuit_dir: P) -> PathBuf { - create_named_dir(circuit_dir.as_ref(), "target"); - let circuit_path = circuit_dir.as_ref().join("acir").with_extension("gz"); - let bytes = Program::serialize_program(&program); - write_to_file(&bytes, &circuit_path); - - circuit_path -} - pub(crate) fn save_contract_to_file>( compiled_contract: &ContractArtifact, circuit_name: &str, diff --git a/tooling/nargo_cli/src/cli/fs/proof.rs b/tooling/nargo_cli/src/cli/fs/proof.rs deleted file mode 100644 index d2b3050708..0000000000 --- a/tooling/nargo_cli/src/cli/fs/proof.rs +++ /dev/null @@ -1,20 +0,0 @@ -use std::path::{Path, PathBuf}; - -use nargo::constants::PROOF_EXT; - -use crate::errors::FilesystemError; - -use super::{create_named_dir, write_to_file}; - -pub(crate) fn save_proof_to_dir>( - proof: &[u8], - proof_name: &str, - proof_dir: P, -) -> Result { - create_named_dir(proof_dir.as_ref(), "proof"); - let proof_path = proof_dir.as_ref().join(proof_name).with_extension(PROOF_EXT); - - write_to_file(hex::encode(proof).as_bytes(), &proof_path); - - Ok(proof_path) -} diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index cac3c36f90..11cf6e22ab 100644 
--- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,28 +1,25 @@ use std::collections::HashMap; -use acvm::acir::circuit::{ExpressionWidth, Program}; -use backend_interface::BackendError; +use acvm::acir::circuit::ExpressionWidth; use clap::Args; use iter_extended::vecmap; use nargo::{ - artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, - ops::report_errors, package::Package, parse_all, + artifacts::{debug::DebugArtifact, program::ProgramArtifact}, + package::Package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, - NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; use prettytable::{row, table, Row}; use rayon::prelude::*; use serde::Serialize; -use crate::backends::Backend; use crate::errors::CliError; -use super::{compile_cmd::compile_workspace, NargoConfig}; +use super::{ + compile_cmd::compile_workspace_full, fs::program::read_program_from_file, NargoConfig, +}; /// Provides detailed information on each of a program's function (represented by a single circuit) /// @@ -51,11 +48,7 @@ pub(crate) struct InfoCommand { compile_options: CompileOptions, } -pub(crate) fn run( - backend: &Backend, - args: InfoCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: InfoCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; let default_selection = if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; @@ -66,79 +59,42 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - 
insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let compiled_workspace = compile_workspace( - &workspace_file_manager, - &parsed_files, - &workspace, - &args.compile_options, - ); - - let (compiled_programs, compiled_contracts) = report_errors( - compiled_workspace, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; + // Compile the full workspace in order to generate any build artifacts. + compile_workspace_full(&workspace, &args.compile_options)?; - let compiled_programs = vecmap(compiled_programs, |program| { - nargo::ops::transform_program(program, args.compile_options.expression_width) - }); - let compiled_contracts = vecmap(compiled_contracts, |contract| { - nargo::ops::transform_contract(contract, args.compile_options.expression_width) - }); + let binary_packages: Vec<(Package, ProgramArtifact)> = workspace + .into_iter() + .filter(|package| package.is_binary()) + .map(|package| -> Result<(Package, ProgramArtifact), CliError> { + let program_artifact_path = workspace.package_build_path(package); + let program = read_program_from_file(program_artifact_path)?; + Ok((package.clone(), program)) + }) + .collect::>()?; if args.profile_info { - for compiled_program in &compiled_programs { + for (_, compiled_program) in &binary_packages { let debug_artifact = DebugArtifact::from(compiled_program.clone()); - for function_debug in compiled_program.debug.iter() { + for function_debug in compiled_program.debug_symbols.debug_infos.iter() { let span_opcodes = function_debug.count_span_opcodes(); print_span_opcodes(span_opcodes, &debug_artifact); } } - - for compiled_contract in &compiled_contracts { - let debug_artifact = DebugArtifact::from(compiled_contract.clone()); - let functions = &compiled_contract.functions; - for contract_function in functions { - for function_debug in contract_function.debug.iter() { 
- let span_opcodes = function_debug.count_span_opcodes(); - print_span_opcodes(span_opcodes, &debug_artifact); - } - } - } } - let binary_packages = - workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); - let program_info = binary_packages + .into_iter() .par_bridge() .map(|(package, program)| { count_opcodes_and_gates_in_program( - backend, program, - package, - args.compile_options.expression_width, - ) - }) - .collect::>()?; - - let contract_info = compiled_contracts - .into_par_iter() - .map(|contract| { - count_opcodes_and_gates_in_contract( - backend, - contract, + &package, args.compile_options.expression_width, ) }) - .collect::>()?; + .collect(); - let info_report = InfoReport { programs: program_info, contracts: contract_info }; + let info_report = InfoReport { programs: program_info, contracts: Vec::new() }; if args.json { // Expose machine-readable JSON data. @@ -156,23 +112,6 @@ pub(crate) fn run( } program_table.printstd(); } - if !info_report.contracts.is_empty() { - let mut contract_table = table!([ - Fm->"Contract", - Fm->"Function", - Fm->"Expression Width", - Fm->"ACIR Opcodes", - Fm->"Backend Circuit Size" - ]); - for contract_info in info_report.contracts { - let contract_rows: Vec = contract_info.into(); - for row in contract_rows { - contract_table.add_row(row); - } - } - - contract_table.printstd(); - } } Ok(()) @@ -249,7 +188,6 @@ impl From for Vec { Fc->format!("{}", function.name), format!("{:?}", program_info.expression_width), Fc->format!("{}", function.acir_opcodes), - Fc->format!("{}", function.circuit_size), ] }) } @@ -268,7 +206,6 @@ struct ContractInfo { struct FunctionInfo { name: String, acir_opcodes: usize, - circuit_size: u32, } impl From for Vec { @@ -279,56 +216,26 @@ impl From for Vec { Fc->format!("{}", function.name), format!("{:?}", contract_info.expression_width), Fc->format!("{}", function.acir_opcodes), - Fc->format!("{}", function.circuit_size), ] }) } } fn 
count_opcodes_and_gates_in_program( - backend: &Backend, - compiled_program: CompiledProgram, + compiled_program: ProgramArtifact, package: &Package, expression_width: ExpressionWidth, -) -> Result { +) -> ProgramInfo { let functions = compiled_program - .program + .bytecode .functions .into_par_iter() .enumerate() - .map(|(i, function)| -> Result<_, BackendError> { - Ok(FunctionInfo { - name: compiled_program.names[i].clone(), - acir_opcodes: function.opcodes.len(), - // Unconstrained functions do not matter to a backend circuit count so we pass nothing here - circuit_size: backend.get_exact_circuit_size(&Program { - functions: vec![function], - unconstrained_functions: Vec::new(), - })?, - }) - }) - .collect::>()?; - - Ok(ProgramInfo { package_name: package.name.to_string(), expression_width, functions }) -} - -fn count_opcodes_and_gates_in_contract( - backend: &Backend, - contract: CompiledContract, - expression_width: ExpressionWidth, -) -> Result { - let functions = contract - .functions - .into_par_iter() - .map(|function| -> Result<_, BackendError> { - Ok(FunctionInfo { - name: function.name, - // TODO(https://github.com/noir-lang/noir/issues/4720) - acir_opcodes: function.bytecode.functions[0].opcodes.len(), - circuit_size: backend.get_exact_circuit_size(&function.bytecode)?, - }) + .map(|(i, function)| FunctionInfo { + name: compiled_program.names[i].clone(), + acir_opcodes: function.opcodes.len(), }) - .collect::>()?; + .collect(); - Ok(ContractInfo { name: contract.name, expression_width, functions }) + ProgramInfo { package_name: package.name.to_string(), expression_width, functions } } diff --git a/tooling/nargo_cli/src/cli/mod.rs b/tooling/nargo_cli/src/cli/mod.rs index ad778549ac..485ccc7aba 100644 --- a/tooling/nargo_cli/src/cli/mod.rs +++ b/tooling/nargo_cli/src/cli/mod.rs @@ -6,13 +6,9 @@ use std::path::PathBuf; use color_eyre::eyre; -use crate::backends::get_active_backend; - mod fs; -mod backend_cmd; mod check_cmd; -mod codegen_verifier_cmd; mod 
compile_cmd; mod dap_cmd; mod debug_cmd; @@ -23,9 +19,7 @@ mod info_cmd; mod init_cmd; mod lsp_cmd; mod new_cmd; -mod prove_cmd; mod test_cmd; -mod verify_cmd; const GIT_HASH: &str = env!("GIT_COMMIT"); const IS_DIRTY: &str = env!("GIT_DIRTY"); @@ -60,10 +54,8 @@ pub(crate) struct NargoConfig { #[non_exhaustive] #[derive(Subcommand, Clone, Debug)] enum NargoCommand { - Backend(backend_cmd::BackendCommand), Check(check_cmd::CheckCommand), Fmt(fmt_cmd::FormatCommand), - CodegenVerifier(codegen_verifier_cmd::CodegenVerifierCommand), #[command(alias = "build")] Compile(compile_cmd::CompileCommand), New(new_cmd::NewCommand), @@ -73,8 +65,6 @@ enum NargoCommand { Export(export_cmd::ExportCommand), #[command(hide = true)] // Hidden while the feature is being built out Debug(debug_cmd::DebugCommand), - Prove(prove_cmd::ProveCommand), - Verify(verify_cmd::VerifyCommand), Test(test_cmd::TestCommand), Info(info_cmd::InfoCommand), Lsp(lsp_cmd::LspCommand), @@ -94,18 +84,11 @@ pub(crate) fn start_cli() -> eyre::Result<()> { // Search through parent directories to find package root if necessary. 
if !matches!( command, - NargoCommand::New(_) - | NargoCommand::Init(_) - | NargoCommand::Lsp(_) - | NargoCommand::Backend(_) - | NargoCommand::Dap(_) + NargoCommand::New(_) | NargoCommand::Init(_) | NargoCommand::Lsp(_) | NargoCommand::Dap(_) ) { config.program_dir = find_package_root(&config.program_dir)?; } - let active_backend = get_active_backend(); - let backend = crate::backends::Backend::new(active_backend); - match command { NargoCommand::New(args) => new_cmd::run(args, config), NargoCommand::Init(args) => init_cmd::run(args, config), @@ -114,12 +97,8 @@ pub(crate) fn start_cli() -> eyre::Result<()> { NargoCommand::Debug(args) => debug_cmd::run(args, config), NargoCommand::Execute(args) => execute_cmd::run(args, config), NargoCommand::Export(args) => export_cmd::run(args, config), - NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), - NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), NargoCommand::Test(args) => test_cmd::run(args, config), - NargoCommand::Info(args) => info_cmd::run(&backend, args, config), - NargoCommand::CodegenVerifier(args) => codegen_verifier_cmd::run(&backend, args, config), - NargoCommand::Backend(args) => backend_cmd::run(args), + NargoCommand::Info(args) => info_cmd::run(args, config), NargoCommand::Lsp(args) => lsp_cmd::run(args, config), NargoCommand::Dap(args) => dap_cmd::run(args, config), NargoCommand::Fmt(args) => fmt_cmd::run(args, config), diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs deleted file mode 100644 index 47c71527fd..0000000000 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ /dev/null @@ -1,154 +0,0 @@ -use clap::Args; -use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; -use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, 
resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; -use noirc_frontend::graph::CrateName; - -use super::fs::{ - inputs::{read_inputs_from_file, write_inputs_to_file}, - proof::save_proof_to_dir, -}; -use super::NargoConfig; -use crate::{backends::Backend, cli::execute_cmd::execute_program, errors::CliError}; - -/// Create proof for this program. The proof is returned as a hex encoded string. -#[derive(Debug, Clone, Args)] -#[clap(visible_alias = "p")] -pub(crate) struct ProveCommand { - /// The name of the toml file which contains the inputs for the prover - #[clap(long, short, default_value = PROVER_INPUT_FILE)] - prover_name: String, - - /// The name of the toml file which contains the inputs for the verifier - #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] - verifier_name: String, - - /// Verify proof after proving - #[arg(long)] - verify: bool, - - /// The name of the package to prove - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Prove all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, - - /// JSON RPC url to solve oracle calls - #[clap(long)] - oracle_resolver: Option, -} - -pub(crate) fn run( - backend: &Backend, - args: ProveCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - 
insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); - - prove_package( - backend, - &workspace, - package, - compiled_program, - &args.prover_name, - &args.verifier_name, - args.verify, - args.oracle_resolver.as_deref(), - )?; - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub(crate) fn prove_package( - backend: &Backend, - workspace: &Workspace, - package: &Package, - compiled_program: CompiledProgram, - prover_name: &str, - verifier_name: &str, - check_proof: bool, - foreign_call_resolver_url: Option<&str>, -) -> Result<(), CliError> { - // Parse the initial witness values from Prover.toml - let (inputs_map, _) = - read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - - let witness_stack = execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; - - // Write public inputs into Verifier.toml - let public_abi = compiled_program.abi.public_abi(); - // Get the entry point witness for the ABI - let main_witness = - &witness_stack.peek().expect("Should have at least one witness on the stack").witness; - let (public_inputs, return_value) = public_abi.decode(main_witness)?; - - write_inputs_to_file( - &public_inputs, - &return_value, - &public_abi, - &package.root_dir, - verifier_name, - Format::Toml, - )?; - - let proof = backend.prove(&compiled_program.program, 
witness_stack)?; - - if check_proof { - let public_inputs = public_abi.encode(&public_inputs, return_value)?; - let valid_proof = backend.verify(&proof, public_inputs, &compiled_program.program)?; - - if !valid_proof { - return Err(CliError::InvalidProof("".into())); - } - } - - save_proof_to_dir(&proof, &String::from(&package.name), workspace.proofs_directory_path())?; - - Ok(()) -} diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs index 967d4c87e6..51e21248af 100644 --- a/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/tooling/nargo_cli/src/cli/test_cmd.rs @@ -175,6 +175,7 @@ fn run_test( crate_id, compile_options.deny_warnings, compile_options.disable_macros, + compile_options.use_elaborator, ) .expect("Any errors should have occurred when collecting test functions"); @@ -208,6 +209,7 @@ fn get_tests_in_package( compile_options.deny_warnings, compile_options.disable_macros, compile_options.silence_warnings, + compile_options.use_elaborator, )?; Ok(context diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs deleted file mode 100644 index a6078f6c1d..0000000000 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ /dev/null @@ -1,109 +0,0 @@ -use super::fs::{inputs::read_inputs_from_file, load_hex_data}; -use super::NargoConfig; -use crate::{backends::Backend, errors::CliError}; - -use clap::Args; -use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; -use nargo::ops::{compile_program, report_errors}; -use nargo::package::Package; -use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; -use noirc_frontend::graph::CrateName; - -/// Given a proof and a program, verify whether the proof is valid 
-#[derive(Debug, Clone, Args)] -#[clap(visible_alias = "v")] -pub(crate) struct VerifyCommand { - /// The name of the toml file which contains the inputs for the verifier - #[clap(long, short, default_value = VERIFIER_INPUT_FILE)] - verifier_name: String, - - /// The name of the package verify - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Verify all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, - - #[clap(flatten)] - compile_options: CompileOptions, -} - -pub(crate) fn run( - backend: &Backend, - args: VerifyCommand, - config: NargoConfig, -) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - )?; - - let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); - - let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); - for package in binary_packages { - let compilation_result = compile_program( - &workspace_file_manager, - &parsed_files, - package, - &args.compile_options, - None, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, - )?; - - let compiled_program = - nargo::ops::transform_program(compiled_program, args.compile_options.expression_width); - - verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; - } - - Ok(()) -} - -fn verify_package( - backend: &Backend, - workspace: &Workspace, - 
package: &Package, - compiled_program: CompiledProgram, - verifier_name: &str, -) -> Result<(), CliError> { - // Load public inputs (if any) from `verifier_name`. - let public_abi = compiled_program.abi.public_abi(); - let (public_inputs_map, return_value) = - read_inputs_from_file(&package.root_dir, verifier_name, Format::Toml, &public_abi)?; - - let public_inputs = public_abi.encode(&public_inputs_map, return_value)?; - - let proof_path = - workspace.proofs_directory_path().join(package.name.to_string()).with_extension(PROOF_EXT); - - let proof = load_hex_data(&proof_path)?; - - let valid_proof = backend.verify(&proof, public_inputs, &compiled_program.program)?; - - if valid_proof { - Ok(()) - } else { - Err(CliError::InvalidProof(proof_path)) - } -} diff --git a/tooling/nargo_cli/src/errors.rs b/tooling/nargo_cli/src/errors.rs index 40fb788640..3e0b13a9cb 100644 --- a/tooling/nargo_cli/src/errors.rs +++ b/tooling/nargo_cli/src/errors.rs @@ -1,5 +1,4 @@ use acvm::acir::native_types::WitnessStackError; -use hex::FromHexError; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; use noir_debugger::errors::DapError; @@ -11,8 +10,7 @@ use thiserror::Error; pub(crate) enum FilesystemError { #[error("Error: {} is not a valid path\nRun either `nargo compile` to generate missing build artifacts or `nargo prove` to construct a proof", .0.display())] PathNotValid(PathBuf), - #[error("Error: could not parse hex build artifact (proof, proving and/or verification keys, ACIR checksum) ({0})")] - HexArtifactNotValid(FromHexError), + #[error( " Error: cannot find {0}.toml file.\n Expected location: {1:?} \n Please generate this file at the expected location." )] @@ -37,9 +35,6 @@ pub(crate) enum CliError { #[error("Error: destination {} already exists", .0.display())] DestinationAlreadyExists(PathBuf), - #[error("Failed to verify proof {}", .0.display())] - InvalidProof(PathBuf), - #[error("Invalid package name {0}. 
Did you mean to use `--name`?")] InvalidPackageName(String), @@ -68,24 +63,4 @@ pub(crate) enum CliError { /// Error from the compilation pipeline #[error(transparent)] CompileError(#[from] CompileError), - - /// Error related to backend selection/installation. - #[error(transparent)] - BackendError(#[from] BackendError), - - /// Error related to communication with backend. - #[error(transparent)] - BackendCommunicationError(#[from] backend_interface::BackendError), -} - -#[derive(Debug, thiserror::Error)] -pub(crate) enum BackendError { - #[error("No backend is installed with the name {0}")] - UnknownBackend(String), - - #[error("The backend {0} is already installed")] - AlreadyInstalled(String), - - #[error("Backend installation failed: {0}")] - InstallationError(#[from] std::io::Error), } diff --git a/tooling/nargo_cli/src/main.rs b/tooling/nargo_cli/src/main.rs index 6e2b7069bc..a407d467ce 100644 --- a/tooling/nargo_cli/src/main.rs +++ b/tooling/nargo_cli/src/main.rs @@ -7,7 +7,6 @@ //! This name was used because it sounds like `cargo` and //! Noir Package Manager abbreviated is npm, which is already taken. -mod backends; mod cli; mod errors; diff --git a/tooling/nargo_cli/tests/codegen-verifier.rs b/tooling/nargo_cli/tests/codegen-verifier.rs deleted file mode 100644 index f991f72b10..0000000000 --- a/tooling/nargo_cli/tests/codegen-verifier.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! This integration test aims to check that the `nargo codegen-verifier` will successfully create a -//! file containing a verifier for a simple program. 
- -use assert_cmd::prelude::*; -use predicates::prelude::*; -use std::process::Command; - -use assert_fs::prelude::{PathAssert, PathChild}; - -#[test] -fn simple_verifier_codegen() { - let test_dir = assert_fs::TempDir::new().unwrap(); - std::env::set_current_dir(&test_dir).unwrap(); - - // Create trivial program - let project_name = "hello_world"; - let project_dir = test_dir.child(project_name); - - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("new").arg(project_name); - cmd.assert().success(); - - std::env::set_current_dir(&project_dir).unwrap(); - - // Run `nargo codegen-verifier` - let mut cmd = Command::cargo_bin("nargo").unwrap(); - cmd.arg("codegen-verifier"); - cmd.assert() - .success() - .stdout(predicate::str::contains("Contract successfully created and located at")); - - project_dir - .child("contract") - .child("hello_world") - .child("plonk_vk.sol") - .assert(predicate::path::is_file()); -} diff --git a/tooling/nargo_cli/tests/stdlib-tests.rs b/tooling/nargo_cli/tests/stdlib-tests.rs index 9d377cfaee..70a9354f50 100644 --- a/tooling/nargo_cli/tests/stdlib-tests.rs +++ b/tooling/nargo_cli/tests/stdlib-tests.rs @@ -10,8 +10,7 @@ use nargo::{ parse_all, prepare_package, }; -#[test] -fn stdlib_noir_tests() { +fn run_stdlib_tests(use_elaborator: bool) { let mut file_manager = file_manager_with_stdlib(&PathBuf::from(".")); file_manager.add_file_with_source_canonical_path(&PathBuf::from("main.nr"), "".to_owned()); let parsed_files = parse_all(&file_manager); @@ -30,7 +29,7 @@ fn stdlib_noir_tests() { let (mut context, dummy_crate_id) = prepare_package(&file_manager, &parsed_files, &dummy_package); - let result = check_crate(&mut context, dummy_crate_id, true, false); + let result = check_crate(&mut context, dummy_crate_id, true, false, use_elaborator); report_errors(result, &context.file_manager, true, false) .expect("Error encountered while compiling standard library"); @@ -60,3 +59,15 @@ fn stdlib_noir_tests() { 
assert!(!test_report.is_empty(), "Could not find any tests within the stdlib"); assert!(test_report.iter().all(|(_, status)| !status.failed())); } + +#[test] +fn stdlib_noir_tests() { + run_stdlib_tests(false) +} + +// Once this no longer panics we can use the elaborator by default and remove the old passes +#[test] +#[should_panic] +fn stdlib_elaborator_tests() { + run_stdlib_tests(true) +} diff --git a/tooling/nargo_fmt/src/config.rs b/tooling/nargo_fmt/src/config.rs index 2bb5d97c0a..5e38dc7d8b 100644 --- a/tooling/nargo_fmt/src/config.rs +++ b/tooling/nargo_fmt/src/config.rs @@ -45,7 +45,7 @@ config! { max_width: usize, 100, "Maximum width of each line"; tab_spaces: usize, 4, "Number of spaces per tab"; remove_nested_parens: bool, true, "Remove nested parens"; - error_on_lost_comment: bool, true, "Error if unable to get comments"; + error_on_lost_comment: bool, false, "Error if unable to get comments"; short_array_element_width_threshold: usize, 10, "Width threshold for an array element to be considered short"; array_width: usize, 100, "Maximum width of an array literal before falling back to vertical formatting"; fn_call_width: usize, 60, "Maximum width of the args of a function call before falling back to vertical formatting"; diff --git a/tooling/noir_codegen/src/index.ts b/tooling/noir_codegen/src/index.ts index fbbab07bcf..d00990f01b 100644 --- a/tooling/noir_codegen/src/index.ts +++ b/tooling/noir_codegen/src/index.ts @@ -1,66 +1,17 @@ -import { AbiType } from '@noir-lang/noirc_abi'; import { CompiledCircuit } from '@noir-lang/types'; -import { PrimitiveTypesUsed, generateTsInterface, codegenStructDefinitions } from './noir_types.js'; - -// TODO: reenable this. See `abiTypeToTs` for reasoning. -// export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }; - -const codegenPrelude = `/* Autogenerated file, do not edit! 
*/ - -/* eslint-disable */ - -import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" - -export { ForeignCallHandler } from "@noir-lang/noir_js" -`; - -const codegenFunction = ( - name: string, - compiled_program: CompiledCircuit, - function_signature: { inputs: [string, string][]; returnValue: string | null }, -) => { - const args = function_signature.inputs.map(([name]) => `${name}`).join(', '); - const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', '); - - return `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; - -export async function ${name}(${args_with_types}, foreignCallHandler?: ForeignCallHandler): Promise<${ - function_signature.returnValue - }> { - const program = new Noir(${name}_circuit); - const args: InputMap = { ${args} }; - const { returnValue } = await program.execute(args, foreignCallHandler); - return returnValue as ${function_signature.returnValue}; -} -`; +import { TypingsGenerator } from './utils/typings_generator.js'; + +export const codegen = ( + programs: [string, CompiledCircuit][], + embedArtifact: boolean, + useFixedLengthArrays: boolean, +): string => { + return new TypingsGenerator( + programs.map((program) => ({ + circuitName: program[0], + artifact: embedArtifact ? program[1] : undefined, + abi: structuredClone(program[1].abi), // We'll mutate the ABI types when doing typescript codegen, so we clone it to avoid mutating the artifact. 
+ })), + useFixedLengthArrays, + ).codegen(); }; - -export const codegen = (programs: [string, CompiledCircuit][]): string => { - let results = [codegenPrelude]; - const primitiveTypeMap = new Map(); - const structTypeMap = new Map(); - - const functions: string[] = []; - for (const [name, program] of programs) { - const function_sig = generateTsInterface(program.abi, structTypeMap, primitiveTypeMap); - functions.push(codegenFunction(name, stripUnwantedFields(program), function_sig)); - } - - const structTypeDefinitions: string = codegenStructDefinitions(structTypeMap, primitiveTypeMap); - - // Add the primitive Noir types that do not have a 1-1 mapping to TypeScript. - const primitiveTypeAliases: string[] = []; - for (const value of primitiveTypeMap.values()) { - primitiveTypeAliases.push(`export type ${value.aliasName} = ${value.tsType};`); - } - - results = results.concat(...primitiveTypeAliases, '', structTypeDefinitions, ...functions); - - return results.join('\n'); -}; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function stripUnwantedFields(value: any): CompiledCircuit { - const { abi, bytecode } = value; - return { abi, bytecode }; -} diff --git a/tooling/noir_codegen/src/main.ts b/tooling/noir_codegen/src/main.ts index 591e7420db..835b24a9e4 100644 --- a/tooling/noir_codegen/src/main.ts +++ b/tooling/noir_codegen/src/main.ts @@ -24,7 +24,7 @@ function main() { return [program_name, { abi, bytecode }]; }); - const result = codegen(programs); + const result = codegen(programs, !cliConfig.externalArtifact, cliConfig.useFixedLengthArrays); const outputDir = path.resolve(cliConfig.outDir ?? 
'./codegen'); const outputFile = path.join(outputDir, 'index.ts'); diff --git a/tooling/noir_codegen/src/noir_types.ts b/tooling/noir_codegen/src/noir_types.ts deleted file mode 100644 index 0c0e2b7c60..0000000000 --- a/tooling/noir_codegen/src/noir_types.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { AbiType, Abi } from '@noir-lang/noirc_abi'; - -/** - * Keep track off all of the Noir primitive types that were used. - * Most of these will not have a 1-1 definition in TypeScript, - * so we will need to generate type aliases for them. - * - * We want to generate type aliases - * for specific types that are used in the ABI. - * - * For example: - * - If `Field` is used we want to alias that - * with `number`. - * - If `u32` is used we want to alias that with `number` too. - */ -export type PrimitiveTypesUsed = { - /** - * The name of the type alias that we will generate. - */ - aliasName: string; - /** - * The TypeScript type that we will alias to. - */ - tsType: string; -}; - -/** - * Typescript does not allow us to check for equality of non-primitive types - * easily, so we create a addIfUnique function that will only add an item - * to the map if it is not already there by using JSON.stringify. - * @param item - The item to add to the map. - */ -function addIfUnique(primitiveTypeMap: Map, item: PrimitiveTypesUsed) { - const key = JSON.stringify(item); - if (!primitiveTypeMap.has(key)) { - primitiveTypeMap.set(key, item); - } -} - -/** - * Converts an ABI type to a TypeScript type. - * @param type - The ABI type to convert. - * @returns The typescript code to define the type. - */ -function abiTypeToTs(type: AbiType, primitiveTypeMap: Map): string { - switch (type.kind) { - case 'field': - addIfUnique(primitiveTypeMap, { aliasName: 'Field', tsType: 'string' }); - return 'Field'; - case 'integer': { - const typeName = type.sign === 'signed' ? `i${type.width}` : `u${type.width}`; - // Javascript cannot safely represent the full range of Noir's integer types as numbers. 
- // `Number.MAX_SAFE_INTEGER == 2**53 - 1` so we disallow passing numbers to types which may exceed this. - // 52 has been chosen as the cutoff rather than 53 for safety. - const tsType = type.width <= 52 ? `string | number` : `string`; - - addIfUnique(primitiveTypeMap, { aliasName: typeName, tsType }); - return typeName; - } - case 'boolean': - return `boolean`; - case 'array': - // We can't force the usage of fixed length arrays as this currently throws errors in TS. - // The array would need to be `as const` to support this whereas that's unlikely to happen in user code. - // return `FixedLengthArray<${abiTypeToTs(type.type, primitiveTypeMap)}, ${type.length}>`; - return `${abiTypeToTs(type.type, primitiveTypeMap)}[]`; - case 'string': - // We could enforce that literals are the correct length but not generally. - // This would run into similar problems to above. - return `string`; - case 'struct': - return getLastComponentOfPath(type.path); - case 'tuple': { - const field_types = type.fields.map((field) => abiTypeToTs(field, primitiveTypeMap)); - return `[${field_types.join(', ')}]`; - } - default: - throw new Error(`Unknown ABI type ${JSON.stringify(type)}`); - } -} - -/** - * Returns the last component of a path, e.g. "foo::bar::baz" -\> "baz" - * Note: that if we have a path such as "Baz", we will return "Baz". - * - * Since these paths corresponds to structs, we can assume that we - * cannot have "foo::bar::". - * - * We also make the assumption that since these paths are coming from - * Noir, then we will not have two paths that look like this: - * - foo::bar::Baz - * - cat::dog::Baz - * ie the last component of the path (struct name) is enough to uniquely identify - * the whole path. - * - * TODO: We should double check this assumption when we use type aliases, - * I expect that `foo::bar::Baz as Dog` would effectively give `foo::bar::Dog` - * @param str - The path to get the last component of. - * @returns The last component of the path. 
- */ -function getLastComponentOfPath(str: string): string { - const parts = str.split('::'); - const lastPart = parts[parts.length - 1]; - return lastPart; -} - -/** - * Generates TypeScript interfaces for the structs used in the ABI. - * @param type - The ABI type to generate the interface for. - * @param output - The set of structs that we have already generated bindings for. - * @returns The TypeScript code to define the struct. - */ -function generateStructInterfaces( - type: AbiType, - structsEncountered: Map, - primitiveTypeMap: Map, -) { - // Edge case to handle the array of structs case. - if ( - type.kind === 'array' && - type.type.kind === 'struct' && - !structsEncountered.has(getLastComponentOfPath(type.type.path)) - ) { - generateStructInterfaces(type.type, structsEncountered, primitiveTypeMap); - } - if (type.kind !== 'struct') return; - - const structName = getLastComponentOfPath(type.path); - if (!structsEncountered.has(structName)) { - for (const field of type.fields) { - generateStructInterfaces(field.type, structsEncountered, primitiveTypeMap); - } - structsEncountered.set(structName, type.fields); - } -} - -/** - * Generates a TypeScript interface for the ABI. - * @param abiObj - The ABI to generate the interface for. - * @returns The TypeScript code to define the interface. 
- */ -export function generateTsInterface( - abiObj: Abi, - structsEncountered: Map, - primitiveTypeMap: Map, -): { inputs: [string, string][]; returnValue: string | null } { - // Define structs for composite types - for (const param of abiObj.parameters) { - generateStructInterfaces(param.type, structsEncountered, primitiveTypeMap); - } - - // Generating Return type, if it exists - if (abiObj.return_type != null) { - generateStructInterfaces(abiObj.return_type.abi_type, structsEncountered, primitiveTypeMap); - } - - return getTsFunctionSignature(abiObj, primitiveTypeMap); -} - -export function codegenStructDefinitions( - structsEncountered: Map, - primitiveTypeMap: Map, -): string { - let codeGeneratedStruct = ''; - - for (const [structName, structFields] of structsEncountered) { - codeGeneratedStruct += `export type ${structName} = {\n`; - for (const field of structFields) { - codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; - } - codeGeneratedStruct += `};\n\n`; - } - - return codeGeneratedStruct; -} - -function getTsFunctionSignature( - abi: Abi, - primitiveTypeMap: Map, -): { inputs: [string, string][]; returnValue: string | null } { - const inputs: [string, string][] = abi.parameters.map((param) => [ - param.name, - abiTypeToTs(param.type, primitiveTypeMap), - ]); - const returnValue = abi.return_type ? 
abiTypeToTs(abi.return_type.abi_type, primitiveTypeMap) : null; - return { inputs, returnValue }; -} diff --git a/tooling/noir_codegen/src/parseArgs.ts b/tooling/noir_codegen/src/parseArgs.ts index 58468c1b8f..83b6c8bcdb 100644 --- a/tooling/noir_codegen/src/parseArgs.ts +++ b/tooling/noir_codegen/src/parseArgs.ts @@ -6,6 +6,8 @@ export interface ParsedArgs { files: string[]; outDir?: string | undefined; inputDir?: string | undefined; + externalArtifact: boolean; + useFixedLengthArrays: boolean; } export function parseArgs(): ParsedArgs { @@ -27,6 +29,17 @@ export function parseArgs(): ParsedArgs { 'Directory containing program artifact files. Inferred as lowest common path of all files if not specified.', }, help: { type: Boolean, defaultValue: false, alias: 'h', description: 'Prints this message.' }, + 'external-artifact': { + type: Boolean, + defaultValue: false, + description: + 'Does not embed the circuit artifact in the code, instead requiring passing the circuit artifact as an argument to the generated functions.', + }, + 'fixed-length-arrays': { + type: Boolean, + defaultValue: false, + description: 'Use fixed-length arrays for inputs and outputs.', + }, }, { helpArg: 'help', @@ -53,6 +66,8 @@ export function parseArgs(): ParsedArgs { files: rawOptions.glob, outDir: rawOptions['out-dir'], inputDir: rawOptions['input-dir'], + externalArtifact: rawOptions['external-artifact'], + useFixedLengthArrays: rawOptions['fixed-length-arrays'], }; } @@ -61,4 +76,6 @@ interface CommandLineArgs { 'out-dir'?: string; 'input-dir'?: string; help: boolean; + 'external-artifact': boolean; + 'fixed-length-arrays': boolean; } diff --git a/tooling/noir_codegen/src/utils/abi_type_with_generics.ts b/tooling/noir_codegen/src/utils/abi_type_with_generics.ts new file mode 100644 index 0000000000..844e116f44 --- /dev/null +++ b/tooling/noir_codegen/src/utils/abi_type_with_generics.ts @@ -0,0 +1,139 @@ +import { AbiType } from '@noir-lang/noirc_abi'; + +/** + * Represents a binding to 
a generic. + */ +export class BindingId { + constructor( + public id: number, + public isNumeric: boolean, + ) {} +} + +export type StructType = { + path: string; + fields: { name: string; type: AbiTypeWithGenerics }[]; + /** The generics of the struct, bound to the fields */ + generics: BindingId[]; +}; + +export type StringType = { + kind: 'string'; + length: number | BindingId | null; +}; + +export type Constant = { + kind: 'constant'; + value: number; +}; + +export type ArrayType = { + kind: 'array'; + length: number | BindingId | null; + type: AbiTypeWithGenerics; +}; + +export type Tuple = { + kind: 'tuple'; + fields: AbiTypeWithGenerics[]; +}; + +export type Struct = { + kind: 'struct'; + structType: StructType; + /** The arguments are the concrete instantiation of the generics in the struct type. */ + args: AbiTypeWithGenerics[]; +}; + +export type AbiTypeWithGenerics = + | { kind: 'field' } + | { kind: 'boolean' } + | { kind: 'integer'; sign: string; width: number } + | { kind: 'binding'; id: BindingId } + | { kind: 'constant'; value: number } + | StringType + | ArrayType + | Tuple + | Struct; + +/** + * Maps an ABI type to an ABI type with generics. + * This performs pure type conversion, and does not generate any bindings. 
+ */ +export function mapAbiTypeToAbiTypeWithGenerics(abiType: AbiType): AbiTypeWithGenerics { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return abiType; + case 'array': + return { + kind: 'array', + length: abiType.length, + type: mapAbiTypeToAbiTypeWithGenerics(abiType.type), + }; + case 'struct': { + const structType = { + path: abiType.path, + fields: abiType.fields.map((field) => ({ + name: field.name, + type: mapAbiTypeToAbiTypeWithGenerics(field.type), + })), + generics: [], + }; + return { + kind: 'struct', + structType, + args: [], + }; + } + case 'tuple': + return { + kind: 'tuple', + fields: abiType.fields.map(mapAbiTypeToAbiTypeWithGenerics), + }; + default: { + const exhaustiveCheck: never = abiType; + throw new Error(`Unhandled abi type: ${exhaustiveCheck}`); + } + } +} + +/** + * Finds the structs in an ABI type. + * This won't explore nested structs. + */ +export function findStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + switch (abiType.kind) { + case 'field': + case 'boolean': + case 'string': + case 'integer': + return []; + case 'array': + return findStructsInType(abiType.type); + case 'tuple': + return abiType.fields.flatMap(findStructsInType); + case 'struct': + return [abiType]; + default: { + return []; + } + } +} + +/** + * Finds all the structs in an ABI type, including nested structs. 
+ */ +export function findAllStructsInType(abiType: AbiTypeWithGenerics): Struct[] { + let allStructs: Struct[] = []; + let lastStructs = findStructsInType(abiType); + while (lastStructs.length > 0) { + allStructs = allStructs.concat(lastStructs); + lastStructs = lastStructs.flatMap((struct) => + struct.structType.fields.flatMap((field) => findStructsInType(field.type)), + ); + } + return allStructs; +} diff --git a/tooling/noir_codegen/src/utils/demonomorphizer.ts b/tooling/noir_codegen/src/utils/demonomorphizer.ts new file mode 100644 index 0000000000..2b33b57455 --- /dev/null +++ b/tooling/noir_codegen/src/utils/demonomorphizer.ts @@ -0,0 +1,284 @@ +import { + type AbiTypeWithGenerics, + type ArrayType, + BindingId, + type Constant, + type StringType, + type Struct, + type StructType, + type Tuple, + findAllStructsInType, + findStructsInType, +} from './abi_type_with_generics.js'; + +export interface DemonomorphizerConfig { + leaveArrayLengthsUnbounded: boolean; + leaveStringLengthsUnbounded: boolean; +} + +/** + * Demonomorphizes a list of ABI types adding generics to structs. + * Since monomorphization of the generics destroys information, this process is not guaranteed to return the original structure. + * However, it should successfully unify all struct types that share the same name and field names. + */ +export class Demonomorphizer { + private variantsMap: Map; + private visitedStructs: Map; + private lastBindingId = 0; + + /** + * Demonomorphizes the passed in ABI types, mutating them. + */ + public static demonomorphize(abiTypes: AbiTypeWithGenerics[], config: DemonomorphizerConfig) { + new Demonomorphizer(abiTypes, config); + } + + private constructor( + private types: AbiTypeWithGenerics[], + private config: DemonomorphizerConfig, + ) { + this.variantsMap = new Map(); + this.fillVariantsMap(); + + this.visitedStructs = new Map(); + this.demonomorphizeStructs(); + } + + /** + * Finds all the variants of the structs in the types. 
+ * A variant is every use of a struct with the same name and fields. + */ + private fillVariantsMap() { + const allStructs = this.types.flatMap(findAllStructsInType); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + const variants = this.variantsMap.get(id) ?? []; + variants.push(struct); + this.variantsMap.set(id, variants); + } + } + + private demonomorphizeStructs() { + for (const type of this.types) { + const topLevelStructs = findStructsInType(type); + for (const struct of topLevelStructs) { + this.demonomorphizeStruct(struct); + } + } + } + + /** + * Demonomorphizes a struct, by demonomorphizing its dependencies first. + * Then it'll unify the types of the variants generating a unique generic type. + * It'll also generate args that instantiate the generic type with the concrete arguments for each variant. + */ + private demonomorphizeStruct(struct: Struct) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (this.visitedStructs.has(id)) { + return; + } + const dependencies = struct.structType.fields.flatMap((field) => findStructsInType(field.type)); + for (const dependency of dependencies) { + this.demonomorphizeStruct(dependency); + } + if (this.visitedStructs.has(id)) { + throw new Error('Circular dependency detected'); + } + + const variants = this.variantsMap.get(id)!; + const mappedStructType = struct.structType; + + for (let i = 0; i < struct.structType.fields.length; i++) { + const variantTypes = variants.map((variant) => variant.structType.fields[i].type); + const mappedType = this.unifyTypes(variantTypes, mappedStructType.generics, variants); + mappedStructType.fields[i].type = mappedType; + } + + // Mutate variants setting the new struct type + variants.forEach((variant) => (variant.structType = mappedStructType)); + + this.visitedStructs.set(id, mappedStructType); + } + + /** + * Tries to unify the types of a set of variants recursively. 
+ * Unification will imply replacing some properties with bindings and pushing bindings to the generics of the struct. + */ + private unifyTypes( + types: AbiTypeWithGenerics[], + generics: BindingId[], // Mutates generics adding new bindings + variants: Struct[], // mutates variants adding different args to the variants + ): AbiTypeWithGenerics { + const kinds = new Set(types.map((type) => type.kind)); + if (kinds.size > 1) { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + switch (types[0].kind) { + case 'field': + case 'boolean': + case 'binding': + return types[0]; + case 'integer': { + if (allDeepEqual(types)) { + return types[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants); + } + } + case 'string': { + const strings = types as StringType[]; + const unifiedStringType = strings[0]; + if (strings.every((string) => string.length === unifiedStringType.length)) { + return unifiedStringType; + } else if (!this.config.leaveStringLengthsUnbounded) { + unifiedStringType.length = this.buildNumericBindingAndPushToVariants( + strings.map((string) => { + if (typeof string.length !== 'number') { + throw new Error('Trying to unify strings with bindings'); + } + return string.length; + }), + generics, + variants, + ); + return unifiedStringType; + } else { + unifiedStringType.length = null; + return unifiedStringType; + } + } + case 'array': { + const arrays = types as ArrayType[]; + const unifiedArrayType: ArrayType = arrays[0]; + if (!arrays.every((array) => array.length === unifiedArrayType.length)) { + if (!this.config.leaveArrayLengthsUnbounded) { + unifiedArrayType.length = this.buildNumericBindingAndPushToVariants( + arrays.map((array) => { + if (typeof array.length !== 'number') { + throw new Error('Trying to unify arrays with bindings'); + } + return array.length; + }), + generics, + variants, + ); + } else { + unifiedArrayType.length = null; + } + } + + unifiedArrayType.type = this.unifyTypes( + 
arrays.map((array) => array.type), + generics, + variants, + ); + return unifiedArrayType; + } + case 'tuple': { + const tuples = types as Tuple[]; + const unifiedTupleType: Tuple = tuples[0]; + for (let i = 0; i < unifiedTupleType.fields.length; i++) { + unifiedTupleType.fields[i] = this.unifyTypes( + tuples.map((tuple) => tuple.fields[i]), + generics, + variants, + ); + } + return unifiedTupleType; + } + case 'struct': { + const structs = types as Struct[]; + const ids = new Set(structs.map((struct) => Demonomorphizer.buildIdForStruct(struct.structType))); + if (ids.size > 1) { + // If the types are different structs, we can only unify them by creating a new binding. + // For example, if we have a struct A { x: u32 } and a struct A { x: Field }, the only possible unification is A { x: T } + return this.buildBindingAndPushToVariants(types, generics, variants); + } else { + // If the types are the same struct, we must unify the arguments to the struct. + // For example, if we have A and A, we need to unify to A and push T to the generics of the struct type. + const unifiedStruct = structs[0]; + + if (!structs.every((struct) => struct.args.length === unifiedStruct.args.length)) { + throw new Error('Same struct with different number of args encountered'); + } + for (let i = 0; i < unifiedStruct.args.length; i++) { + const argTypes = structs.map((struct) => struct.args[i]); + unifiedStruct.args[i] = this.unifyTypes(argTypes, generics, variants); + } + return unifiedStruct; + } + } + + case 'constant': { + const constants = types as Constant[]; + if (constants.every((constant) => constant.value === constants[0].value)) { + return constants[0]; + } else { + return this.buildBindingAndPushToVariants(types, generics, variants, true); + } + } + + default: { + const exhaustiveCheck: never = types[0]; + throw new Error(`Unhandled abi type: ${exhaustiveCheck}`); + } + } + } + + /** + * We consider a struct to be the same if it has the same name and field names. 
+ * Structs with the same id will be unified into a single type by the demonomorphizer. + */ + public static buildIdForStruct(struct: StructType): string { + const name = struct.path.split('::').pop()!; + const fields = struct.fields.map((field) => field.name).join(','); + return `${name}(${fields})`; + } + + private buildBindingAndPushToVariants( + concreteTypes: AbiTypeWithGenerics[], + generics: BindingId[], + variants: Struct[], + isNumeric = false, + ): AbiTypeWithGenerics { + const bindingId = new BindingId(this.lastBindingId++, isNumeric); + + for (let i = 0; i < variants.length; i++) { + const variant = variants[i]; + const concreteType = concreteTypes[i]; + variant.args.push(concreteType); + } + + generics.push(bindingId); + return { kind: 'binding', id: bindingId }; + } + + private buildNumericBindingAndPushToVariants( + concreteNumbers: number[], + generics: BindingId[], + variants: Struct[], + ): BindingId { + const bindingId = new BindingId(this.lastBindingId++, true); + + for (let i = 0; i < variants.length; i++) { + const variant = variants[i]; + variant.args.push({ kind: 'constant', value: concreteNumbers[i] }); + } + + generics.push(bindingId); + return bindingId; + } +} + +function allDeepEqual(arr: T[]): boolean { + if (arr.length === 0) { + return true; + } + const first = JSON.stringify(arr[0]); + for (let i = 0; i < arr.length; i++) { + if (JSON.stringify(arr[i]) !== first) { + return false; + } + } + return true; +} diff --git a/tooling/noir_codegen/src/utils/typings_generator.ts b/tooling/noir_codegen/src/utils/typings_generator.ts new file mode 100644 index 0000000000..36d2de140f --- /dev/null +++ b/tooling/noir_codegen/src/utils/typings_generator.ts @@ -0,0 +1,324 @@ +import { CompiledCircuit } from '@noir-lang/types'; +import { + AbiTypeWithGenerics, + BindingId, + StructType, + findAllStructsInType, + mapAbiTypeToAbiTypeWithGenerics, +} from './abi_type_with_generics.js'; +import { Demonomorphizer } from './demonomorphizer.js'; +import { 
Abi } from '@noir-lang/noirc_abi'; + +const codegenPrelude = `/* Autogenerated file, do not edit! */ + +/* eslint-disable */ + +import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" + +export { ForeignCallHandler } from "@noir-lang/noir_js" +`; +/** + * Keep track off all of the Noir primitive types that were used. + * Most of these will not have a 1-1 definition in TypeScript, + * so we will need to generate type aliases for them. + * + * We want to generate type aliases + * for specific types that are used in the ABI. + * + * For example: + * - If `Field` is used we want to alias that + * with `number`. + * - If `u32` is used we want to alias that with `number` too. + */ +type PrimitiveTypesUsed = { + /** + * The name of the type alias that we will generate. + */ + aliasName: string; + /** + * The TypeScript type that we will alias to. + */ + tsType: string; +}; + +/** + * Returns the last component of a path, e.g. "foo::bar::baz" -\> "baz" + * Note: that if we have a path such as "Baz", we will return "Baz". + * + * Since these paths corresponds to structs, we can assume that we + * cannot have "foo::bar::". + * + * We also make the assumption that since these paths are coming from + * Noir, then we will not have two paths that look like this: + * - foo::bar::Baz + * - cat::dog::Baz + * ie the last component of the path (struct name) is enough to uniquely identify + * the whole path. + * + * TODO: We should double check this assumption when we use type aliases, + * I expect that `foo::bar::Baz as Dog` would effectively give `foo::bar::Dog` + * @param str - The path to get the last component of. + * @returns The last component of the path. + */ +function getLastComponentOfPath(str: string): string { + const parts = str.split('::'); + const lastPart = parts[parts.length - 1]; + return lastPart; +} + +/** + * Replaces a numeric binding with the corresponding generics name or the actual value. 
+ */ +function replaceNumericBinding(id: number | BindingId, genericsNameMap: Map): string { + if (typeof id === 'number') { + return id.toString(); + } else { + return genericsNameMap.get(id.id) ?? 'unknown'; + } +} + +export class TypingsGenerator { + /** All the types in the ABIs */ + private allTypes: AbiTypeWithGenerics[] = []; + /** The demonomorphized ABIs of the circuits */ + private demonomorphizedAbis: { + circuitName: string; + params: { name: string; type: AbiTypeWithGenerics }[]; + returnType?: AbiTypeWithGenerics; + artifact?: CompiledCircuit; + }[] = []; + /** Maps struct id to name for structs with the same name and different field sets */ + private structIdToTsName = new Map(); + /** Collect all the primitives used in the types to add them to the codegen */ + private primitiveTypesUsed = new Map(); + + constructor( + circuits: { abi: Abi; circuitName: string; artifact?: CompiledCircuit }[], + private useFixedLengthArrays: boolean, + ) { + // Map all the types used in the ABIs to the demonomorphized types + for (const { abi, circuitName, artifact } of circuits) { + const params = abi.parameters.map((param) => { + const type = mapAbiTypeToAbiTypeWithGenerics(param.type); + this.allTypes.push(type); + return { name: param.name, type }; + }); + if (abi.return_type) { + const returnType = mapAbiTypeToAbiTypeWithGenerics(abi.return_type.abi_type); + this.allTypes.push(returnType); + this.demonomorphizedAbis.push({ circuitName, params, returnType, artifact }); + } else { + this.demonomorphizedAbis.push({ circuitName, params, artifact }); + } + } + // Demonomorphize the types + Demonomorphizer.demonomorphize(this.allTypes, { + leaveArrayLengthsUnbounded: !useFixedLengthArrays, + leaveStringLengthsUnbounded: true, + }); + } + + public codegen(): string { + this.primitiveTypesUsed = new Map(); + const structsCode = this.codegenAllStructs(); + const interfacesCode = this.codegenAllInterfaces(); + const primitivesCode = this.codegenAllPrimitives(); + + return 
` +${codegenPrelude} +${primitivesCode} +${structsCode} +${interfacesCode}`; + } + + private codegenAllStructs(): string { + const allStructs = this.allTypes.flatMap(findAllStructsInType); + // First, deduplicate the structs used + const structTypesToExport = new Map(); + for (const struct of allStructs) { + const id = Demonomorphizer.buildIdForStruct(struct.structType); + if (structTypesToExport.has(id)) { + continue; + } + structTypesToExport.set(id, struct.structType); + } + + // Then, we have to consider the case where we have struct with the same name but different fields. + // For those, we'll naively append a number to the name. + const idsPerName = new Map(); + for (const [id, structType] of structTypesToExport.entries()) { + const name = getLastComponentOfPath(structType.path); + const ids = idsPerName.get(name) ?? []; + ids.push(id); + idsPerName.set(name, ids); + } + + this.structIdToTsName = new Map(); + for (const [name, ids] of idsPerName.entries()) { + if (ids.length !== 1) { + ids.forEach((id, index) => { + this.structIdToTsName.set(id, `${name}${index + 1}`); + }); + } + } + // Now we can just generate the code for the structs + let resultCode = ''; + + for (const structType of structTypesToExport.values()) { + resultCode += this.codegenStructType(structType); + } + + return resultCode; + } + + private getStructName(structType: StructType): string { + return ( + this.structIdToTsName.get(Demonomorphizer.buildIdForStruct(structType)) || getLastComponentOfPath(structType.path) + ); + } + + private codegenStructType(structType: StructType): string { + // Generate names for the generic bindings. + const genericsNameMap = new Map(); + structType.generics.forEach((generic, index) => { + genericsNameMap.set(generic.id, String.fromCharCode('A'.charCodeAt(0) + index)); + }); + + const name = this.getStructName(structType); + const generics = structType.generics.length + ? 
`<${structType.generics + .map((generic) => `${genericsNameMap.get(generic.id)}${generic.isNumeric ? ' extends number' : ''}`) + .join(', ')}>` + : ''; + + let resultCode = `export type ${name}${generics} = {\n`; + + for (const field of structType.fields) { + resultCode += ` ${field.name}: ${this.codegenType(field.type, genericsNameMap)};\n`; + } + + resultCode += '}\n\n'; + + return resultCode; + } + + private codegenType(type: AbiTypeWithGenerics, genericsNameMap: Map): string { + switch (type.kind) { + case 'field': + this.addIfUnique({ aliasName: 'Field', tsType: 'string' }); + return 'Field'; + case 'boolean': + return 'boolean'; + case 'integer': { + const typeName = type.sign === 'signed' ? `i${type.width}` : `u${type.width}`; + // Even though noir accepts numbers or strings for integers, it always returns strings + // So we must use string as the type here. + this.addIfUnique({ aliasName: typeName, tsType: 'string' }); + return typeName; + } + case 'binding': + return genericsNameMap.get(type.id.id) ?? 
'unknown'; + case 'constant': + return type.value.toString(); + case 'string': + return `string`; + case 'array': + if (this.useFixedLengthArrays) { + if (type.length === null) { + throw new Error('Got unbounded array with fixed length arrays enabled'); + } + return `FixedLengthArray<${this.codegenType(type.type, genericsNameMap)}, ${replaceNumericBinding( + type.length, + genericsNameMap, + )}>`; + } else { + return `${this.codegenType(type.type, genericsNameMap)}[]`; + } + case 'tuple': { + const fieldTypes = type.fields.map((field) => this.codegenType(field, genericsNameMap)); + return `[${fieldTypes.join(', ')}]`; + } + case 'struct': { + const name = this.getStructName(type.structType); + if (type.args.length) { + const args = type.args.map((arg) => this.codegenType(arg, genericsNameMap)).join(', '); + return `${name}<${args}>`; + } else { + return name; + } + } + } + } + + /** + * Typescript does not allow us to check for equality of non-primitive types + * easily, so we create a addIfUnique function that will only add an item + * to the map if it is not already there by using JSON.stringify. + * @param item - The item to add to the map. + */ + private addIfUnique(item: PrimitiveTypesUsed) { + const key = JSON.stringify(item); + if (!this.primitiveTypesUsed.has(key)) { + this.primitiveTypesUsed.set(key, item); + } + } + + /** + * Codegen all the interfaces for the circuits. + * For a circuit named Foo, we'll codegen FooInputType and FooReturnType. + */ + private codegenAllInterfaces(): string { + let resultCode = ''; + for (const { circuitName, params, returnType, artifact } of this.demonomorphizedAbis) { + const functionSignature = { + inputs: params.map((param): [string, string] => [param.name, this.codegenType(param.type, new Map())]), + returnValue: returnType ? 
this.codegenType(returnType, new Map()) : null, + }; + resultCode += this.codegenStructType({ + path: `${circuitName}InputType`, + fields: params, + generics: [], + }); + + if (returnType) { + resultCode += `export type ${circuitName}ReturnType = ${this.codegenType(returnType, new Map())};\n`; + } + + resultCode += codegenFunction(circuitName, functionSignature, artifact); + } + return resultCode; + } + + private codegenAllPrimitives(): string { + let primitiveTypeAliases = this.useFixedLengthArrays + ? 'export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }\n' + : ''; + for (const [, value] of this.primitiveTypesUsed) { + primitiveTypeAliases += `export type ${value.aliasName} = ${value.tsType};\n`; + } + return primitiveTypeAliases; + } +} + +const codegenFunction = ( + name: string, + function_signature: { inputs: [string, string][]; returnValue: string | null }, + compiled_program?: CompiledCircuit, +): string => { + const args = function_signature.inputs.map(([name]) => `${name}`).join(', '); + const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', '); + + const artifact = compiled_program + ? `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)};` + : ''; + + return `${artifact} + +export async function ${name}(${args_with_types}${compiled_program ? 
'' : `, ${name}_circuit: CompiledCircuit`}, foreignCallHandler?: ForeignCallHandler): Promise<${function_signature.returnValue}> { + const program = new Noir(${name}_circuit); + const args: InputMap = { ${args} }; + const { returnValue } = await program.execute(args, foreignCallHandler); + return returnValue as ${function_signature.returnValue}; +} +`; +}; diff --git a/tooling/noir_codegen/test/index.test.ts b/tooling/noir_codegen/test/index.test.ts index 03fb680a53..afc7769ed9 100644 --- a/tooling/noir_codegen/test/index.test.ts +++ b/tooling/noir_codegen/test/index.test.ts @@ -1,18 +1,18 @@ import { expect } from 'chai'; // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore File is codegenned at test time. -import { exported_function_foo, MyStruct, u64, ForeignCallHandler } from './codegen/index.js'; +import { exported_function_foo, MyStruct, u64, u32, ForeignCallHandler } from './codegen/index.js'; it('codegens a callable function', async () => { - const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' }; + const my_struct = { foo: true, bar: ['123', '123', '123', '123'], baz: '0x00' }; - const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo( + const [sum, constant, struct]: [u64, u32, MyStruct] = await exported_function_foo( '2', '3', - [0, 0, 0, 0, 0], + ['0x00', '0x00', '0x00', '0x00', '0x00'], { foo: my_struct, - bar: [my_struct, my_struct, my_struct], + bar: [my_struct, my_struct], baz: '64', }, '12345', @@ -35,15 +35,15 @@ it('allows passing a custom foreign call handler', async () => { return []; }; - const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' }; + const my_struct = { foo: true, bar: ['123', '123', '123', '123'], baz: '0x00' }; - const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo( + const [sum, constant, struct]: [u64, u32, MyStruct] = await exported_function_foo( '2', '3', - [0, 0, 0, 0, 0], + ['0x00', '0x00', '0x00', 
'0x00', '0x00'], { foo: my_struct, - bar: [my_struct, my_struct, my_struct], + bar: [my_struct, my_struct], baz: '64', }, '12345', diff --git a/tooling/noir_codegen/test/test_lib/src/lib.nr b/tooling/noir_codegen/test/test_lib/src/lib.nr index 23607c6f65..4915b0a2c4 100644 --- a/tooling/noir_codegen/test/test_lib/src/lib.nr +++ b/tooling/noir_codegen/test/test_lib/src/lib.nr @@ -1,17 +1,23 @@ -struct MyStruct { +struct MyStruct { foo: bool, - bar: [str<5>; 3], + bar: [str; BAR_SIZE], baz: Field } -struct NestedStruct { - foo: MyStruct, - bar: [MyStruct; 3], - baz: u64 +struct NestedStruct { + foo: MyStruct, + bar: [MyStruct; BAR_SIZE], + baz: BAZ_TYP } #[export] -fn exported_function_foo(x: u64, y: u64, array: [u8; 5], my_struct: NestedStruct, string: str<5>) -> (u64, u64, MyStruct) { +fn exported_function_foo( + x: u64, + y: u64, + array: [u8; 5], + my_struct: NestedStruct<2,3,4, Field>, + string: str<5> +) -> (u64, u32, MyStruct<3, 4>) { assert(array.len() == 5); assert(my_struct.foo.foo); assert(string == "12345"); @@ -22,6 +28,6 @@ fn exported_function_foo(x: u64, y: u64, array: [u8; 5], my_struct: NestedStruct } #[export] -fn exported_function_bar(my_struct: NestedStruct) -> (u64) { +fn exported_function_bar(my_struct: NestedStruct<1,2,3, u64>) -> (u64) { my_struct.baz } diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 3368dcd8a0..c6985f4b03 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -42,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.38.0", + "@aztec/bb.js": "portal:../../../../barretenberg/ts", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/tooling/noirc_abi/src/lib.rs b/tooling/noirc_abi/src/lib.rs index 7e89a102a9..7a1d1787ca 100644 --- a/tooling/noirc_abi/src/lib.rs +++ b/tooling/noirc_abi/src/lib.rs @@ -471,7 +471,15 @@ impl Abi { .copied() }) { - Some(decode_value(&mut return_witness_values.into_iter(), &return_type.abi_type)?) + // We do not return value for the data bus. + if return_type.visibility == AbiVisibility::DataBus { + None + } else { + Some(decode_value( + &mut return_witness_values.into_iter(), + &return_type.abi_type, + )?) + } } else { // Unlike for the circuit inputs, we tolerate not being able to find the witness values for the return value. // This is because the user may be decoding a partial witness map for which is hasn't been calculated yet. diff --git a/yarn.lock b/yarn.lock index 85966ce339..b45678f5d8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -221,19 +221,18 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@npm:0.38.0": - version: 0.38.0 - resolution: "@aztec/bb.js@npm:0.38.0" +"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg": + version: 0.0.0-use.local + resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=%40noir-lang%2Fbackend_barretenberg%40workspace%3Atooling%2Fnoir_js_backend_barretenberg" dependencies: comlink: ^4.4.1 commander: ^10.0.1 debug: ^4.3.4 tslib: ^2.4.0 bin: - bb.js: dest/node/main.js - checksum: 5ebc2850f37993db1d0fe4306ec612e9df14c5d227e1451f1b2f96e63e61c64225c46b32d1e1d2a1a0c37795e50b2875362520e9eb49324312516ec9fd6de2c7 + bb.js: ./dest/node/main.js languageName: node - linkType: hard + linkType: soft "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, 
@babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -4396,7 +4395,7 @@ __metadata: version: 0.0.0-use.local resolution: "@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.38.0 + "@aztec/bb.js": "portal:../../../../barretenberg/ts" "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3