diff --git a/.noir-sync-commit b/.noir-sync-commit index b6e1166fe48..e2676e1626e 100644 --- a/.noir-sync-commit +++ b/.noir-sync-commit @@ -1 +1 @@ -31640e91ba75b9c5200ea66d1f54cc5185e0d196 +f065c6682e2c896a346716cf88ac285f1d4bf846 diff --git a/noir/noir-repo/.github/scripts/merge-bench-reports.sh b/noir/noir-repo/.github/scripts/merge-bench-reports.sh new file mode 100755 index 00000000000..23a62874148 --- /dev/null +++ b/noir/noir-repo/.github/scripts/merge-bench-reports.sh @@ -0,0 +1,27 @@ +#!/bin/bash +set -eu + +echo "Merging reports" + +REPORT_NAME=$1 +NAME_PLURAL=""$REPORT_NAME"s" + +combined_reports="[]" + +# Iterate over each report and merge them +for report in ./reports/*; do + # The report is saved under ./$REPORT_NAME_{ matrix_report }/$REPORT_NAME_{ matrix_report }.json + FILE_PATH=$(echo $(ls $report)) + + # Extract the $NAME_PLURAL array from each report and merge it + combined_reports=$(jq '[."'"$NAME_PLURAL"'"[]] + '"$combined_reports" <<< "$(cat "$report/$FILE_PATH")") +done + +combined_reports=$(jq '[."'$NAME_PLURAL'"[]] + '"$combined_reports" <<< "$(cat ./$REPORT_NAME.json)") + +# Wrap the merged memory reports into a new object as to keep the $NAME_PLURAL key +final_report="{\"$NAME_PLURAL\": $combined_reports}" + +echo "$final_report" > $REPORT_NAME.json + +cat $REPORT_NAME.json \ No newline at end of file diff --git a/noir/noir-repo/.github/workflows/cache-cleanup.yml b/noir/noir-repo/.github/workflows/cache-cleanup.yml index cf2b0ec413e..bb05c5454e5 100644 --- a/noir/noir-repo/.github/workflows/cache-cleanup.yml +++ b/noir/noir-repo/.github/workflows/cache-cleanup.yml @@ -12,7 +12,7 @@ on: jobs: cleanup: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Cleanup run: | diff --git a/noir/noir-repo/.github/workflows/deny.yml b/noir/noir-repo/.github/workflows/deny.yml index 8ae7d03e076..11dbc3eef4b 100644 --- a/noir/noir-repo/.github/workflows/deny.yml +++ b/noir/noir-repo/.github/workflows/deny.yml @@ -18,7 +18,7 @@ 
concurrency: deny-${{ github.head_ref || github.run_id }} jobs: deny: name: deny - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: EmbarkStudios/cargo-deny-action@v1 diff --git a/noir/noir-repo/.github/workflows/docs-dead-links.yml b/noir/noir-repo/.github/workflows/docs-dead-links.yml index 40e948fe2c1..b46c5393f8d 100644 --- a/noir/noir-repo/.github/workflows/docs-dead-links.yml +++ b/noir/noir-repo/.github/workflows/docs-dead-links.yml @@ -13,7 +13,7 @@ concurrency: jobs: markdown-link-check: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@master - uses: gaurav-nelson/github-action-markdown-link-check@v1 diff --git a/noir/noir-repo/.github/workflows/docs-pr.yml b/noir/noir-repo/.github/workflows/docs-pr.yml index 78abb8252b3..fdd4d25f5ae 100644 --- a/noir/noir-repo/.github/workflows/docs-pr.yml +++ b/noir/noir-repo/.github/workflows/docs-pr.yml @@ -5,7 +5,7 @@ on: jobs: add_label: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: has_label: ${{ steps.check-labels.outputs.result }} steps: @@ -49,7 +49,7 @@ jobs: } build_preview: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout code uses: actions/checkout@v4 @@ -93,7 +93,7 @@ jobs: deploy_preview: needs: [build_preview, add_label] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write if: needs.add_label.outputs.has_label == 'true' @@ -121,7 +121,7 @@ jobs: add_comment: needs: [deploy_preview] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write steps: diff --git a/noir/noir-repo/.github/workflows/formatting.yml b/noir/noir-repo/.github/workflows/formatting.yml index ab92d452c79..f8ebd53dc70 100644 --- a/noir/noir-repo/.github/workflows/formatting.yml +++ b/noir/noir-repo/.github/workflows/formatting.yml @@ -15,7 +15,7 @@ concurrency: jobs: clippy: name: cargo clippy - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 env: RUSTFLAGS: 
-Dwarnings @@ -41,7 +41,7 @@ jobs: rustfmt: name: cargo fmt - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 env: RUSTFLAGS: -Dwarnings @@ -67,7 +67,7 @@ jobs: eslint: name: eslint - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -115,7 +115,7 @@ jobs: nargo_fmt: needs: [build-nargo] name: Nargo fmt - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: diff --git a/noir/noir-repo/.github/workflows/publish-acvm.yml b/noir/noir-repo/.github/workflows/publish-acvm.yml index feb4d4216c3..fb2e2001e40 100644 --- a/noir/noir-repo/.github/workflows/publish-acvm.yml +++ b/noir/noir-repo/.github/workflows/publish-acvm.yml @@ -10,7 +10,7 @@ on: jobs: publish: name: Publish in order - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout sources uses: actions/checkout@v4 diff --git a/noir/noir-repo/.github/workflows/publish-docs.yml b/noir/noir-repo/.github/workflows/publish-docs.yml index 8896e613608..16959256d2a 100644 --- a/noir/noir-repo/.github/workflows/publish-docs.yml +++ b/noir/noir-repo/.github/workflows/publish-docs.yml @@ -10,7 +10,7 @@ on: jobs: publish-docs: name: Publish docs - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout release branch diff --git a/noir/noir-repo/.github/workflows/publish-es-packages.yml b/noir/noir-repo/.github/workflows/publish-es-packages.yml index 682fed69c7b..e629ae1f133 100644 --- a/noir/noir-repo/.github/workflows/publish-es-packages.yml +++ b/noir/noir-repo/.github/workflows/publish-es-packages.yml @@ -16,7 +16,7 @@ run-name: Publish ES Packages from ${{ inputs.noir-ref }} under @${{ inputs.npm- jobs: build-noirc_abi_wasm: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout Noir repo uses: actions/checkout@v4 @@ -50,7 +50,7 @@ jobs: retention-days: 10 build-noir_wasm: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout sources uses: actions/checkout@v4 @@ -87,7 +87,7 @@ jobs: 
retention-days: 3 build-acvm_js: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout sources uses: actions/checkout@v4 @@ -121,7 +121,7 @@ jobs: retention-days: 3 publish-es-packages: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: [build-acvm_js, build-noirc_abi_wasm, build-noir_wasm] steps: - name: Checkout sources diff --git a/noir/noir-repo/.github/workflows/publish-nightly.yml b/noir/noir-repo/.github/workflows/publish-nightly.yml index f5c013883bb..2eef9ab60f7 100644 --- a/noir/noir-repo/.github/workflows/publish-nightly.yml +++ b/noir/noir-repo/.github/workflows/publish-nightly.yml @@ -7,7 +7,7 @@ on: jobs: dispatch-publish-es: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Dispatch to publish-nargo uses: benc-uk/workflow-dispatch@v1 diff --git a/noir/noir-repo/.github/workflows/pull-request-title.yml b/noir/noir-repo/.github/workflows/pull-request-title.yml index 7e9b729da28..41922bd32ab 100644 --- a/noir/noir-repo/.github/workflows/pull-request-title.yml +++ b/noir/noir-repo/.github/workflows/pull-request-title.yml @@ -15,7 +15,7 @@ permissions: jobs: conventional-title: name: Validate PR title is Conventional Commit - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Check title if: github.event_name == 'pull_request_target' @@ -30,7 +30,7 @@ jobs: force-push-comment: name: Warn external contributors about force-pushing - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 if: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name != 'noir-lang/noir' }} permissions: pull-requests: write diff --git a/noir/noir-repo/.github/workflows/recrawler.yml b/noir/noir-repo/.github/workflows/recrawler.yml index ee832e273a1..808e5819353 100644 --- a/noir/noir-repo/.github/workflows/recrawler.yml +++ b/noir/noir-repo/.github/workflows/recrawler.yml @@ -7,7 +7,7 @@ on: jobs: algolia_recrawl: name: Algolia Recrawl - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: 
Algolia crawler creation and crawl uses: algolia/algoliasearch-crawler-github-actions@v1.1.0 diff --git a/noir/noir-repo/.github/workflows/release.yml b/noir/noir-repo/.github/workflows/release.yml index 59c3d9a1415..bbe9a7fff62 100644 --- a/noir/noir-repo/.github/workflows/release.yml +++ b/noir/noir-repo/.github/workflows/release.yml @@ -11,7 +11,7 @@ jobs: outputs: release-pr: ${{ steps.release.outputs.pr }} tag-name: ${{ steps.release.outputs.tag_name }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Run release-please id: release @@ -23,7 +23,7 @@ jobs: name: Update acvm workspace package versions needs: [release-please] if: ${{ needs.release-please.outputs.release-pr }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout release branch uses: actions/checkout@v4 @@ -66,7 +66,7 @@ jobs: name: Update docs needs: [release-please, update-acvm-workspace-package-versions] if: ${{ needs.release-please.outputs.release-pr }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout release branch @@ -110,7 +110,7 @@ jobs: release-end: name: Release End - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 # We want this job to always run (even if the dependant jobs fail) as we need apply changes to the sticky comment. 
if: ${{ always() }} @@ -145,7 +145,7 @@ jobs: name: Build binaries needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Dispatch to publish workflow uses: benc-uk/workflow-dispatch@v1 @@ -160,7 +160,7 @@ jobs: name: Publish ES packages needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Dispatch to publish-es-packages uses: benc-uk/workflow-dispatch@v1 @@ -174,7 +174,7 @@ jobs: name: Publish acvm needs: [release-please] if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Dispatch to publish-acvm diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml index 8f8aeabb65e..0a03add8338 100644 --- a/noir/noir-repo/.github/workflows/reports.yml +++ b/noir/noir-repo/.github/workflows/reports.yml @@ -8,7 +8,7 @@ on: jobs: build-nargo: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout Noir repo @@ -38,11 +38,10 @@ jobs: path: ./dist/* retention-days: 3 - compare_gates_reports: name: Circuit sizes needs: [build-nargo] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write @@ -93,7 +92,7 @@ jobs: compare_brillig_bytecode_size_reports: name: Brillig bytecode sizes needs: [build-nargo] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write @@ -142,7 +141,7 @@ jobs: compare_brillig_execution_reports: name: Brillig execution trace sizes needs: [build-nargo] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write @@ -191,7 +190,7 @@ jobs: generate_memory_report: name: Peak memory usage needs: [build-nargo] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: pull-requests: write @@ -218,9 +217,267 @@ jobs: ./memory_report.sh mv memory_report.json ../memory_report.json + - name: Upload memory 
report + uses: actions/upload-artifact@v4 + with: + name: in_progress_memory_report + path: memory_report.json + retention-days: 3 + overwrite: true + + generate_compilation_report: + name: Compilation time + needs: [build-nargo] + runs-on: ubuntu-22.04 + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Generate Compilation report + working-directory: ./test_programs + run: | + ./compilation_report.sh + cat compilation_report.json + mv compilation_report.json ../compilation_report.json + + - name: Upload compilation report + uses: actions/upload-artifact@v4 + with: + name: in_progress_compilation_report + path: compilation_report.json + retention-days: 3 + overwrite: true + + external_repo_compilation_report: + needs: [build-nargo] + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + fail-fast: false + matrix: + include: + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-root } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-inner } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-tail } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-reset } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-private } + - project: { repo: AztecProtocol/aztec-packages, path: 
noir-projects/noir-protocol-circuits/crates/rollup-base-public } + + name: External repo compilation report - ${{ matrix.project.repo }}/${{ matrix.project.path }} + steps: + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - uses: actions/checkout@v4 + with: + path: scripts + sparse-checkout: | + test_programs/compilation_report.sh + sparse-checkout-cone-mode: false + + - name: Checkout + uses: actions/checkout@v4 + with: + repository: ${{ matrix.project.repo }} + path: test-repo + ref: ${{ matrix.project.ref }} + + - name: Generate compilation report + working-directory: ./test-repo/${{ matrix.project.path }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/compilation_report.sh ./compilation_report.sh + chmod +x ./compilation_report.sh + ./compilation_report.sh 1 + + - name: Move compilation report + id: report + shell: bash + run: | + PACKAGE_NAME=${{ matrix.project.path }} + PACKAGE_NAME=$(basename $PACKAGE_NAME) + mv ./test-repo/${{ matrix.project.path }}/compilation_report.json ./compilation_report_$PACKAGE_NAME.json + echo "compilation_report_name=$PACKAGE_NAME" >> $GITHUB_OUTPUT + + - name: Upload compilation report + uses: actions/upload-artifact@v4 + with: + name: compilation_report_${{ steps.report.outputs.compilation_report_name }} + path: compilation_report_${{ steps.report.outputs.compilation_report_name }}.json + retention-days: 3 + overwrite: true + + upload_compilation_report: + name: Upload compilation report + needs: [generate_compilation_report, external_repo_compilation_report] + # We want this job to run even if one variation of the matrix in `external_repo_compilation_report` fails + if: always() + runs-on: ubuntu-latest + permissions: + 
pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download initial compilation report + uses: actions/download-artifact@v4 + with: + name: in_progress_compilation_report + + - name: Download matrix compilation reports + uses: actions/download-artifact@v4 + with: + pattern: compilation_report_* + path: ./reports + + - name: Merge compilation reports using jq + run: | + mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh + ./merge-bench-reports.sh compilation_report + + - name: Parse compilation report + id: compilation_report + uses: noir-lang/noir-bench-report@0d7464a8c39170523932d7846b6e6b458a294aea + with: + report: compilation_report.json + header: | + # Compilation Report + memory_report: false + + - name: Add memory report to sticky comment + if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: compilation + message: ${{ steps.compilation_report.outputs.markdown }} + + external_repo_memory_report: + needs: [build-nargo] + runs-on: ubuntu-latest + timeout-minutes: 30 + strategy: + fail-fast: false + matrix: + include: + # TODO: Bring this report back under a flag. The `noir-contracts` report takes just under 30 minutes. 
+ # - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-root } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-inner } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-reset } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-tail } + + name: External repo memory report - ${{ matrix.project.repo }}/${{ matrix.project.path }} + steps: + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - uses: actions/checkout@v4 + with: + path: scripts + sparse-checkout: | + test_programs/memory_report.sh + sparse-checkout-cone-mode: false + + - name: Checkout + uses: actions/checkout@v4 + with: + repository: ${{ matrix.project.repo }} + path: test-repo + ref: ${{ matrix.project.ref }} + + - name: Generate memory report + working-directory: ./test-repo/${{ matrix.project.path }} + run: | + mv /home/runner/work/noir/noir/scripts/test_programs/memory_report.sh ./memory_report.sh + chmod +x ./memory_report.sh + ./memory_report.sh 1 + + - name: Move memory report + id: report + shell: bash + run: | + PACKAGE_NAME=${{ matrix.project.path }} + PACKAGE_NAME=$(basename $PACKAGE_NAME) + mv ./test-repo/${{ matrix.project.path }}/memory_report.json ./memory_report_$PACKAGE_NAME.json + echo "memory_report_name=$PACKAGE_NAME" >> $GITHUB_OUTPUT + + - name: Upload memory report + uses: actions/upload-artifact@v4 + with: + name: memory_report_${{
steps.report.outputs.memory_report_name }} + path: memory_report_${{ steps.report.outputs.memory_report_name }}.json + retention-days: 3 + overwrite: true + + upload_memory_report: + name: Upload memory report + needs: [generate_memory_report, external_repo_memory_report] + # We want this job to run even if one variation of the matrix in `external_repo_memory_report` fails + if: always() + runs-on: ubuntu-latest + permissions: + pull-requests: write + + steps: + - uses: actions/checkout@v4 + + - name: Download initial memory report + uses: actions/download-artifact@v4 + with: + name: in_progress_memory_report + + - name: Download matrix memory reports + uses: actions/download-artifact@v4 + with: + pattern: memory_report_* + path: ./reports + + - name: Merge memory reports using jq + run: | + mv ./.github/scripts/merge-bench-reports.sh merge-bench-reports.sh + ./merge-bench-reports.sh memory_report + - name: Parse memory report id: memory_report - uses: noir-lang/noir-bench-report@ccb0d806a91d3bd86dba0ba3d580a814eed5673c + uses: noir-lang/noir-bench-report@0d7464a8c39170523932d7846b6e6b458a294aea with: report: memory_report.json header: | diff --git a/noir/noir-repo/.github/workflows/spellcheck.yml b/noir/noir-repo/.github/workflows/spellcheck.yml index 83d67325775..2b9a1461231 100644 --- a/noir/noir-repo/.github/workflows/spellcheck.yml +++ b/noir/noir-repo/.github/workflows/spellcheck.yml @@ -10,7 +10,7 @@ concurrency: jobs: code: name: Code - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout sources uses: actions/checkout@v4 @@ -25,7 +25,7 @@ jobs: docs: name: Documentation - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout sources uses: actions/checkout@v4 diff --git a/noir/noir-repo/.github/workflows/test-js-packages.yml b/noir/noir-repo/.github/workflows/test-js-packages.yml index 6a9a918b955..dde0deed0cf 100644 --- a/noir/noir-repo/.github/workflows/test-js-packages.yml +++ 
b/noir/noir-repo/.github/workflows/test-js-packages.yml @@ -14,7 +14,7 @@ concurrency: jobs: yarn-lock: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -60,7 +60,7 @@ jobs: retention-days: 3 build-noirc-abi: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -92,7 +92,7 @@ jobs: retention-days: 10 build-noir-wasm: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -127,7 +127,7 @@ jobs: retention-days: 3 build-acvm-js: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -161,7 +161,7 @@ jobs: test-acvm_js-node: needs: [build-acvm-js] name: ACVM JS (Node.js) - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -183,7 +183,7 @@ jobs: test-acvm_js-browser: needs: [build-acvm-js] name: ACVM JS (Browser) - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -208,7 +208,7 @@ jobs: test-noirc-abi: needs: [build-noirc-abi] name: noirc_abi - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -236,7 +236,7 @@ jobs: test-noir-js: needs: [build-nargo, build-acvm-js, build-noirc-abi] name: Noir JS - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -283,7 +283,7 @@ jobs: test-noir-wasm: needs: [build-noir-wasm, build-nargo] name: noir_wasm - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -327,7 +327,7 @@ jobs: test-noir-codegen: needs: [build-acvm-js, build-noirc-abi, build-nargo] name: noir_codegen - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -374,7 +374,7 @@ jobs: test-integration-node: name: Integration Tests (Node) - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: [build-acvm-js, build-noir-wasm, build-nargo, build-noirc-abi] timeout-minutes: 30 @@ -435,7 +435,7 @@ jobs: test-integration-browser: name: Integration Tests (Browser) - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: 
[build-acvm-js, build-noir-wasm, build-noirc-abi] timeout-minutes: 30 @@ -480,7 +480,7 @@ jobs: test-examples: name: Example scripts - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: [build-nargo] timeout-minutes: 30 @@ -521,36 +521,45 @@ jobs: working-directory: ./examples/codegen_verifier run: ./test.sh + critical-library-list: + name: Load critical library list + runs-on: ubuntu-22.04 + outputs: + libraries: ${{ steps.get_critical_libraries.outputs.libraries }} + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Build list of libraries + id: get_critical_libraries + run: | + LIBRARIES=$(grep -Po "^https://github.com/\K.+" ./CRITICAL_NOIR_LIBRARIES | jq -R -s -c 'split("\n") | map(select(. != "")) | map({ repo: ., path: ""})') + echo "libraries=$LIBRARIES" + echo "libraries=$LIBRARIES" >> $GITHUB_OUTPUT + env: + GH_TOKEN: ${{ github.token }} + external-repo-checks: - needs: [build-nargo] - runs-on: ubuntu-latest + needs: [build-nargo, critical-library-list] + runs-on: ubuntu-22.04 # Only run when 'run-external-checks' label is present if: contains(github.event.pull_request.labels.*.name, 'run-external-checks') timeout-minutes: 30 strategy: fail-fast: false matrix: - project: - - { repo: noir-lang/ec, path: ./ } - - { repo: noir-lang/eddsa, path: ./ } - - { repo: noir-lang/mimc, path: ./ } - - { repo: noir-lang/noir_sort, path: ./ } - - { repo: noir-lang/noir-edwards, path: ./ } - - { repo: noir-lang/noir-bignum, path: ./ } - - { repo: noir-lang/noir_bigcurve, path: ./ } - - { repo: noir-lang/noir_base64, path: ./ } - - { repo: noir-lang/noir_string_search, path: ./ } - - { repo: noir-lang/sparse_array, path: ./ } - - { repo: noir-lang/noir_rsa, path: ./lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib } - - { repo: 
AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib } - - { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/types } - - name: Check external repo - ${{ matrix.project.repo }} + project: ${{ fromJson( needs.critical-library-list.outputs.libraries )}} + include: + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/aztec-nr } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-lib } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-lib } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/reset-kernel-lib } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-lib } + - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/types } + + name: Check external repo - ${{ matrix.project.repo }}/${{ matrix.project.path }} steps: - name: Checkout uses: actions/checkout@v4 @@ -582,7 +591,7 @@ jobs: - name: Run nargo test working-directory: ./test-repo/${{ matrix.project.path }} - run: nargo test --silence-warnings + run: nargo test -q --silence-warnings env: NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true @@ -590,7 +599,7 @@ jobs: # This allows us to add/remove test jobs without having to update the required workflows. tests-end: name: End - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. 
if: ${{ always() }} needs: diff --git a/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml b/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml index ae016169830..6fd71eb56a2 100644 --- a/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml +++ b/noir/noir-repo/.github/workflows/test-rust-workspace-msrv.yml @@ -21,7 +21,7 @@ concurrency: jobs: build-test-artifacts: name: Build test artifacts - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -62,7 +62,7 @@ jobs: run-tests: name: "Run tests (partition ${{matrix.partition}})" - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: [build-test-artifacts] strategy: fail-fast: false @@ -87,6 +87,7 @@ jobs: name: nextest-archive - name: Run tests run: | + RUST_MIN_STACK=8388608 \ cargo nextest run --archive-file nextest-archive.tar.zst \ --partition count:${{ matrix.partition }}/4 \ --no-fail-fast @@ -95,7 +96,7 @@ jobs: # This allows us to add/remove test jobs without having to update the required workflows. tests-end: name: Rust End - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. 
if: ${{ always() }} needs: diff --git a/noir/noir-repo/.github/workflows/test-rust-workspace.yml b/noir/noir-repo/.github/workflows/test-rust-workspace.yml index 1f3ee5e2268..1514270ff56 100644 --- a/noir/noir-repo/.github/workflows/test-rust-workspace.yml +++ b/noir/noir-repo/.github/workflows/test-rust-workspace.yml @@ -15,7 +15,7 @@ concurrency: jobs: build-test-artifacts: name: Build test artifacts - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 timeout-minutes: 30 steps: @@ -49,7 +49,7 @@ jobs: run-tests: name: "Run tests (partition ${{matrix.partition}})" - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 needs: [build-test-artifacts] strategy: fail-fast: false @@ -74,6 +74,7 @@ jobs: name: nextest-archive - name: Run tests run: | + RUST_MIN_STACK=8388608 \ cargo nextest run --archive-file nextest-archive.tar.zst \ --partition count:${{ matrix.partition }}/4 \ --no-fail-fast @@ -82,7 +83,7 @@ jobs: # This allows us to add/remove test jobs without having to update the required workflows. tests-end: name: Rust End - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. if: ${{ always() }} needs: diff --git a/noir/noir-repo/.gitignore b/noir/noir-repo/.gitignore index f1f0ea47bcf..8442d688fbf 100644 --- a/noir/noir-repo/.gitignore +++ b/noir/noir-repo/.gitignore @@ -35,6 +35,7 @@ tooling/noir_js/lib gates_report.json gates_report_brillig.json gates_report_brillig_execution.json +compilation_report.json # Github Actions scratch space # This gives a location to download artifacts into the repository in CI without making git dirty. 
diff --git a/noir/noir-repo/CRITICAL_NOIR_LIBRARIES b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES new file mode 100644 index 00000000000..c753b76a4fc --- /dev/null +++ b/noir/noir-repo/CRITICAL_NOIR_LIBRARIES @@ -0,0 +1,13 @@ +https://github.com/noir-lang/ec +https://github.com/noir-lang/eddsa +https://github.com/noir-lang/mimc +https://github.com/noir-lang/schnorr +https://github.com/noir-lang/noir_sort +https://github.com/noir-lang/noir-edwards +https://github.com/noir-lang/noir-bignum +https://github.com/noir-lang/noir_bigcurve +https://github.com/noir-lang/noir_base64 +https://github.com/noir-lang/noir_string_search +https://github.com/noir-lang/sparse_array +https://github.com/noir-lang/noir_rsa +https://github.com/noir-lang/noir_json_parser diff --git a/noir/noir-repo/Cargo.lock b/noir/noir-repo/Cargo.lock index 96ceb94fcdd..4907de7ae62 100644 --- a/noir/noir-repo/Cargo.lock +++ b/noir/noir-repo/Cargo.lock @@ -48,6 +48,7 @@ dependencies = [ "ark-bn254", "bn254_blackbox_solver", "brillig_vm", + "fxhash", "indexmap 1.9.3", "num-bigint", "proptest", @@ -868,7 +869,7 @@ checksum = "fc4159b76af02757139baf42c0c971c6dc155330999fbfd8eddb29b97fb2db68" dependencies = [ "codespan-reporting", "lsp-types 0.88.0", - "url 2.5.3", + "url 2.5.4", ] [[package]] @@ -2656,7 +2657,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url 2.5.3", + "url 2.5.4", ] [[package]] @@ -2669,7 +2670,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url 2.5.3", + "url 2.5.4", ] [[package]] @@ -2882,9 +2883,11 @@ dependencies = [ "noirc_frontend", "semver", "serde", + "tempfile", + "test-case", "thiserror", "toml 0.7.8", - "url 2.5.3", + "url 2.5.4", ] [[package]] @@ -3184,6 +3187,7 @@ dependencies = [ "test-case", "thiserror", "tracing", + "tracing-test", ] [[package]] @@ -3206,7 +3210,6 @@ dependencies = [ "proptest", "proptest-derive 0.5.0", "rangemap", - "regex", "rustc-hash", "serde", "serde_json", @@ -3225,7 +3228,6 @@ dependencies = [ "acvm", "iter-extended", 
"jsonrpc", - "regex", "serde", "serde_json", "thiserror", @@ -5074,6 +5076,27 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "tracing-test" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68" +dependencies = [ + "tracing-core", + "tracing-subscriber", + "tracing-test-macro", +] + +[[package]] +name = "tracing-test-macro" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" +dependencies = [ + "quote", + "syn 2.0.87", +] + [[package]] name = "tracing-web" version = "0.1.3" @@ -5178,9 +5201,9 @@ dependencies = [ [[package]] name = "url" -version = "2.5.3" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna 1.0.3", @@ -5398,7 +5421,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] diff --git a/noir/noir-repo/Cargo.toml b/noir/noir-repo/Cargo.toml index 4ce0ddd999f..0acee2a040b 100644 --- a/noir/noir-repo/Cargo.toml +++ b/noir/noir-repo/Cargo.toml @@ -135,7 +135,7 @@ serde_json = "1.0" smol_str = { version = "0.1.17", features = ["serde"] } thiserror = "1.0.21" toml = "0.7.2" -url = "2.2.0" +url = "2.5.4" base64 = "0.21.2" fxhash = "0.2.1" build-data = "0.1.3" diff --git a/noir/noir-repo/acvm-repo/acvm/Cargo.toml b/noir/noir-repo/acvm-repo/acvm/Cargo.toml index e513ae4e727..ba01ac8ec16 100644 --- a/noir/noir-repo/acvm-repo/acvm/Cargo.toml +++ b/noir/noir-repo/acvm-repo/acvm/Cargo.toml @@ -17,7 +17,7 @@ workspace = true 
thiserror.workspace = true tracing.workspace = true serde.workspace = true - +fxhash.workspace = true acir.workspace = true brillig_vm.workspace = true acvm_blackbox_solver.workspace = true diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs index 8829f77e50b..daedd77c4a0 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/mod.rs @@ -16,6 +16,10 @@ pub use simulator::CircuitSimulator; use transformers::transform_internal; pub use transformers::{transform, MIN_EXPRESSION_WIDTH}; +/// We need multiple passes to stabilize the output. +/// The value was determined by running tests. +const MAX_OPTIMIZER_PASSES: usize = 3; + /// This module moves and decomposes acir opcodes. The transformation map allows consumers of this module to map /// metadata they had about the opcodes to the new opcode structure generated after the transformation. #[derive(Debug)] @@ -28,9 +32,9 @@ impl AcirTransformationMap { /// Builds a map from a vector of pointers to the old acir opcodes. /// The index of the vector is the new opcode index. /// The value of the vector is the old opcode index pointed. - fn new(acir_opcode_positions: Vec) -> Self { + fn new(acir_opcode_positions: &[usize]) -> Self { let mut old_indices_to_new_indices = HashMap::with_capacity(acir_opcode_positions.len()); - for (new_index, old_index) in acir_opcode_positions.into_iter().enumerate() { + for (new_index, old_index) in acir_opcode_positions.iter().copied().enumerate() { old_indices_to_new_indices.entry(old_index).or_insert_with(Vec::new).push(new_index); } AcirTransformationMap { old_indices_to_new_indices } @@ -72,17 +76,51 @@ fn transform_assert_messages( } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. +/// +/// Runs multiple passes until the output stabilizes. 
pub fn compile( acir: Circuit, expression_width: ExpressionWidth, ) -> (Circuit, AcirTransformationMap) { - let (acir, acir_opcode_positions) = optimize_internal(acir); + let acir_opcode_positions = (0..acir.opcodes.len()).collect::>(); + + if MAX_OPTIMIZER_PASSES == 0 { + return (acir, AcirTransformationMap::new(&acir_opcode_positions)); + } + + let mut pass = 0; + let mut prev_opcodes_hash = fxhash::hash64(&acir.opcodes); + let mut prev_acir = acir; + let mut prev_acir_opcode_positions = acir_opcode_positions; + + // For most test programs it would be enough to only loop `transform_internal`, + // but some of them don't stabilize unless we also repeat the backend agnostic optimizations. + let (mut acir, acir_opcode_positions) = loop { + let (acir, acir_opcode_positions) = + optimize_internal(prev_acir, prev_acir_opcode_positions); - let (mut acir, acir_opcode_positions) = - transform_internal(acir, expression_width, acir_opcode_positions); + // Stop if we have already done at least one transform and an extra optimization changed nothing. + if pass > 0 && prev_opcodes_hash == fxhash::hash64(&acir.opcodes) { + break (acir, acir_opcode_positions); + } + + let (acir, acir_opcode_positions) = + transform_internal(acir, expression_width, acir_opcode_positions); + + let opcodes_hash = fxhash::hash64(&acir.opcodes); + + // Stop if the output hasn't change in this loop or we went too long. 
+ if pass == MAX_OPTIMIZER_PASSES - 1 || prev_opcodes_hash == opcodes_hash { + break (acir, acir_opcode_positions); + } - let transformation_map = AcirTransformationMap::new(acir_opcode_positions); + pass += 1; + prev_acir = acir; + prev_opcodes_hash = opcodes_hash; + prev_acir_opcode_positions = acir_opcode_positions; + }; + let transformation_map = AcirTransformationMap::new(&acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); (acir, transformation_map) diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs index 0a55e4ca17c..43e32101cc5 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/merge_expressions.rs @@ -12,26 +12,38 @@ use acir::{ use crate::compiler::CircuitSimulator; -pub(crate) struct MergeExpressionsOptimizer { +pub(crate) struct MergeExpressionsOptimizer { resolved_blocks: HashMap>, + modified_gates: HashMap>, + deleted_gates: BTreeSet, } -impl MergeExpressionsOptimizer { +impl MergeExpressionsOptimizer { pub(crate) fn new() -> Self { - MergeExpressionsOptimizer { resolved_blocks: HashMap::new() } + MergeExpressionsOptimizer { + resolved_blocks: HashMap::new(), + modified_gates: HashMap::new(), + deleted_gates: BTreeSet::new(), + } } /// This pass analyzes the circuit and identifies intermediate variables that are /// only used in two gates. 
It then merges the gate that produces the /// intermediate variable into the second one that uses it /// Note: This pass is only relevant for backends that can handle unlimited width - pub(crate) fn eliminate_intermediate_variable( + pub(crate) fn eliminate_intermediate_variable( &mut self, circuit: &Circuit, acir_opcode_positions: Vec, ) -> (Vec>, Vec) { + // Initialization + self.modified_gates.clear(); + self.deleted_gates.clear(); + self.resolved_blocks.clear(); + // Keep track, for each witness, of the gates that use it - let circuit_inputs = circuit.circuit_arguments(); - self.resolved_blocks = HashMap::new(); + let circuit_io: BTreeSet = + circuit.circuit_arguments().union(&circuit.public_inputs().0).cloned().collect(); + let mut used_witness: BTreeMap> = BTreeMap::new(); for (i, opcode) in circuit.opcodes.iter().enumerate() { let witnesses = self.witness_inputs(opcode); @@ -39,87 +51,96 @@ impl MergeExpressionsOptimizer { self.resolved_blocks.insert(*block_id, witnesses.clone()); } for w in witnesses { - // We do not simplify circuit inputs - if !circuit_inputs.contains(&w) { + // We do not simplify circuit inputs and outputs + if !circuit_io.contains(&w) { used_witness.entry(w).or_default().insert(i); } } } - let mut modified_gates: HashMap> = HashMap::new(); - let mut new_circuit = Vec::new(); - let mut new_acir_opcode_positions = Vec::new(); // For each opcode, try to get a target opcode to merge with - for (i, (opcode, opcode_position)) in - circuit.opcodes.iter().zip(acir_opcode_positions).enumerate() - { + for (i, opcode) in circuit.opcodes.iter().enumerate() { if !matches!(opcode, Opcode::AssertZero(_)) { - new_circuit.push(opcode.clone()); - new_acir_opcode_positions.push(opcode_position); continue; } - let opcode = modified_gates.get(&i).unwrap_or(opcode).clone(); - let mut to_keep = true; - let input_witnesses = self.witness_inputs(&opcode); - for w in input_witnesses { - let Some(gates_using_w) = used_witness.get(&w) else { - continue; - }; - // 
We only consider witness which are used in exactly two arithmetic gates - if gates_using_w.len() == 2 { - let first = *gates_using_w.first().expect("gates_using_w.len == 2"); - let second = *gates_using_w.last().expect("gates_using_w.len == 2"); - let b = if second == i { - first - } else { - // sanity check - assert!(i == first); - second + if let Some(opcode) = self.get_opcode(i, circuit) { + let input_witnesses = self.witness_inputs(&opcode); + for w in input_witnesses { + let Some(gates_using_w) = used_witness.get(&w) else { + continue; }; - - let second_gate = modified_gates.get(&b).unwrap_or(&circuit.opcodes[b]); - if let (Opcode::AssertZero(expr_define), Opcode::AssertZero(expr_use)) = - (&opcode, second_gate) - { - // We cannot merge an expression into an earlier opcode, because this - // would break the 'execution ordering' of the opcodes - // This case can happen because a previous merge would change an opcode - // and eliminate a witness from it, giving new opportunities for this - // witness to be used in only two expressions - // TODO: the missed optimization for the i>b case can be handled by - // - doing this pass again until there is no change, or - // - merging 'b' into 'i' instead - if i < b { - if let Some(expr) = Self::merge(expr_use, expr_define, w) { - modified_gates.insert(b, Opcode::AssertZero(expr)); - to_keep = false; - // Update the 'used_witness' map to account for the merge. 
- for w2 in CircuitSimulator::expr_wit(expr_define) { - if !circuit_inputs.contains(&w2) { - let v = used_witness.entry(w2).or_default(); - v.insert(b); - v.remove(&i); + // We only consider witness which are used in exactly two arithmetic gates + if gates_using_w.len() == 2 { + let first = *gates_using_w.first().expect("gates_using_w.len == 2"); + let second = *gates_using_w.last().expect("gates_using_w.len == 2"); + let b = if second == i { + first + } else { + // sanity check + assert!(i == first); + second + }; + // Merge the opcode with smaller index into the other one + // by updating modified_gates/deleted_gates/used_witness + // returns false if it could not merge them + let mut merge_opcodes = |op1, op2| -> bool { + if op1 == op2 { + return false; + } + let (source, target) = if op1 < op2 { (op1, op2) } else { (op2, op1) }; + let source_opcode = self.get_opcode(source, circuit); + let target_opcode = self.get_opcode(target, circuit); + if let ( + Some(Opcode::AssertZero(expr_use)), + Some(Opcode::AssertZero(expr_define)), + ) = (target_opcode, source_opcode) + { + if let Some(expr) = + Self::merge_expression(&expr_use, &expr_define, w) + { + self.modified_gates.insert(target, Opcode::AssertZero(expr)); + self.deleted_gates.insert(source); + // Update the 'used_witness' map to account for the merge. + let mut witness_list = CircuitSimulator::expr_wit(&expr_use); + witness_list.extend(CircuitSimulator::expr_wit(&expr_define)); + for w2 in witness_list { + if !circuit_io.contains(&w2) { + used_witness.entry(w2).and_modify(|v| { + v.insert(target); + v.remove(&source); + }); + } } + return true; } - // We need to stop here and continue with the next opcode - // because the merge invalidates the current opcode. - break; } + false + }; + + if merge_opcodes(b, i) { + // We need to stop here and continue with the next opcode + // because the merge invalidates the current opcode. 
+ break; } } } } + } + + // Construct the new circuit from modified/deleted gates + let mut new_circuit = Vec::new(); + let mut new_acir_opcode_positions = Vec::new(); - if to_keep { - let opcode = modified_gates.get(&i).cloned().unwrap_or(opcode); - new_circuit.push(opcode); - new_acir_opcode_positions.push(opcode_position); + for (i, opcode_position) in acir_opcode_positions.iter().enumerate() { + if let Some(op) = self.get_opcode(i, circuit) { + new_circuit.push(op); + new_acir_opcode_positions.push(*opcode_position); } } (new_circuit, new_acir_opcode_positions) } - fn brillig_input_wit(&self, input: &BrilligInputs) -> BTreeSet { + fn brillig_input_wit(&self, input: &BrilligInputs) -> BTreeSet { let mut result = BTreeSet::new(); match input { BrilligInputs::Single(expr) => { @@ -152,7 +173,7 @@ impl MergeExpressionsOptimizer { } // Returns the input witnesses used by the opcode - fn witness_inputs(&self, opcode: &Opcode) -> BTreeSet { + fn witness_inputs(&self, opcode: &Opcode) -> BTreeSet { match opcode { Opcode::AssertZero(expr) => CircuitSimulator::expr_wit(expr), Opcode::BlackBoxFuncCall(bb_func) => { @@ -198,7 +219,7 @@ impl MergeExpressionsOptimizer { // Merge 'expr' into 'target' via Gaussian elimination on 'w' // Returns None if the expressions cannot be merged - fn merge( + fn merge_expression( target: &Expression, expr: &Expression, w: Witness, @@ -226,6 +247,13 @@ impl MergeExpressionsOptimizer { } None } + + fn get_opcode(&self, g: usize, circuit: &Circuit) -> Option> { + if self.deleted_gates.contains(&g) { + return None; + } + self.modified_gates.get(&g).or(circuit.opcodes.get(g)).cloned() + } } #[cfg(test)] @@ -300,6 +328,50 @@ mod tests { check_circuit(circuit); } + #[test] + fn does_not_eliminate_witnesses_returned_from_circuit() { + let opcodes = vec![ + Opcode::AssertZero(Expression { + mul_terms: vec![(FieldElement::from(-1i128), Witness(0), Witness(0))], + linear_combinations: vec![(FieldElement::from(1i128), Witness(1))], + q_c: 
FieldElement::zero(), + }), + Opcode::AssertZero(Expression { + mul_terms: Vec::new(), + linear_combinations: vec![ + (FieldElement::from(-1i128), Witness(1)), + (FieldElement::from(1i128), Witness(2)), + ], + q_c: FieldElement::zero(), + }), + ]; + // Witness(1) could be eliminated because it's only used by 2 opcodes. + + let mut private_parameters = BTreeSet::new(); + private_parameters.insert(Witness(0)); + + let mut return_values = BTreeSet::new(); + return_values.insert(Witness(1)); + return_values.insert(Witness(2)); + + let circuit = Circuit { + current_witness_index: 2, + expression_width: ExpressionWidth::Bounded { width: 4 }, + opcodes, + private_parameters, + public_parameters: PublicInputs::default(), + return_values: PublicInputs(return_values), + assert_messages: Default::default(), + }; + + let mut merge_optimizer = MergeExpressionsOptimizer::new(); + let acir_opcode_positions = vec![0; 20]; + let (opcodes, _) = + merge_optimizer.eliminate_intermediate_variable(&circuit, acir_opcode_positions); + + assert_eq!(opcodes.len(), 2); + } + #[test] fn does_not_attempt_to_merge_into_previous_opcodes() { let opcodes = vec![ diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs index 1947a80dc35..3531825c709 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -21,9 +21,13 @@ use super::{transform_assert_messages, AcirTransformationMap}; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. 
pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { - let (mut acir, new_opcode_positions) = optimize_internal(acir); + // Track original acir opcode positions throughout the transformation passes of the compilation + // by applying the modifications done to the circuit opcodes and also to the opcode_positions (delete and insert) + let acir_opcode_positions = (0..acir.opcodes.len()).collect(); + + let (mut acir, new_opcode_positions) = optimize_internal(acir, acir_opcode_positions); - let transformation_map = AcirTransformationMap::new(new_opcode_positions); + let transformation_map = AcirTransformationMap::new(&new_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); @@ -31,12 +35,13 @@ pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformati } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. -#[tracing::instrument(level = "trace", name = "optimize_acir" skip(acir))] -pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, Vec) { - // Track original acir opcode positions throughout the transformation passes of the compilation - // by applying the modifications done to the circuit opcodes and also to the opcode_positions (delete and insert) - let acir_opcode_positions = (0..acir.opcodes.len()).collect(); - +/// +/// Accepts an injected `acir_opcode_positions` to allow optimizations to be applied in a loop. +#[tracing::instrument(level = "trace", name = "optimize_acir" skip(acir, acir_opcode_positions))] +pub(super) fn optimize_internal( + acir: Circuit, + acir_opcode_positions: Vec, +) -> (Circuit, Vec) { if acir.opcodes.len() == 1 && matches!(acir.opcodes[0], Opcode::BrilligCall { .. 
}) { info!("Program is fully unconstrained, skipping optimization pass"); return (acir, acir_opcode_positions); diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs index c9ce4ac7895..a499aec1b30 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -1,5 +1,10 @@ use acir::{ - circuit::{brillig::BrilligOutputs, Circuit, ExpressionWidth, Opcode}, + circuit::{ + self, + brillig::{BrilligInputs, BrilligOutputs}, + opcodes::{BlackBoxFuncCall, FunctionInput, MemOp}, + Circuit, ExpressionWidth, Opcode, + }, native_types::{Expression, Witness}, AcirField, }; @@ -12,6 +17,7 @@ pub use csat::MIN_EXPRESSION_WIDTH; use super::{ optimizers::MergeExpressionsOptimizer, transform_assert_messages, AcirTransformationMap, + MAX_OPTIMIZER_PASSES, }; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. @@ -26,7 +32,7 @@ pub fn transform( let (mut acir, acir_opcode_positions) = transform_internal(acir, expression_width, acir_opcode_positions); - let transformation_map = AcirTransformationMap::new(acir_opcode_positions); + let transformation_map = AcirTransformationMap::new(&acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); @@ -36,9 +42,52 @@ pub fn transform( /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. /// /// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. +/// +/// Does multiple passes until the output stabilizes. 
#[tracing::instrument(level = "trace", name = "transform_acir", skip(acir, acir_opcode_positions))] pub(super) fn transform_internal( - acir: Circuit, + mut acir: Circuit, + expression_width: ExpressionWidth, + mut acir_opcode_positions: Vec, +) -> (Circuit, Vec) { + // Allow multiple passes until we have stable output. + let mut prev_opcodes_hash = fxhash::hash64(&acir.opcodes); + + // For most test programs it would be enough to loop here, but some of them + // don't stabilize unless we also repeat the backend agnostic optimizations. + for _ in 0..MAX_OPTIMIZER_PASSES { + let (new_acir, new_acir_opcode_positions) = + transform_internal_once(acir, expression_width, acir_opcode_positions); + + acir = new_acir; + acir_opcode_positions = new_acir_opcode_positions; + + let new_opcodes_hash = fxhash::hash64(&acir.opcodes); + + if new_opcodes_hash == prev_opcodes_hash { + break; + } + prev_opcodes_hash = new_opcodes_hash; + } + // After the elimination of intermediate variables the `current_witness_index` is potentially higher than it needs to be, + // which would cause gaps if we ran the optimization a second time, making it look like new variables were added. + acir.current_witness_index = max_witness(&acir).witness_index(); + + (acir, acir_opcode_positions) +} + +/// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. +/// +/// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. +/// +/// Does a single optimization pass. 
+#[tracing::instrument( + level = "trace", + name = "transform_acir_once", + skip(acir, acir_opcode_positions) +)] +fn transform_internal_once( + mut acir: Circuit, expression_width: ExpressionWidth, acir_opcode_positions: Vec, ) -> (Circuit, Vec) { @@ -79,8 +128,6 @@ pub(super) fn transform_internal( &mut next_witness_index, ); - // Update next_witness counter - next_witness_index += (intermediate_variables.len() - len) as u32; let mut new_opcodes = Vec::new(); for (g, (norm, w)) in intermediate_variables.iter().skip(len) { // de-normalize @@ -150,23 +197,275 @@ pub(super) fn transform_internal( let current_witness_index = next_witness_index - 1; - let acir = Circuit { + acir = Circuit { current_witness_index, expression_width, opcodes: transformed_opcodes, // The transformer does not add new public inputs ..acir }; + let mut merge_optimizer = MergeExpressionsOptimizer::new(); + let (opcodes, new_acir_opcode_positions) = merge_optimizer.eliminate_intermediate_variable(&acir, new_acir_opcode_positions); - // n.b. we do not update current_witness_index after the eliminate_intermediate_variable pass, the real index could be less. - let acir = Circuit { - current_witness_index, - expression_width, + + // n.b. if we do not update current_witness_index after the eliminate_intermediate_variable pass, the real index could be less. + acir = Circuit { opcodes, // The optimizer does not add new public inputs ..acir }; + (acir, new_acir_opcode_positions) } + +/// Find the witness with the highest ID in the circuit. +fn max_witness(circuit: &Circuit) -> Witness { + let mut witnesses = WitnessFolder::new(Witness::default(), |state, witness| { + *state = witness.max(*state); + }); + witnesses.fold_circuit(circuit); + witnesses.into_state() +} + +/// Fold all witnesses in a circuit. +struct WitnessFolder { + state: S, + accumulate: A, +} + +impl WitnessFolder +where + A: Fn(&mut S, Witness), +{ + /// Create the folder with some initial state and an accumulator function. 
+ fn new(init: S, accumulate: A) -> Self { + Self { state: init, accumulate } + } + + /// Take the accumulated state. + fn into_state(self) -> S { + self.state + } + + /// Add all witnesses from the circuit. + fn fold_circuit(&mut self, circuit: &Circuit) { + self.fold_many(circuit.private_parameters.iter()); + self.fold_many(circuit.public_parameters.0.iter()); + self.fold_many(circuit.return_values.0.iter()); + for opcode in &circuit.opcodes { + self.fold_opcode(opcode); + } + } + + /// Fold a witness into the state. + fn fold(&mut self, witness: Witness) { + (self.accumulate)(&mut self.state, witness); + } + + /// Fold many witnesses into the state. + fn fold_many<'w, I: Iterator>(&mut self, witnesses: I) { + for w in witnesses { + self.fold(*w); + } + } + + /// Add witnesses from the opcode. + fn fold_opcode(&mut self, opcode: &Opcode) { + match opcode { + Opcode::AssertZero(expr) => { + self.fold_expr(expr); + } + Opcode::BlackBoxFuncCall(call) => self.fold_blackbox(call), + Opcode::MemoryOp { block_id: _, op, predicate } => { + let MemOp { operation, index, value } = op; + self.fold_expr(operation); + self.fold_expr(index); + self.fold_expr(value); + if let Some(pred) = predicate { + self.fold_expr(pred); + } + } + Opcode::MemoryInit { block_id: _, init, block_type: _ } => { + for w in init { + self.fold(*w); + } + } + // We keep the display for a BrilligCall and circuit Call separate as they + // are distinct in their functionality and we should maintain this separation for debugging. 
+ Opcode::BrilligCall { id: _, inputs, outputs, predicate } => { + if let Some(pred) = predicate { + self.fold_expr(pred); + } + self.fold_brillig_inputs(inputs); + self.fold_brillig_outputs(outputs); + } + Opcode::Call { id: _, inputs, outputs, predicate } => { + if let Some(pred) = predicate { + self.fold_expr(pred); + } + self.fold_many(inputs.iter()); + self.fold_many(outputs.iter()); + } + } + } + + fn fold_expr(&mut self, expr: &Expression) { + for i in &expr.mul_terms { + self.fold(i.1); + self.fold(i.2); + } + for i in &expr.linear_combinations { + self.fold(i.1); + } + } + + fn fold_brillig_inputs(&mut self, inputs: &[BrilligInputs]) { + for input in inputs { + match input { + BrilligInputs::Single(expr) => { + self.fold_expr(expr); + } + BrilligInputs::Array(exprs) => { + for expr in exprs { + self.fold_expr(expr); + } + } + BrilligInputs::MemoryArray(_) => {} + } + } + } + + fn fold_brillig_outputs(&mut self, outputs: &[BrilligOutputs]) { + for output in outputs { + match output { + BrilligOutputs::Simple(w) => { + self.fold(*w); + } + BrilligOutputs::Array(ws) => self.fold_many(ws.iter()), + } + } + } + + fn fold_blackbox(&mut self, call: &BlackBoxFuncCall) { + match call { + BlackBoxFuncCall::AES128Encrypt { inputs, iv, key, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_function_inputs(iv.as_slice()); + self.fold_function_inputs(key.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::AND { lhs, rhs, output } => { + self.fold_function_input(lhs); + self.fold_function_input(rhs); + self.fold(*output); + } + BlackBoxFuncCall::XOR { lhs, rhs, output } => { + self.fold_function_input(lhs); + self.fold_function_input(rhs); + self.fold(*output); + } + BlackBoxFuncCall::RANGE { input } => { + self.fold_function_input(input); + } + BlackBoxFuncCall::Blake2s { inputs, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::Blake3 { inputs, outputs } 
=> { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::EcdsaSecp256k1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } => { + self.fold_function_inputs(public_key_x.as_slice()); + self.fold_function_inputs(public_key_y.as_slice()); + self.fold_function_inputs(signature.as_slice()); + self.fold_function_inputs(hashed_message.as_slice()); + self.fold(*output); + } + BlackBoxFuncCall::EcdsaSecp256r1 { + public_key_x, + public_key_y, + signature, + hashed_message, + output, + } => { + self.fold_function_inputs(public_key_x.as_slice()); + self.fold_function_inputs(public_key_y.as_slice()); + self.fold_function_inputs(signature.as_slice()); + self.fold_function_inputs(hashed_message.as_slice()); + self.fold(*output); + } + BlackBoxFuncCall::MultiScalarMul { points, scalars, outputs } => { + self.fold_function_inputs(points.as_slice()); + self.fold_function_inputs(scalars.as_slice()); + let (x, y, i) = outputs; + self.fold(*x); + self.fold(*y); + self.fold(*i); + } + BlackBoxFuncCall::EmbeddedCurveAdd { input1, input2, outputs } => { + self.fold_function_inputs(input1.as_slice()); + self.fold_function_inputs(input2.as_slice()); + let (x, y, i) = outputs; + self.fold(*x); + self.fold(*y); + self.fold(*i); + } + BlackBoxFuncCall::Keccakf1600 { inputs, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::RecursiveAggregation { + verification_key, + proof, + public_inputs, + key_hash, + proof_type: _, + } => { + self.fold_function_inputs(verification_key.as_slice()); + self.fold_function_inputs(proof.as_slice()); + self.fold_function_inputs(public_inputs.as_slice()); + self.fold_function_input(key_hash); + } + BlackBoxFuncCall::BigIntAdd { .. } + | BlackBoxFuncCall::BigIntSub { .. } + | BlackBoxFuncCall::BigIntMul { .. } + | BlackBoxFuncCall::BigIntDiv { .. 
} => {} + BlackBoxFuncCall::BigIntFromLeBytes { inputs, modulus: _, output: _ } => { + self.fold_function_inputs(inputs.as_slice()); + } + BlackBoxFuncCall::BigIntToLeBytes { input: _, outputs } => { + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len: _ } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_many(outputs.iter()); + } + BlackBoxFuncCall::Sha256Compression { inputs, hash_values, outputs } => { + self.fold_function_inputs(inputs.as_slice()); + self.fold_function_inputs(hash_values.as_slice()); + self.fold_many(outputs.iter()); + } + } + } + + fn fold_function_input(&mut self, input: &FunctionInput) { + if let circuit::opcodes::ConstantOrWitnessEnum::Witness(witness) = input.input() { + self.fold(witness); + } + } + + fn fold_function_inputs(&mut self, inputs: &[FunctionInput]) { + for input in inputs { + self.fold_function_input(input); + } + } +} diff --git a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs index 20c12a72fc0..f9188cca700 100644 --- a/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs +++ b/noir/noir-repo/acvm-repo/acvm/src/pwg/mod.rs @@ -359,7 +359,6 @@ impl<'a, F: AcirField, B: BlackBoxFunctionSolver> ACVM<'a, F, B> { pub fn solve_opcode(&mut self) -> ACVMStatus { let opcode = &self.opcodes[self.instruction_pointer]; - let resolution = match opcode { Opcode::AssertZero(expr) => ExpressionSolver::solve(&mut self.witness_map, expr), Opcode::BlackBoxFuncCall(bb_func) => blackbox::solve( diff --git a/noir/noir-repo/acvm-repo/acvm_js/build.sh b/noir/noir-repo/acvm-repo/acvm_js/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/noir/noir-repo/acvm-repo/acvm_js/build.sh +++ b/noir/noir-repo/acvm-repo/acvm_js/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo 
read-manifest | jq -r '.name') diff --git a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs index 8bf239eec8a..fc566b70a26 100644 --- a/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs +++ b/noir/noir-repo/acvm-repo/bn254_blackbox_solver/benches/criterion.rs @@ -2,8 +2,7 @@ use criterion::{criterion_group, criterion_main, Criterion}; use std::{hint::black_box, time::Duration}; use acir::{AcirField, FieldElement}; -use acvm_blackbox_solver::BlackBoxFunctionSolver; -use bn254_blackbox_solver::{poseidon2_permutation, Bn254BlackBoxSolver}; +use bn254_blackbox_solver::poseidon2_permutation; use pprof::criterion::{Output, PProfProfiler}; diff --git a/noir/noir-repo/compiler/fm/src/file_map.rs b/noir/noir-repo/compiler/fm/src/file_map.rs index ba552fe5156..857c7460fb9 100644 --- a/noir/noir-repo/compiler/fm/src/file_map.rs +++ b/noir/noir-repo/compiler/fm/src/file_map.rs @@ -80,6 +80,19 @@ impl FileMap { pub fn all_file_ids(&self) -> impl Iterator { self.name_to_id.values() } + + pub fn get_name(&self, file_id: FileId) -> Result { + let name = self.files.get(file_id.as_usize())?.name().clone(); + + // See if we can make the file name a bit shorter/easier to read if it starts with the current directory + if let Some(current_dir) = &self.current_dir { + if let Ok(name_without_prefix) = name.0.strip_prefix(current_dir) { + return Ok(PathString::from_path(name_without_prefix.to_path_buf())); + } + } + + Ok(name) + } } impl Default for FileMap { fn default() -> Self { @@ -97,16 +110,7 @@ impl<'a> Files<'a> for FileMap { type Source = &'a str; fn name(&self, file_id: Self::FileId) -> Result { - let name = self.files.get(file_id.as_usize())?.name().clone(); - - // See if we can make the file name a bit shorter/easier to read if it starts with the current directory - if let Some(current_dir) = &self.current_dir { - if let Ok(name_without_prefix) = 
name.0.strip_prefix(current_dir) { - return Ok(PathString::from_path(name_without_prefix.to_path_buf())); - } - } - - Ok(name) + self.get_name(file_id) } fn source(&'a self, file_id: Self::FileId) -> Result { diff --git a/noir/noir-repo/compiler/integration-tests/package.json b/noir/noir-repo/compiler/integration-tests/package.json index e33179f31e7..bfaa1cabd16 100644 --- a/noir/noir-repo/compiler/integration-tests/package.json +++ b/noir/noir-repo/compiler/integration-tests/package.json @@ -13,7 +13,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "portal:../../../../barretenberg/ts", + "@aztec/bb.js": "0.66.0", "@noir-lang/noir_js": "workspace:*", "@noir-lang/noir_wasm": "workspace:*", "@nomicfoundation/hardhat-chai-matchers": "^2.0.0", diff --git a/noir/noir-repo/compiler/noirc_driver/src/lib.rs b/noir/noir-repo/compiler/noirc_driver/src/lib.rs index 5bedefaf563..9318e4d2b5c 100644 --- a/noir/noir-repo/compiler/noirc_driver/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_driver/src/lib.rs @@ -10,7 +10,7 @@ use clap::Args; use fm::{FileId, FileManager}; use iter_extended::vecmap; use noirc_abi::{AbiParameter, AbiType, AbiValue}; -use noirc_errors::{CustomDiagnostic, FileDiagnostic}; +use noirc_errors::{CustomDiagnostic, DiagnosticKind, FileDiagnostic}; use noirc_evaluator::create_program; use noirc_evaluator::errors::RuntimeError; use noirc_evaluator::ssa::{SsaLogging, SsaProgramArtifact}; @@ -131,6 +131,12 @@ pub struct CompileOptions { #[arg(long)] pub skip_underconstrained_check: bool, + /// Flag to turn off the compiler check for missing Brillig call constrains. + /// Warning: This can improve compilation speed but can also lead to correctness errors. + /// This check should always be run on production code. + #[arg(long)] + pub skip_brillig_constraints_check: bool, + /// Setting to decide on an inlining strategy for Brillig functions. 
/// A more aggressive inliner should generate larger programs but more optimized /// A less aggressive inliner should generate smaller programs @@ -301,7 +307,6 @@ pub fn check_crate( crate_id: CrateId, options: &CompileOptions, ) -> CompilationResult<()> { - let mut errors = vec![]; let error_on_unused_imports = true; let diagnostics = CrateDefMap::collect_defs( crate_id, @@ -309,15 +314,22 @@ pub fn check_crate( options.debug_comptime_in_file.as_deref(), error_on_unused_imports, ); - errors.extend(diagnostics.into_iter().map(|(error, file_id)| { - let diagnostic = CustomDiagnostic::from(&error); - diagnostic.in_file(file_id) - })); + let warnings_and_errors: Vec = diagnostics + .into_iter() + .map(|(error, file_id)| { + let diagnostic = CustomDiagnostic::from(&error); + diagnostic.in_file(file_id) + }) + .filter(|diagnostic| { + // We filter out any warnings if they're going to be ignored later on to free up memory. + !options.silence_warnings || diagnostic.diagnostic.kind != DiagnosticKind::Warning + }) + .collect(); - if has_errors(&errors, options.deny_warnings) { - Err(errors) + if has_errors(&warnings_and_errors, options.deny_warnings) { + Err(warnings_and_errors) } else { - Ok(((), errors)) + Ok(((), warnings_and_errors)) } } @@ -625,6 +637,7 @@ pub fn compile_no_check( }, emit_ssa: if options.emit_ssa { Some(context.package_build_path.clone()) } else { None }, skip_underconstrained_check: options.skip_underconstrained_check, + skip_brillig_constraints_check: options.skip_brillig_constraints_check, inliner_aggressiveness: options.inliner_aggressiveness, max_bytecode_increase_percent: options.max_bytecode_increase_percent, }; diff --git a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs index f029b4e6de8..e57775d9a7f 100644 --- a/noir/noir-repo/compiler/noirc_errors/src/reporter.rs +++ b/noir/noir-repo/compiler/noirc_errors/src/reporter.rs @@ -272,7 +272,7 @@ fn convert_diagnostic( 
diagnostic.with_message(&cd.message).with_labels(secondary_labels).with_notes(notes) } -fn stack_trace<'files>( +pub fn stack_trace<'files>( files: &'files impl Files<'files, FileId = fm::FileId>, call_stack: &[Location], ) -> String { diff --git a/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml b/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml index bb8c62cfd95..72fba8aadc2 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_evaluator/Cargo.toml @@ -32,6 +32,7 @@ cfg-if.workspace = true [dev-dependencies] proptest.workspace = true similar-asserts.workspace = true +tracing-test = "0.2.5" num-traits.workspace = true test-case.workspace = true diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs index 76f0dea95bb..769d0d80cc4 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/acir/mod.rs @@ -31,6 +31,7 @@ use crate::brillig::{ Brillig, }; use crate::errors::{InternalError, InternalWarning, RuntimeError, SsaReport}; +use crate::ssa::ir::instruction::Hint; use crate::ssa::{ function_builder::data_bus::DataBus, ir::{ @@ -821,14 +822,12 @@ impl<'a> Context<'a> { }) .sum(); - let Some(acir_function_id) = - ssa.entry_point_to_generated_index.get(id) - else { + let Some(acir_function_id) = ssa.get_entry_point_index(id) else { unreachable!("Expected an associated final index for call to acir function {id} with args {arguments:?}"); }; let output_vars = self.acir_context.call_acir_function( - AcirFunctionId(*acir_function_id), + AcirFunctionId(acir_function_id), inputs, output_count, self.current_side_effects_enabled_var, @@ -1873,14 +1872,15 @@ impl<'a> Context<'a> { let acir_value = match value { Value::NumericConstant { constant, typ } => { - AcirValue::Var(self.acir_context.add_constant(*constant), typ.into()) + let typ = AcirType::from(Type::Numeric(*typ)); + 
AcirValue::Var(self.acir_context.add_constant(*constant), typ) } Value::Intrinsic(..) => todo!(), Value::Function(function_id) => { // This conversion is for debugging support only, to allow the // debugging instrumentation code to work. Taking the reference // of a function in ACIR is useless. - let id = self.acir_context.add_constant(function_id.to_usize()); + let id = self.acir_context.add_constant(function_id.to_u32()); AcirValue::Var(id, AcirType::field()) } Value::ForeignFunction(_) => unimplemented!( @@ -2133,6 +2133,15 @@ impl<'a> Context<'a> { result_ids: &[ValueId], ) -> Result, RuntimeError> { match intrinsic { + Intrinsic::Hint(Hint::BlackBox) => { + // Identity function; at the ACIR level this is a no-op, it only affects the SSA. + assert_eq!( + arguments.len(), + result_ids.len(), + "ICE: BlackBox input and output lengths should match." + ); + Ok(arguments.iter().map(|v| self.convert_value(*v, dfg)).collect()) + } Intrinsic::BlackBox(black_box) => { // Slices are represented as a tuple of (length, slice contents). // We must check the inputs to determine if there are slices @@ -2884,7 +2893,6 @@ mod test { }, FieldElement, }; - use im::vector; use noirc_errors::Location; use noirc_frontend::monomorphization::ast::InlineType; use std::collections::BTreeMap; @@ -2894,7 +2902,13 @@ mod test { brillig::Brillig, ssa::{ function_builder::FunctionBuilder, - ir::{function::FunctionId, instruction::BinaryOp, map::Id, types::Type}, + ir::{ + dfg::CallStack, + function::FunctionId, + instruction::BinaryOp, + map::Id, + types::{NumericType, Type}, + }, }, }; @@ -2916,13 +2930,15 @@ mod test { builder.new_function("foo".into(), foo_id, inline_type); } // Set a call stack for testing whether `brillig_locations` in the `GeneratedAcir` was accurately set. 
- builder.set_call_stack(vector![Location::dummy(), Location::dummy()]); + let mut stack = CallStack::unit(Location::dummy()); + stack.push_back(Location::dummy()); + builder.set_call_stack(stack); let foo_v0 = builder.add_parameter(Type::field()); let foo_v1 = builder.add_parameter(Type::field()); let foo_equality_check = builder.insert_binary(foo_v0, BinaryOp::Eq, foo_v1); - let zero = builder.numeric_constant(0u128, Type::unsigned(1)); + let zero = builder.numeric_constant(0u128, NumericType::unsigned(1)); builder.insert_constrain(foo_equality_check, zero, None); builder.terminate_with_return(vec![foo_v0]); } @@ -2979,7 +2995,7 @@ mod test { build_basic_foo_with_return(&mut builder, foo_id, false, inline_type); - let ssa = builder.finish(); + let ssa = builder.finish().generate_entry_point_index(); let (acir_functions, _, _, _) = ssa .into_acir(&Brillig::default(), ExpressionWidth::default()) @@ -3087,6 +3103,7 @@ mod test { let ssa = builder.finish(); let (acir_functions, _, _, _) = ssa + .generate_entry_point_index() .into_acir(&Brillig::default(), ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); // The expected result should look very similar to the above test expect that the input witnesses of the `Call` @@ -3184,7 +3201,7 @@ mod test { build_basic_foo_with_return(&mut builder, foo_id, false, inline_type); - let ssa = builder.finish(); + let ssa = builder.finish().generate_entry_point_index(); let (acir_functions, _, _, _) = ssa .into_acir(&Brillig::default(), ExpressionWidth::default()) @@ -3311,6 +3328,7 @@ mod test { let brillig = ssa.to_brillig(false); let (acir_functions, brillig_functions, _, _) = ssa + .generate_entry_point_index() .into_acir(&brillig, ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); @@ -3364,7 +3382,7 @@ mod test { // Call the same primitive operation again let v1_div_v2 = builder.insert_binary(main_v1, BinaryOp::Div, main_v2); - let one = 
builder.numeric_constant(1u128, Type::unsigned(32)); + let one = builder.numeric_constant(1u128, NumericType::unsigned(32)); builder.insert_constrain(v1_div_v2, one, None); builder.terminate_with_return(vec![]); @@ -3375,6 +3393,7 @@ mod test { // The Brillig bytecode we insert for the stdlib is hardcoded so we do not need to provide any // Brillig artifacts to the ACIR gen pass. let (acir_functions, brillig_functions, _, _) = ssa + .generate_entry_point_index() .into_acir(&Brillig::default(), ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); @@ -3436,7 +3455,7 @@ mod test { // Call the same primitive operation again let v1_div_v2 = builder.insert_binary(main_v1, BinaryOp::Div, main_v2); - let one = builder.numeric_constant(1u128, Type::unsigned(32)); + let one = builder.numeric_constant(1u128, NumericType::unsigned(32)); builder.insert_constrain(v1_div_v2, one, None); builder.terminate_with_return(vec![]); @@ -3449,6 +3468,7 @@ mod test { println!("{}", ssa); let (acir_functions, brillig_functions, _, _) = ssa + .generate_entry_point_index() .into_acir(&brillig, ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); @@ -3521,7 +3541,7 @@ mod test { // Call the same primitive operation again let v1_div_v2 = builder.insert_binary(main_v1, BinaryOp::Div, main_v2); - let one = builder.numeric_constant(1u128, Type::unsigned(32)); + let one = builder.numeric_constant(1u128, NumericType::unsigned(32)); builder.insert_constrain(v1_div_v2, one, None); builder.terminate_with_return(vec![]); @@ -3537,6 +3557,7 @@ mod test { println!("{}", ssa); let (acir_functions, brillig_functions, _, _) = ssa + .generate_entry_point_index() .into_acir(&brillig, ExpressionWidth::default()) .expect("Should compile manually written SSA into ACIR"); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs 
index 9c88c559b59..d2bf7e5bdca 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -8,7 +8,7 @@ use crate::brillig::brillig_ir::{ BrilligBinaryOp, BrilligContext, ReservedRegisters, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, }; use crate::ssa::ir::dfg::CallStack; -use crate::ssa::ir::instruction::ConstrainError; +use crate::ssa::ir::instruction::{ConstrainError, Hint}; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::DataFlowGraph, @@ -226,16 +226,14 @@ impl<'block> BrilligBlock<'block> { dfg.get_numeric_constant_with_type(*rhs), ) { // If the constraint is of the form `x == u1 1` then we can simply constrain `x` directly - ( - Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))), - None, - ) if constant == FieldElement::one() => { + (Some((constant, NumericType::Unsigned { bit_size: 1 })), None) + if constant == FieldElement::one() => + { (self.convert_ssa_single_addr_value(*rhs, dfg), false) } - ( - None, - Some((constant, Type::Numeric(NumericType::Unsigned { bit_size: 1 }))), - ) if constant == FieldElement::one() => { + (None, Some((constant, NumericType::Unsigned { bit_size: 1 }))) + if constant == FieldElement::one() => + { (self.convert_ssa_single_addr_value(*lhs, dfg), false) } @@ -552,6 +550,10 @@ impl<'block> BrilligBlock<'block> { false, ); } + Intrinsic::Hint(Hint::BlackBox) => { + let result_ids = dfg.instruction_results(instruction_id); + self.convert_ssa_identity_call(arguments, dfg, result_ids); + } Intrinsic::BlackBox(bb_func) => { // Slices are represented as a tuple of (length, slice contents). // We must check the inputs to determine if there are slices @@ -874,6 +876,30 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.codegen_call(func_id, &argument_variables, &return_variables); } + /// Copy the input arguments to the results. 
+ fn convert_ssa_identity_call( + &mut self, + arguments: &[ValueId], + dfg: &DataFlowGraph, + result_ids: &[ValueId], + ) { + let argument_variables = + vecmap(arguments, |argument_id| self.convert_ssa_value(*argument_id, dfg)); + + let return_variables = vecmap(result_ids, |result_id| { + self.variables.define_variable( + self.function_context, + self.brillig_context, + *result_id, + dfg, + ) + }); + + for (src, dst) in argument_variables.into_iter().zip(return_variables) { + self.brillig_context.mov_instruction(dst.extract_register(), src.extract_register()); + } + } + fn validate_array_index( &mut self, array_variable: BrilligVariable, @@ -1257,8 +1283,8 @@ impl<'block> BrilligBlock<'block> { result_variable: SingleAddrVariable, ) { let binary_type = type_of_binary_operation( - dfg[binary.lhs].get_type(), - dfg[binary.rhs].get_type(), + dfg[binary.lhs].get_type().as_ref(), + dfg[binary.rhs].get_type().as_ref(), binary.operator, ); @@ -1588,7 +1614,7 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.const_instruction( new_variable.extract_single_addr(), - value_id.to_usize().into(), + value_id.to_u32().into(), ); new_variable } @@ -1767,7 +1793,7 @@ impl<'block> BrilligBlock<'block> { dfg: &DataFlowGraph, ) -> BrilligVariable { let typ = dfg[result].get_type(); - match typ { + match typ.as_ref() { Type::Numeric(_) => self.variables.define_variable( self.function_context, self.brillig_context, @@ -1783,7 +1809,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); let array = variable.extract_array(); - self.allocate_foreign_call_result_array(typ, array); + self.allocate_foreign_call_result_array(typ.as_ref(), array); variable } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs index 87165c36dff..d6851a9ecf9 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs +++ 
b/noir/noir-repo/compiler/noirc_evaluator/src/brillig/brillig_gen/variable_liveness.rs @@ -372,7 +372,7 @@ mod test { let v3 = builder.insert_allocate(Type::field()); - let zero = builder.numeric_constant(0u128, Type::field()); + let zero = builder.field_constant(0u128); builder.insert_store(v3, zero); let v4 = builder.insert_binary(v0, BinaryOp::Eq, zero); @@ -381,7 +381,7 @@ mod test { builder.switch_to_block(b2); - let twenty_seven = builder.numeric_constant(27u128, Type::field()); + let twenty_seven = builder.field_constant(27u128); let v7 = builder.insert_binary(v0, BinaryOp::Add, twenty_seven); builder.insert_store(v3, v7); @@ -487,7 +487,7 @@ mod test { let v3 = builder.insert_allocate(Type::field()); - let zero = builder.numeric_constant(0u128, Type::field()); + let zero = builder.field_constant(0u128); builder.insert_store(v3, zero); builder.terminate_with_jmp(b1, vec![zero]); @@ -515,7 +515,7 @@ mod test { builder.switch_to_block(b5); - let twenty_seven = builder.numeric_constant(27u128, Type::field()); + let twenty_seven = builder.field_constant(27u128); let v10 = builder.insert_binary(v7, BinaryOp::Eq, twenty_seven); let v11 = builder.insert_not(v10); @@ -534,7 +534,7 @@ mod test { builder.switch_to_block(b8); - let one = builder.numeric_constant(1u128, Type::field()); + let one = builder.field_constant(1u128); let v15 = builder.insert_binary(v7, BinaryOp::Add, one); builder.terminate_with_jmp(b4, vec![v15]); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs index 75a3ceb3a72..bb224617994 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/errors.rs @@ -93,7 +93,10 @@ impl From for FileDiagnostic { let message = bug.to_string(); let (secondary_message, call_stack) = match bug { InternalBug::IndependentSubgraph { call_stack } => { - ("There is no path from the output of this brillig call to either return values or 
inputs of the circuit, which creates an independent subgraph. This is quite likely a soundness vulnerability".to_string(),call_stack) + ("There is no path from the output of this Brillig call to either return values or inputs of the circuit, which creates an independent subgraph. This is quite likely a soundness vulnerability".to_string(), call_stack) + } + InternalBug::UncheckedBrilligCall { call_stack } => { + ("This Brillig call's inputs and its return values haven't been sufficiently constrained. This should be done to prevent potential soundness vulnerabilities".to_string(), call_stack) } InternalBug::AssertFailed { call_stack } => ("As a result, the compiled circuit is ensured to fail. Other assertions may also fail during execution".to_string(), call_stack) }; @@ -117,8 +120,10 @@ pub enum InternalWarning { #[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize, Hash)] pub enum InternalBug { - #[error("Input to brillig function is in a separate subgraph to output")] + #[error("Input to Brillig function is in a separate subgraph to output")] IndependentSubgraph { call_stack: CallStack }, + #[error("Brillig function call isn't properly covered by a manual constraint")] + UncheckedBrilligCall { call_stack: CallStack }, #[error("Assertion is always false")] AssertFailed { call_stack: CallStack }, } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs index 8127e3d03ef..75ea557d3de 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/lib.rs @@ -12,8 +12,7 @@ pub mod ssa; pub use ssa::create_program; pub use ssa::ir::instruction::ErrorType; -/// Trims leading whitespace from each line of the input string, according to -/// how much leading whitespace there is on the first non-empty line. 
+/// Trims leading whitespace from each line of the input string #[cfg(test)] pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String { let mut lines = src.trim_end().lines(); @@ -21,11 +20,10 @@ pub(crate) fn trim_leading_whitespace_from_lines(src: &str) -> String { while first_line.is_empty() { first_line = lines.next().unwrap(); } - let indent = first_line.len() - first_line.trim_start().len(); let mut result = first_line.trim_start().to_string(); for line in lines { result.push('\n'); - result.push_str(&line[indent..]); + result.push_str(line.trim_start()); } result } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs index 8f31023f790..9377cadb260 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa.rs @@ -72,7 +72,10 @@ pub struct SsaEvaluatorOptions { /// Skip the check for under constrained values pub skip_underconstrained_check: bool, - /// The higher the value, the more inlined brillig functions will be. + /// Skip the missing Brillig call constraints check + pub skip_brillig_constraints_check: bool, + + /// The higher the value, the more inlined Brillig functions will be. pub inliner_aggressiveness: i64, /// Maximum accepted percentage increase in the Brillig bytecode size after unrolling loops. @@ -94,60 +97,32 @@ pub(crate) fn optimize_into_acir( ) -> Result { let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); let ssa_gen_span_guard = ssa_gen_span.enter(); - - let mut ssa = SsaBuilder::new( + let builder = SsaBuilder::new( program, options.ssa_logging.clone(), options.force_brillig_output, options.print_codegen_timings, &options.emit_ssa, - )? 
- .run_pass(Ssa::defunctionalize, "Defunctionalization") - .run_pass(Ssa::remove_paired_rc, "Removing Paired rc_inc & rc_decs") - .run_pass(Ssa::separate_runtime, "Runtime Separation") - .run_pass(Ssa::resolve_is_unconstrained, "Resolving IsUnconstrained") - .run_pass(|ssa| ssa.inline_functions(options.inliner_aggressiveness), "Inlining (1st)") - // Run mem2reg with the CFG separated into blocks - .run_pass(Ssa::mem2reg, "Mem2Reg (1st)") - .run_pass(Ssa::simplify_cfg, "Simplifying (1st)") - .run_pass(Ssa::as_slice_optimization, "`as_slice` optimization") - .try_run_pass( - Ssa::evaluate_static_assert_and_assert_constant, - "`static_assert` and `assert_constant`", - )? - .run_pass(Ssa::loop_invariant_code_motion, "Loop Invariant Code Motion") - .try_run_pass( - |ssa| ssa.unroll_loops_iteratively(options.max_bytecode_increase_percent), - "Unrolling", - )? - .run_pass(Ssa::simplify_cfg, "Simplifying (2nd)") - .run_pass(Ssa::flatten_cfg, "Flattening") - .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts") - // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores - .run_pass(Ssa::mem2reg, "Mem2Reg (2nd)") - // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. - // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. - // This pass must come immediately following `mem2reg` as the succeeding passes - // may create an SSA which inlining fails to handle. 
- .run_pass( - |ssa| ssa.inline_functions_with_no_predicates(options.inliner_aggressiveness), - "Inlining (2nd)", - ) - .run_pass(Ssa::remove_if_else, "Remove IfElse") - .run_pass(Ssa::fold_constants, "Constant Folding") - .run_pass(Ssa::remove_enable_side_effects, "EnableSideEffectsIf removal") - .run_pass(Ssa::fold_constants_using_constraints, "Constraint Folding") - .run_pass(Ssa::dead_instruction_elimination, "Dead Instruction Elimination (1st)") - .run_pass(Ssa::simplify_cfg, "Simplifying:") - .run_pass(Ssa::array_set_optimization, "Array Set Optimizations") - .finish(); + )?; - let ssa_level_warnings = if options.skip_underconstrained_check { - vec![] - } else { - time("After Check for Underconstrained Values", options.print_codegen_timings, || { - ssa.check_for_underconstrained_values() - }) + let mut ssa = optimize_all(builder, options)?; + + let mut ssa_level_warnings = vec![]; + + if !options.skip_underconstrained_check { + ssa_level_warnings.extend(time( + "After Check for Underconstrained Values", + options.print_codegen_timings, + || ssa.check_for_underconstrained_values(), + )); + } + + if !options.skip_brillig_constraints_check { + ssa_level_warnings.extend(time( + "After Check for Missing Brillig Call Constraints", + options.print_codegen_timings, + || ssa.check_for_missing_brillig_constraints(), + )); }; drop(ssa_gen_span_guard); @@ -173,9 +148,54 @@ pub(crate) fn optimize_into_acir( let artifacts = time("SSA to ACIR", options.print_codegen_timings, || { ssa.into_acir(&brillig, options.expression_width) })?; + Ok(ArtifactsAndWarnings(artifacts, ssa_level_warnings)) } +/// Run all SSA passes. 
+fn optimize_all(builder: SsaBuilder, options: &SsaEvaluatorOptions) -> Result { + Ok(builder + .run_pass(Ssa::defunctionalize, "Defunctionalization") + .run_pass(Ssa::remove_paired_rc, "Removing Paired rc_inc & rc_decs") + .run_pass(Ssa::separate_runtime, "Runtime Separation") + .run_pass(Ssa::resolve_is_unconstrained, "Resolving IsUnconstrained") + .run_pass(|ssa| ssa.inline_functions(options.inliner_aggressiveness), "Inlining (1st)") + // Run mem2reg with the CFG separated into blocks + .run_pass(Ssa::mem2reg, "Mem2Reg (1st)") + .run_pass(Ssa::simplify_cfg, "Simplifying (1st)") + .run_pass(Ssa::as_slice_optimization, "`as_slice` optimization") + .try_run_pass( + Ssa::evaluate_static_assert_and_assert_constant, + "`static_assert` and `assert_constant`", + )? + .run_pass(Ssa::loop_invariant_code_motion, "Loop Invariant Code Motion") + .try_run_pass( + |ssa| ssa.unroll_loops_iteratively(options.max_bytecode_increase_percent), + "Unrolling", + )? + .run_pass(Ssa::simplify_cfg, "Simplifying (2nd)") + .run_pass(Ssa::flatten_cfg, "Flattening") + .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts") + // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores + .run_pass(Ssa::mem2reg, "Mem2Reg (2nd)") + // Run the inlining pass again to handle functions with `InlineType::NoPredicates`. + // Before flattening is run, we treat functions marked with the `InlineType::NoPredicates` as an entry point. + // This pass must come immediately following `mem2reg` as the succeeding passes + // may create an SSA which inlining fails to handle. 
+ .run_pass( + |ssa| ssa.inline_functions_with_no_predicates(options.inliner_aggressiveness), + "Inlining (2nd)", + ) + .run_pass(Ssa::remove_if_else, "Remove IfElse") + .run_pass(Ssa::fold_constants, "Constant Folding") + .run_pass(Ssa::remove_enable_side_effects, "EnableSideEffectsIf removal") + .run_pass(Ssa::fold_constants_using_constraints, "Constraint Folding") + .run_pass(Ssa::dead_instruction_elimination, "Dead Instruction Elimination (1st)") + .run_pass(Ssa::simplify_cfg, "Simplifying:") + .run_pass(Ssa::array_set_optimization, "Array Set Optimizations") + .finish()) +} + // Helper to time SSA passes fn time(name: &str, print_timings: bool, f: impl FnOnce() -> T) -> T { let start_time = chrono::Utc::now().time(); @@ -449,7 +469,7 @@ impl SsaBuilder { } fn finish(self) -> Ssa { - self.ssa + self.ssa.generate_entry_point_index() } /// Runs the given SSA pass and prints the SSA afterward if `print_ssa_passes` is true. diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs index 7a4e336c33e..f092f63cd07 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs @@ -1,27 +1,30 @@ -//! This module defines an SSA pass that detects if the final function has any subgraphs independent from inputs and outputs. -//! If this is the case, then part of the final circuit can be completely replaced by any other passing circuit, since there are no constraints ensuring connections. -//! So the compiler informs the developer of this as a bug +//! This module defines security SSA passes detecting constraint problems leading to possible +//! soundness vulnerabilities. +//! The compiler informs the developer of these as bugs. 
use crate::errors::{InternalBug, SsaReport}; use crate::ssa::ir::basic_block::BasicBlockId; use crate::ssa::ir::function::RuntimeType; use crate::ssa::ir::function::{Function, FunctionId}; -use crate::ssa::ir::instruction::{Instruction, InstructionId, Intrinsic}; +use crate::ssa::ir::instruction::{Hint, Instruction, InstructionId, Intrinsic}; use crate::ssa::ir::value::{Value, ValueId}; use crate::ssa::ssa_gen::Ssa; use im::HashMap; use rayon::prelude::*; use std::collections::{BTreeMap, HashSet}; +use tracing::trace; impl Ssa { - /// Go through each top-level non-brillig function and detect if it has independent subgraphs + /// This function provides an SSA pass that detects if the final function has any subgraphs independent from inputs and outputs. + /// If this is the case, then part of the final circuit can be completely replaced by any other passing circuit, since there are no constraints ensuring connections. + /// Go through each top-level non-Brillig function and detect if it has independent subgraphs #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn check_for_underconstrained_values(&mut self) -> Vec { - let functions_id = self.functions.values().map(|f| f.id().to_usize()).collect::>(); - functions_id - .iter() + self.functions + .values() + .map(|f| f.id()) .par_bridge() .flat_map(|fid| { - let function_to_process = &self.functions[&FunctionId::new(*fid)]; + let function_to_process = &self.functions[&fid]; match function_to_process.runtime() { RuntimeType::Acir { .. 
} => check_for_underconstrained_values_within_function( function_to_process, @@ -32,6 +35,32 @@ impl Ssa { }) .collect() } + + /// Detect Brillig calls left unconstrained with manual asserts + /// and return a vector of bug reports if any have been found + pub(crate) fn check_for_missing_brillig_constraints(&mut self) -> Vec { + // Skip the check if there are no Brillig functions involved + if !self.functions.values().any(|func| func.runtime().is_brillig()) { + return vec![]; + }; + + self.functions + .values() + .map(|f| f.id()) + .par_bridge() + .flat_map(|fid| { + let function_to_process = &self.functions[&fid]; + match function_to_process.runtime() { + RuntimeType::Acir { .. } => { + let mut context = DependencyContext::default(); + context.build(function_to_process, &self.functions); + context.collect_warnings(function_to_process) + } + RuntimeType::Brillig(_) => Vec::new(), + } + }) + .collect() + } } /// Detect independent subgraphs (not connected to function inputs or outputs) and return a vector of bug reports if some are found @@ -63,6 +92,345 @@ fn check_for_underconstrained_values_within_function( } warnings } + +#[derive(Default)] +struct DependencyContext { + visited_blocks: HashSet, + block_queue: Vec, + // Map keeping track of values stored at memory locations + memory_slots: HashMap, + // Map of values resulting from array get instructions + // to the actual array values + array_elements: HashMap, + // Map of brillig call ids to sets of the value ids descending + // from their arguments and results + tainted: HashMap, +} + +/// Structure keeping track of value ids descending from Brillig calls' +/// arguments and results, also storing information on results +/// already properly constrained +#[derive(Clone, Debug)] +struct BrilligTaintedIds { + // Argument descendant value ids + arguments: HashSet, + // Results status + results: Vec, + // Initial result value ids + root_results: HashSet, +} + +#[derive(Clone, Debug)] +enum ResultStatus { + // Keep 
track of descendants until found constrained + Unconstrained { descendants: HashSet }, + Constrained, +} + +impl BrilligTaintedIds { + fn new(arguments: &[ValueId], results: &[ValueId]) -> Self { + BrilligTaintedIds { + arguments: HashSet::from_iter(arguments.iter().copied()), + results: results + .iter() + .map(|result| ResultStatus::Unconstrained { descendants: HashSet::from([*result]) }) + .collect(), + root_results: HashSet::from_iter(results.iter().copied()), + } + } + + /// Add children of a given parent to the tainted value set + /// (for arguments one set is enough, for results we keep them + /// separate as the forthcoming check considers the call covered + /// if all the results were properly covered) + fn update_children(&mut self, parents: &HashSet, children: &[ValueId]) { + if self.arguments.intersection(parents).next().is_some() { + self.arguments.extend(children); + } + for result_status in &mut self.results.iter_mut() { + match result_status { + // Skip updating results already found covered + ResultStatus::Constrained => { + continue; + } + ResultStatus::Unconstrained { descendants } => { + if descendants.intersection(parents).next().is_some() { + descendants.extend(children); + } + } + } + } + } + + /// If Brillig call is properly constrained by the given ids, return true + fn check_constrained(&self) -> bool { + // If every result has now been constrained, + // consider the call properly constrained + self.results.iter().all(|result| matches!(result, ResultStatus::Constrained)) + } + + /// Remember partial constraints (involving some of the results and an argument) + /// along the way to take them into final consideration + /// Generally, a valid partial constraint should link up a result descendant + /// and an argument descendant, although there are also edge cases mentioned below. 
+ fn store_partial_constraints(&mut self, constrained_values: &HashSet) { + let mut results_involved: Vec = vec![]; + + // For a valid partial constraint, a value descending from + // one of the results should be constrained + for (i, result_status) in self.results.iter().enumerate() { + match result_status { + // Skip checking already covered results + ResultStatus::Constrained => { + continue; + } + ResultStatus::Unconstrained { descendants } => { + if descendants.intersection(constrained_values).next().is_some() { + results_involved.push(i); + } + } + } + } + + // Along with it, one of the argument descendants should be constrained + // (skipped if there were no arguments, or if an actual result and not a + // descendant has been constrained _alone_, e.g. against a constant) + if !results_involved.is_empty() + && (self.arguments.is_empty() + || (constrained_values.len() == 1 + && self.root_results.intersection(constrained_values).next().is_some()) + || self.arguments.intersection(constrained_values).next().is_some()) + { + // Remember the partial constraint, clearing the sets + results_involved.iter().for_each(|i| self.results[*i] = ResultStatus::Constrained); + } + } +} + +impl DependencyContext { + /// Build the dependency context of variable ValueIds, storing + /// information on value ids involved in unchecked Brillig calls + fn build(&mut self, function: &Function, all_functions: &BTreeMap) { + self.block_queue.push(function.entry_block()); + while let Some(block) = self.block_queue.pop() { + if self.visited_blocks.contains(&block) { + continue; + } + self.visited_blocks.insert(block); + self.process_instructions(block, function, all_functions); + } + } + + /// Go over the given block tracking Brillig calls and checking them against + /// following constraints + fn process_instructions( + &mut self, + block: BasicBlockId, + function: &Function, + all_functions: &BTreeMap, + ) { + trace!("processing instructions of block {} of function {}", block, 
function.id()); + + for instruction in function.dfg[block].instructions() { + let mut arguments = Vec::new(); + let mut results = Vec::new(); + + // Collect non-constant instruction arguments + function.dfg[*instruction].for_each_value(|value_id| { + if function.dfg.get_numeric_constant(value_id).is_none() { + arguments.push(function.dfg.resolve(value_id)); + } + }); + + // Collect non-constant instruction results + for value_id in function.dfg.instruction_results(*instruction).iter() { + if function.dfg.get_numeric_constant(*value_id).is_none() { + results.push(function.dfg.resolve(*value_id)); + } + } + + // Process instructions + match &function.dfg[*instruction] { + // For memory operations, we have to link up the stored value as a parent + // of one loaded from the same memory slot + Instruction::Store { address, value } => { + self.memory_slots.insert(*address, function.dfg.resolve(*value)); + } + Instruction::Load { address } => { + // Recall the value stored at address as parent for the results + if let Some(value_id) = self.memory_slots.get(address) { + self.update_children(&[*value_id], &results); + } else { + panic!("load instruction {} has attempted to access previously unused memory location", + instruction); + } + } + // Check the constrain instruction arguments against those + // involved in Brillig calls, remove covered calls + Instruction::Constrain(value_id1, value_id2, _) => { + self.clear_constrained( + &[function.dfg.resolve(*value_id1), function.dfg.resolve(*value_id2)], + function, + ); + } + // Consider range check to also be constraining + Instruction::RangeCheck { value, .. } => { + self.clear_constrained(&[function.dfg.resolve(*value)], function); + } + Instruction::Call { func: func_id, .. 
} => { + // For functions, we remove the first element of arguments, + // as .for_each_value() used previously also includes func_id + arguments.remove(0); + + match &function.dfg[*func_id] { + Value::Intrinsic(intrinsic) => match intrinsic { + Intrinsic::ApplyRangeConstraint | Intrinsic::AssertConstant => { + // Consider these intrinsic arguments constrained + self.clear_constrained(&arguments, function); + } + Intrinsic::AsWitness | Intrinsic::IsUnconstrained => { + // These intrinsics won't affect the dependency graph + } + Intrinsic::ArrayLen + | Intrinsic::ArrayRefCount + | Intrinsic::ArrayAsStrUnchecked + | Intrinsic::AsField + | Intrinsic::AsSlice + | Intrinsic::BlackBox(..) + | Intrinsic::DerivePedersenGenerators + | Intrinsic::FromField + | Intrinsic::Hint(..) + | Intrinsic::SlicePushBack + | Intrinsic::SlicePushFront + | Intrinsic::SlicePopBack + | Intrinsic::SlicePopFront + | Intrinsic::SliceRefCount + | Intrinsic::SliceInsert + | Intrinsic::SliceRemove + | Intrinsic::StaticAssert + | Intrinsic::StrAsBytes + | Intrinsic::ToBits(..) + | Intrinsic::ToRadix(..) + | Intrinsic::FieldLessThan => { + // Record all the function arguments as parents of the results + self.update_children(&arguments, &results); + } + }, + Value::Function(callee) => match all_functions[&callee].runtime() { + RuntimeType::Brillig(_) => { + // Record arguments/results for each Brillig call for the check + self.tainted.insert( + *instruction, + BrilligTaintedIds::new(&arguments, &results), + ); + } + RuntimeType::Acir(..) => { + // Record all the function arguments as parents of the results + self.update_children(&arguments, &results); + } + }, + Value::ForeignFunction(..) => { + panic!("should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", function.name()); + } + Value::Instruction { .. } + | Value::NumericConstant { .. } + | Value::Param { .. 
} => { + panic!( + "calling non-function value with ID {func_id} in function {}", + function.name() + ); + } + } + } + // For array get operations, we link the resulting values to + // the corresponding array value ids + // (this is required later because for now we consider array elements + // being constrained as valid as the whole arrays being constrained) + Instruction::ArrayGet { array, .. } => { + for result in &results { + self.array_elements.insert(*result, function.dfg.resolve(*array)); + } + // Record all the used arguments as parents of the results + self.update_children(&arguments, &results); + } + Instruction::ArraySet { .. } + | Instruction::Binary(..) + | Instruction::Cast(..) + | Instruction::IfElse { .. } + | Instruction::Not(..) + | Instruction::Truncate { .. } => { + // Record all the used arguments as parents of the results + self.update_children(&arguments, &results); + } + // These instructions won't affect the dependency graph + Instruction::Allocate { .. } + | Instruction::DecrementRc { .. } + | Instruction::EnableSideEffectsIf { .. } + | Instruction::IncrementRc { .. } + | Instruction::MakeArray { .. } => {} + } + } + + trace!("Number tainted Brillig calls: {}", self.tainted.len()); + } + + /// Every Brillig call not properly constrained should remain in the tainted set + /// at this point. For each, emit a corresponding warning. 
+ fn collect_warnings(&mut self, function: &Function) -> Vec { + let warnings: Vec = self + .tainted + .keys() + .map(|brillig_call| { + SsaReport::Bug(InternalBug::UncheckedBrilligCall { + call_stack: function.dfg.get_call_stack(*brillig_call), + }) + }) + .collect(); + + trace!( + "making {} under constrained reports for function {}", + warnings.len(), + function.name() + ); + warnings + } + + /// Update sets of value ids that can be traced back to the Brillig calls being tracked + fn update_children(&mut self, parents: &[ValueId], children: &[ValueId]) { + let parents: HashSet<_> = HashSet::from_iter(parents.iter().copied()); + for (_, tainted_ids) in self.tainted.iter_mut() { + tainted_ids.update_children(&parents, children); + } + } + + /// Check if any of the recorded Brillig calls have been properly constrained + /// by given values after recording partial constraints, if so stop tracking them + fn clear_constrained(&mut self, constrained_values: &[ValueId], function: &Function) { + // Remove numeric constants + let constrained_values = + constrained_values.iter().filter(|v| function.dfg.get_numeric_constant(**v).is_none()); + + // For now, consider array element constraints to be array constraints + // TODO(https://github.com/noir-lang/noir/issues/6698): + // This probably has to be further looked into, to ensure _every_ element + // of an array result of a Brillig call has been constrained + let constrained_values: HashSet<_> = constrained_values + .map(|v| { + if let Some(parent_array) = self.array_elements.get(v) { + *parent_array + } else { + *v + } + }) + .collect(); + + self.tainted.iter_mut().for_each(|(_, tainted_ids)| { + tainted_ids.store_partial_constraints(&constrained_values); + }); + self.tainted.retain(|_, tainted_ids| !tainted_ids.check_constrained()); + } +} + #[derive(Default)] struct Context { visited_blocks: HashSet, @@ -75,7 +443,7 @@ struct Context { impl Context { /// Compute sets of variable ValueIds that are connected with 
constraints /// - /// Additionally, store information about brillig calls in the context + /// Additionally, store information about Brillig calls in the context fn compute_sets_of_connected_value_ids( &mut self, function: &Function, @@ -122,7 +490,7 @@ impl Context { connected_sets_indices } - /// Find which brillig calls separate this set from others and return bug warnings about them + /// Find which Brillig calls separate this set from others and return bug warnings about them fn find_disconnecting_brillig_calls_with_results_in_set( &self, current_set: &HashSet, @@ -133,7 +501,7 @@ impl Context { // Find brillig-generated values in the set let intersection = all_brillig_generated_values.intersection(current_set).copied(); - // Go through all brillig outputs in the set + // Go through all Brillig outputs in the set for brillig_output_in_set in intersection { // Get the inputs that correspond to the output let inputs: HashSet = @@ -155,7 +523,7 @@ impl Context { } /// Go through each instruction in the block and add a set of ValueIds connected through that instruction /// - /// Additionally, this function adds mappings of brillig return values to call arguments and instruction ids from calls to brillig functions in the block + /// Additionally, this function adds mappings of Brillig return values to call arguments and instruction ids from calls to Brillig functions in the block fn connect_value_ids_in_block( &mut self, function: &Function, @@ -209,6 +577,7 @@ impl Context { | Intrinsic::AsField | Intrinsic::AsSlice | Intrinsic::BlackBox(..) 
+ | Intrinsic::Hint(Hint::BlackBox) | Intrinsic::DerivePedersenGenerators | Intrinsic::FromField | Intrinsic::SliceInsert @@ -228,7 +597,7 @@ impl Context { }, Value::Function(callee) => match all_functions[&callee].runtime() { RuntimeType::Brillig(_) => { - // For calls to brillig functions we memorize the mapping of results to argument ValueId's and InstructionId's + // For calls to Brillig functions we memorize the mapping of results to argument ValueId's and InstructionId's // The latter are needed to produce the callstack later for result in function.dfg.instruction_results(*instruction).iter().filter( @@ -248,7 +617,7 @@ impl Context { } }, Value::ForeignFunction(..) => { - panic!("Should not be able to reach foreign function from non-brillig functions, {func_id} in function {}", function.name()); + panic!("Should not be able to reach foreign function from non-Brillig functions, {func_id} in function {}", function.name()); } Value::Instruction { .. } | Value::NumericConstant { .. } @@ -354,83 +723,297 @@ impl Context { } #[cfg(test)] mod test { - use noirc_frontend::monomorphization::ast::InlineType; - - use crate::ssa::{ - function_builder::FunctionBuilder, - ir::{instruction::BinaryOp, map::Id, types::Type}, - }; + use crate::ssa::Ssa; + use tracing_test::traced_test; #[test] + #[traced_test] /// Test that a connected function raises no warnings fn test_simple_connected_function() { - // fn main { - // b0(v0: Field, v1: Field): - // v2 = add v0, 1 - // v3 = mul v1, 2 - // v4 = eq v2, v3 - // return v2 - // } - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id); - let v0 = builder.add_parameter(Type::field()); - let v1 = builder.add_parameter(Type::field()); - - let one = builder.field_constant(1u128); - let two = builder.field_constant(2u128); - - let v2 = builder.insert_binary(v0, BinaryOp::Add, one); - let v3 = builder.insert_binary(v1, BinaryOp::Mul, two); - let _v4 = builder.insert_binary(v2, BinaryOp::Eq, 
v3); - builder.terminate_with_return(vec![v2]); - - let mut ssa = builder.finish(); + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = add v0, Field 1 + v3 = mul v1, Field 2 + v4 = eq v2, v3 + return v2 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); let ssa_level_warnings = ssa.check_for_underconstrained_values(); assert_eq!(ssa_level_warnings.len(), 0); } #[test] - /// Test where the results of a call to a brillig function are not connected to main function inputs or outputs + #[traced_test] + /// Test where the results of a call to a Brillig function are not connected to main function inputs or outputs /// This should be detected. fn test_simple_function_with_disconnected_part() { - // unconstrained fn br(v0: Field, v1: Field){ - // v2 = add v0, v1 - // return v2 - // } - // - // fn main { - // b0(v0: Field, v1: Field): - // v2 = add v0, 1 - // v3 = mul v1, 2 - // v4 = call br(v2, v3) - // v5 = add v4, 2 - // return - // } - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id); - let v0 = builder.add_parameter(Type::field()); - let v1 = builder.add_parameter(Type::field()); - - let one = builder.field_constant(1u128); - let two = builder.field_constant(2u128); - - let v2 = builder.insert_binary(v0, BinaryOp::Add, one); - let v3 = builder.insert_binary(v1, BinaryOp::Mul, two); - - let br_function_id = Id::test_new(1); - let br_function = builder.import_function(br_function_id); - let v4 = builder.insert_call(br_function, vec![v2, v3], vec![Type::field()])[0]; - let v5 = builder.insert_binary(v4, BinaryOp::Add, two); - builder.insert_constrain(v5, one, None); - builder.terminate_with_return(vec![]); - - builder.new_brillig_function("br".into(), br_function_id, InlineType::default()); - let v0 = builder.add_parameter(Type::field()); - let v1 = builder.add_parameter(Type::field()); - let v2 = builder.insert_binary(v0, BinaryOp::Add, v1); - builder.terminate_with_return(vec![v2]); 
- let mut ssa = builder.finish(); + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = add v0, Field 1 + v3 = mul v1, Field 2 + v4 = call f1(v2, v3) -> Field + v5 = add v4, Field 2 + return + } + + brillig(inline) fn br f1 { + b0(v0: Field, v1: Field): + v2 = add v0, v1 + return v2 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); let ssa_level_warnings = ssa.check_for_underconstrained_values(); assert_eq!(ssa_level_warnings.len(), 1); } + + #[test] + #[traced_test] + /// Test where a call to a Brillig function is left unchecked with a later assert, + /// by example of the program illustrating issue #5425 (simplified variant). + fn test_underconstrained_value_detector_5425() { + /* + unconstrained fn maximum_price(options: [u32; 2]) -> u32 { + let mut maximum_option = options[0]; + if (options[1] > options[0]) { + maximum_option = options[1]; + } + maximum_option + } + + fn main(sandwiches: pub [u32; 2], drinks: pub [u32; 2], best_value: u32) { + let most_expensive_sandwich = maximum_price(sandwiches); + let mut sandwich_exists = false; + sandwich_exists |= (sandwiches[0] == most_expensive_sandwich); + sandwich_exists |= (sandwiches[1] == most_expensive_sandwich); + assert(sandwich_exists); + + let most_expensive_drink = maximum_price(drinks); + assert( + best_value + == (most_expensive_sandwich + most_expensive_drink) + ); + } + */ + + // The Brillig function is fake, for simplicity's sake + + let program = r#" + acir(inline) fn main f0 { + b0(v4: [u32; 2], v5: [u32; 2], v6: u32): + inc_rc v4 + inc_rc v5 + v8 = call f1(v4) -> u32 + v9 = allocate -> &mut u32 + store u1 0 at v9 + v10 = load v9 -> u1 + v11 = array_get v4, index u32 0 -> u32 + v12 = eq v11, v8 + v13 = or v10, v12 + store v13 at v9 + v14 = load v9 -> u1 + v15 = array_get v4, index u32 1 -> u32 + v16 = eq v15, v8 + v17 = or v14, v16 + store v17 at v9 + v18 = load v9 -> u1 + constrain v18 == u1 1 + v19 = call f1(v5) -> u32 + v20 = add v8, v19 + constrain v6 == v20 
+ dec_rc v4 + dec_rc v5 + return + } + + brillig(inline) fn maximum_price f1 { + b0(v0: [u32; 2]): + v2 = array_get v0, index u32 0 -> u32 + return v2 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 1); + } + + #[test] + #[traced_test] + /// Test where a call to a Brillig function returning multiple result values + /// is left unchecked with a later assert involving all the results + fn test_unchecked_multiple_results_brillig() { + // First call is constrained properly, involving both results + // Second call is insufficiently constrained, involving only one of the results + // The Brillig function is fake, for simplicity's sake + let program = r#" + acir(inline) fn main f0 { + b0(v0: u32): + v2, v3 = call f1(v0) -> (u32, u32) + v4 = mul v2, v3 + constrain v4 == v0 + v5, v6 = call f1(v0) -> (u32, u32) + v7 = mul v5, v5 + constrain v7 == v0 + return + } + + brillig(inline) fn factor f1 { + b0(v0: u32): + return u32 0, u32 0 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 1); + } + + #[test] + #[traced_test] + /// Test where a Brillig function is called with a constant argument + /// (should _not_ lead to a false positive failed check + /// if all the results are constrained) + fn test_checked_brillig_with_constant_arguments() { + // The call is constrained properly, involving both results + // (but the argument to the Brillig is a constant) + // The Brillig function is fake, for simplicity's sake + + let program = r#" + acir(inline) fn main f0 { + b0(v0: u32): + v3, v4 = call f1(Field 7) -> (u32, u32) + v5 = mul v3, v4 + constrain v5 == v0 + return + } + + brillig(inline) fn factor f1 { + b0(v0: Field): + return u32 0, u32 0 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = 
ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 0); + } + + #[test] + #[traced_test] + /// Test where a Brillig function call is constrained with a range check + /// (should _not_ lead to a false positive failed check) + fn test_range_checked_brillig() { + // The call is constrained properly with a range check, involving + // both Brillig call argument and result + // The Brillig function is fake, for simplicity's sake + + let program = r#" + acir(inline) fn main f0 { + b0(v0: u32): + v2 = call f1(v0) -> u32 + v3 = add v2, v0 + range_check v3 to 32 bits + return + } + + brillig(inline) fn dummy f1 { + b0(v0: u32): + return u32 0 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 0); + } + + #[test] + #[traced_test] + /// Test where a Brillig nested type result is insufficiently constrained + /// (with a field constraint missing) + fn test_nested_type_result_brillig() { + /* + struct Animal { + legs: Field, + eyes: u8, + tag: Tag, + } + + struct Tag { + no: Field, + } + + unconstrained fn foo(bar: Field) -> Animal { + Animal { + legs: 4, + eyes: 2, + tag: Tag { no: bar } + } + } + + fn main(x: Field) -> pub Animal { + let dog = foo(x); + assert(dog.legs == 4); + assert(dog.tag.no == x); + + dog + } + */ + + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field): + v2, v3, v4 = call f1(v0) -> (Field, u8, Field) + v6 = eq v2, Field 4 + constrain v2 == Field 4 + v10 = eq v4, v0 + constrain v4 == v0 + return v2, v3, v4 + } + + brillig(inline) fn foo f1 { + b0(v0: Field): + return Field 4, u8 2, v0 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 1); + } + + #[test] + #[traced_test] + /// Test where Brillig calls' root result values are constrained against + /// each other (covers a false 
negative edge case) + /// (https://github.com/noir-lang/noir/pull/6658#pullrequestreview-2482170066) + fn test_root_result_intersection_false_negative() { + let program = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = call f1(v0, v1) -> Field + v5 = call f1(v0, v1) -> Field + v6 = eq v3, v5 + constrain v3 == v5 + v8 = add v3, v5 + return v8 + } + + brillig(inline) fn foo f1 { + b0(v0: Field, v1: Field): + v2 = add v0, v1 + return v2 + } + "#; + + let mut ssa = Ssa::from_str(program).unwrap(); + let ssa_level_warnings = ssa.check_for_missing_brillig_constraints(); + assert_eq!(ssa_level_warnings.len(), 2); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index bd2585a3bfa..1d18683ee9e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -1,6 +1,10 @@ use std::{collections::BTreeMap, sync::Arc}; -use crate::ssa::ir::{function::RuntimeType, types::Type, value::ValueId}; +use crate::ssa::ir::{ + function::RuntimeType, + types::{NumericType, Type}, + value::ValueId, +}; use acvm::FieldElement; use fxhash::FxHashMap as HashMap; use noirc_frontend::ast; @@ -115,7 +119,7 @@ impl FunctionBuilder { /// Insert a value into a data bus builder fn add_to_data_bus(&mut self, value: ValueId, databus: &mut DataBusBuilder) { assert!(databus.databus.is_none(), "initializing finalized call data"); - let typ = self.current_function.dfg[value].get_type().clone(); + let typ = self.current_function.dfg[value].get_type().into_owned(); match typ { Type::Numeric(_) => { databus.values.push_back(value); @@ -128,10 +132,10 @@ impl FunctionBuilder { for _i in 0..len { for subitem_typ in typ.iter() { // load each element of the array, and add it to the databus - let index_var = self - .current_function - .dfg - 
.make_constant(FieldElement::from(index as i128), Type::length_type()); + let length_type = NumericType::length_type(); + let index_var = FieldElement::from(index as i128); + let index_var = + self.current_function.dfg.make_constant(index_var, length_type); let element = self.insert_array_get(value, index_var, subitem_typ.clone()); index += match subitem_typ { Type::Array(_, _) | Type::Slice(_) => subitem_typ.element_size(), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 0ae61404442..fe654912ad3 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -21,6 +21,7 @@ use super::{ dfg::{CallStack, InsertInstructionResult}, function::RuntimeType, instruction::{ConstrainError, InstructionId, Intrinsic}, + types::NumericType, }, ssa_gen::Ssa, }; @@ -122,19 +123,19 @@ impl FunctionBuilder { pub(crate) fn numeric_constant( &mut self, value: impl Into, - typ: Type, + typ: NumericType, ) -> ValueId { self.current_function.dfg.make_constant(value.into(), typ) } /// Insert a numeric constant into the current function of type Field pub(crate) fn field_constant(&mut self, value: impl Into) -> ValueId { - self.numeric_constant(value.into(), Type::field()) + self.numeric_constant(value.into(), NumericType::NativeField) } /// Insert a numeric constant into the current function of type Type::length_type() pub(crate) fn length_constant(&mut self, value: impl Into) -> ValueId { - self.numeric_constant(value.into(), Type::length_type()) + self.numeric_constant(value.into(), NumericType::length_type()) } /// Returns the type of the given value. 
@@ -195,7 +196,7 @@ impl FunctionBuilder { } pub(crate) fn set_location(&mut self, location: Location) -> &mut FunctionBuilder { - self.call_stack = im::Vector::unit(location); + self.call_stack = CallStack::unit(location); self } @@ -251,7 +252,7 @@ impl FunctionBuilder { /// Insert a cast instruction at the end of the current block. /// Returns the result of the cast instruction. - pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: NumericType) -> ValueId { self.insert_instruction(Instruction::Cast(value, typ), None).first() } @@ -526,7 +527,7 @@ mod tests { use crate::ssa::ir::{ instruction::{Endian, Intrinsic}, map::Id, - types::Type, + types::{NumericType, Type}, }; use super::FunctionBuilder; @@ -538,12 +539,12 @@ mod tests { // let bits: [u1; 8] = x.to_le_bits(); let func_id = Id::test_new(0); let mut builder = FunctionBuilder::new("func".into(), func_id); - let one = builder.numeric_constant(FieldElement::one(), Type::bool()); - let zero = builder.numeric_constant(FieldElement::zero(), Type::bool()); + let one = builder.numeric_constant(FieldElement::one(), NumericType::bool()); + let zero = builder.numeric_constant(FieldElement::zero(), NumericType::bool()); let to_bits_id = builder.import_intrinsic_id(Intrinsic::ToBits(Endian::Little)); - let input = builder.numeric_constant(FieldElement::from(7_u128), Type::field()); - let length = builder.numeric_constant(FieldElement::from(8_u128), Type::field()); + let input = builder.field_constant(FieldElement::from(7_u128)); + let length = builder.field_constant(FieldElement::from(8_u128)); let result_types = vec![Type::Array(Arc::new(vec![Type::bool()]), 8)]; let call_results = builder.insert_call(to_bits_id, vec![input, length], result_types).into_owned(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/cfg.rs index 38e6efa5b9a..2268e6b2191 100644 --- 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/cfg.rs @@ -231,7 +231,7 @@ mod tests { func.dfg[block2_id].set_terminator(TerminatorInstruction::Jmp { destination: ret_block_id, arguments: vec![], - call_stack: im::Vector::new(), + call_stack: CallStack::new(), }); func.dfg[block0_id].set_terminator(TerminatorInstruction::JmpIf { condition: cond, diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 827944e22d1..7546cba19d8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -9,7 +9,7 @@ use super::{ Instruction, InstructionId, InstructionResultType, Intrinsic, TerminatorInstruction, }, map::DenseMap, - types::Type, + types::{NumericType, Type}, value::{Value, ValueId}, }; @@ -50,7 +50,7 @@ pub(crate) struct DataFlowGraph { /// Each constant is unique, attempting to insert the same constant /// twice will return the same ValueId. #[serde(skip)] - constants: HashMap<(FieldElement, Type), ValueId>, + constants: HashMap<(FieldElement, NumericType), ValueId>, /// Contains each function that has been imported into the current function. /// A unique `ValueId` for each function's [`Value::Function`] is stored so any given FunctionId @@ -97,7 +97,7 @@ pub(crate) struct DataFlowGraph { pub(crate) data_bus: DataBus, } -pub(crate) type CallStack = im::Vector; +pub(crate) type CallStack = super::list::List; impl DataFlowGraph { /// Creates a new basic block with no parameters. 
@@ -119,7 +119,7 @@ impl DataFlowGraph { let parameters = self.blocks[block].parameters(); let parameters = vecmap(parameters.iter().enumerate(), |(position, param)| { - let typ = self.values[*param].get_type().clone(); + let typ = self.values[*param].get_type().into_owned(); self.values.insert(Value::Param { block: new_block, position, typ }) }); @@ -233,11 +233,12 @@ impl DataFlowGraph { pub(crate) fn set_type_of_value(&mut self, value_id: ValueId, target_type: Type) { let value = &mut self.values[value_id]; match value { - Value::Instruction { typ, .. } - | Value::Param { typ, .. } - | Value::NumericConstant { typ, .. } => { + Value::Instruction { typ, .. } | Value::Param { typ, .. } => { *typ = target_type; } + Value::NumericConstant { typ, .. } => { + *typ = target_type.unwrap_numeric(); + } _ => { unreachable!("ICE: Cannot set type of {:?}", value); } @@ -257,11 +258,11 @@ impl DataFlowGraph { /// Creates a new constant value, or returns the Id to an existing one if /// one already exists. - pub(crate) fn make_constant(&mut self, constant: FieldElement, typ: Type) -> ValueId { - if let Some(id) = self.constants.get(&(constant, typ.clone())) { + pub(crate) fn make_constant(&mut self, constant: FieldElement, typ: NumericType) -> ValueId { + if let Some(id) = self.constants.get(&(constant, typ)) { return *id; } - let id = self.values.insert(Value::NumericConstant { constant, typ: typ.clone() }); + let id = self.values.insert(Value::NumericConstant { constant, typ }); self.constants.insert((constant, typ), id); id } @@ -342,7 +343,7 @@ impl DataFlowGraph { /// Returns the type of a given value pub(crate) fn type_of_value(&self, value: ValueId) -> Type { - self.values[value].get_type().clone() + self.values[value].get_type().into_owned() } /// Returns the maximum possible number of bits that `value` can potentially be. @@ -367,7 +368,7 @@ impl DataFlowGraph { /// True if the type of this value is Type::Reference. 
/// Using this method over type_of_value avoids cloning the value's type. pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { - matches!(self.values[value].get_type(), Type::Reference(_)) + matches!(self.values[value].get_type().as_ref(), Type::Reference(_)) } /// Replaces an instruction result with a fresh id. @@ -425,9 +426,9 @@ impl DataFlowGraph { pub(crate) fn get_numeric_constant_with_type( &self, value: ValueId, - ) -> Option<(FieldElement, Type)> { + ) -> Option<(FieldElement, NumericType)> { match &self.values[self.resolve(value)] { - Value::NumericConstant { constant, typ } => Some((*constant, typ.clone())), + Value::NumericConstant { constant, typ } => Some((*constant, *typ)), _ => None, } } @@ -496,7 +497,7 @@ impl DataFlowGraph { pub(crate) fn get_value_call_stack(&self, value: ValueId) -> CallStack { match &self.values[self.resolve(value)] { Value::Instruction { instruction, .. } => self.get_call_stack(*instruction), - _ => im::Vector::new(), + _ => CallStack::new(), } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 76409f6a20a..fb35978d906 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -1,4 +1,3 @@ -use noirc_errors::Location; use serde::{Deserialize, Serialize}; use std::hash::{Hash, Hasher}; @@ -64,6 +63,7 @@ pub(crate) enum Intrinsic { ToBits(Endian), ToRadix(Endian), BlackBox(BlackBoxFunc), + Hint(Hint), FromField, AsField, AsWitness, @@ -95,6 +95,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), Intrinsic::ToRadix(Endian::Little) => write!(f, "to_le_radix"), Intrinsic::BlackBox(function) => write!(f, "{function}"), + Intrinsic::Hint(Hint::BlackBox) => write!(f, "black_box"), Intrinsic::FromField => write!(f, "from_field"), Intrinsic::AsField => write!(f, "as_field"), 
Intrinsic::AsWitness => write!(f, "as_witness"), @@ -144,6 +145,9 @@ impl Intrinsic { | Intrinsic::DerivePedersenGenerators | Intrinsic::FieldLessThan => false, + // Treat the black_box hint as-if it could potentially have side effects. + Intrinsic::Hint(Hint::BlackBox) => true, + // Some black box functions have side-effects Intrinsic::BlackBox(func) => matches!( func, @@ -214,6 +218,7 @@ impl Intrinsic { "is_unconstrained" => Some(Intrinsic::IsUnconstrained), "derive_pedersen_generators" => Some(Intrinsic::DerivePedersenGenerators), "field_less_than" => Some(Intrinsic::FieldLessThan), + "black_box" => Some(Intrinsic::Hint(Hint::BlackBox)), "array_refcount" => Some(Intrinsic::ArrayRefCount), "slice_refcount" => Some(Intrinsic::SliceRefCount), @@ -229,6 +234,16 @@ pub(crate) enum Endian { Little, } +/// Compiler hints. +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)] +pub(crate) enum Hint { + /// Hint to the compiler to treat the call as having potential side effects, + /// so that the value passed to it can survive SSA passes without being + /// simplified out completely. This facilitates testing and reproducing + /// runtime behavior with constants. + BlackBox, +} + #[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] /// Instructions are used to perform tasks. /// The instructions that the IR is able to specify are listed below. @@ -236,8 +251,8 @@ pub(crate) enum Instruction { /// Binary Operations like +, -, *, /, ==, != Binary(Binary), - /// Converts `Value` into Typ - Cast(ValueId, Type), + /// Converts `Value` into the given NumericType + Cast(ValueId, NumericType), /// Computes a bit wise not Not(ValueId), @@ -339,9 +354,8 @@ impl Instruction { pub(crate) fn result_type(&self) -> InstructionResultType { match self { Instruction::Binary(binary) => binary.result_type(), - Instruction::Cast(_, typ) | Instruction::MakeArray { typ, .. 
} => { - InstructionResultType::Known(typ.clone()) - } + Instruction::Cast(_, typ) => InstructionResultType::Known(Type::Numeric(*typ)), + Instruction::MakeArray { typ, .. } => InstructionResultType::Known(typ.clone()), Instruction::Not(value) | Instruction::Truncate { value, .. } | Instruction::ArraySet { array: value, .. } @@ -587,7 +601,7 @@ impl Instruction { rhs: f(binary.rhs), operator: binary.operator, }), - Instruction::Cast(value, typ) => Instruction::Cast(f(*value), typ.clone()), + Instruction::Cast(value, typ) => Instruction::Cast(f(*value), *typ), Instruction::Not(value) => Instruction::Not(f(*value)), Instruction::Truncate { value, bit_size, max_bit_size } => Instruction::Truncate { value: f(*value), @@ -735,7 +749,7 @@ impl Instruction { use SimplifyResult::*; match self { Instruction::Binary(binary) => binary.simplify(dfg), - Instruction::Cast(value, typ) => simplify_cast(*value, typ, dfg), + Instruction::Cast(value, typ) => simplify_cast(*value, *typ, dfg), Instruction::Not(value) => { match &dfg[dfg.resolve(*value)] { // Limit optimizing ! on constants to only booleans. If we tried it on fields, @@ -744,7 +758,7 @@ impl Instruction { Value::NumericConstant { constant, typ } if typ.is_unsigned() => { // As we're casting to a `u128`, we need to clear out any upper bits that the NOT fills. let value = !constant.to_u128() % (1 << typ.bit_size()); - SimplifiedTo(dfg.make_constant(value.into(), typ.clone())) + SimplifiedTo(dfg.make_constant(value.into(), *typ)) } Value::Instruction { instruction, .. } => { // !!v => v @@ -1233,7 +1247,7 @@ impl TerminatorInstruction { } } - pub(crate) fn call_stack(&self) -> im::Vector { + pub(crate) fn call_stack(&self) -> CallStack { match self { TerminatorInstruction::JmpIf { call_stack, .. } | TerminatorInstruction::Jmp { call_stack, .. 
} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs index 487370488b9..81f2f3b1e01 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/binary.rs @@ -88,7 +88,7 @@ impl Binary { pub(super) fn simplify(&self, dfg: &mut DataFlowGraph) -> SimplifyResult { let lhs = dfg.get_numeric_constant(self.lhs); let rhs = dfg.get_numeric_constant(self.rhs); - let operand_type = dfg.type_of_value(self.lhs); + let operand_type = dfg.type_of_value(self.lhs).unwrap_numeric(); if let (Some(lhs), Some(rhs)) = (lhs, rhs) { return match eval_constant_binary_op(lhs, rhs, self.operator, operand_type) { @@ -168,11 +168,11 @@ impl Binary { } BinaryOp::Eq => { if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { - let one = dfg.make_constant(FieldElement::one(), Type::bool()); + let one = dfg.make_constant(FieldElement::one(), NumericType::bool()); return SimplifyResult::SimplifiedTo(one); } - if operand_type == Type::bool() { + if operand_type == NumericType::bool() { // Simplify forms of `(boolean == true)` into `boolean` if lhs_is_one { return SimplifyResult::SimplifiedTo(self.rhs); @@ -191,13 +191,13 @@ impl Binary { } BinaryOp::Lt => { if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { - let zero = dfg.make_constant(FieldElement::zero(), Type::bool()); + let zero = dfg.make_constant(FieldElement::zero(), NumericType::bool()); return SimplifyResult::SimplifiedTo(zero); } if operand_type.is_unsigned() { if rhs_is_zero { // Unsigned values cannot be less than zero. 
- let zero = dfg.make_constant(FieldElement::zero(), Type::bool()); + let zero = dfg.make_constant(FieldElement::zero(), NumericType::bool()); return SimplifyResult::SimplifiedTo(zero); } else if rhs_is_one { let zero = dfg.make_constant(FieldElement::zero(), operand_type); @@ -217,7 +217,7 @@ impl Binary { if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { return SimplifyResult::SimplifiedTo(self.lhs); } - if operand_type == Type::bool() { + if operand_type == NumericType::bool() { // Boolean AND is equivalent to multiplication, which is a cheaper operation. let instruction = Instruction::binary(BinaryOp::Mul, self.lhs, self.rhs); return SimplifyResult::SimplifiedToInstruction(instruction); @@ -256,6 +256,10 @@ impl Binary { if rhs_is_zero { return SimplifyResult::SimplifiedTo(self.lhs); } + if operand_type == NumericType::bool() && (lhs_is_one || rhs_is_one) { + let one = dfg.make_constant(FieldElement::one(), operand_type); + return SimplifyResult::SimplifiedTo(one); + } if dfg.resolve(self.lhs) == dfg.resolve(self.rhs) { return SimplifyResult::SimplifiedTo(self.lhs); } @@ -294,10 +298,10 @@ fn eval_constant_binary_op( lhs: FieldElement, rhs: FieldElement, operator: BinaryOp, - mut operand_type: Type, -) -> Option<(FieldElement, Type)> { - let value = match &operand_type { - Type::Numeric(NumericType::NativeField) => { + mut operand_type: NumericType, +) -> Option<(FieldElement, NumericType)> { + let value = match operand_type { + NumericType::NativeField => { // If the rhs of a division is zero, attempting to evaluate the division will cause a compiler panic. // Thus, we do not evaluate the division in this method, as we want to avoid triggering a panic, // and the operation should be handled by ACIR generation. 
@@ -306,11 +310,11 @@ fn eval_constant_binary_op( } operator.get_field_function()?(lhs, rhs) } - Type::Numeric(NumericType::Unsigned { bit_size }) => { + NumericType::Unsigned { bit_size } => { let function = operator.get_u128_function(); - let lhs = truncate(lhs.try_into_u128()?, *bit_size); - let rhs = truncate(rhs.try_into_u128()?, *bit_size); + let lhs = truncate(lhs.try_into_u128()?, bit_size); + let rhs = truncate(rhs.try_into_u128()?, bit_size); // The divisor is being truncated into the type of the operand, which can potentially // lead to the rhs being zero. @@ -322,16 +326,16 @@ fn eval_constant_binary_op( } let result = function(lhs, rhs)?; // Check for overflow - if result >= 1 << *bit_size { + if result >= 1 << bit_size { return None; } result.into() } - Type::Numeric(NumericType::Signed { bit_size }) => { + NumericType::Signed { bit_size } => { let function = operator.get_i128_function(); - let lhs = try_convert_field_element_to_signed_integer(lhs, *bit_size)?; - let rhs = try_convert_field_element_to_signed_integer(rhs, *bit_size)?; + let lhs = try_convert_field_element_to_signed_integer(lhs, bit_size)?; + let rhs = try_convert_field_element_to_signed_integer(rhs, bit_size)?; // The divisor is being truncated into the type of the operand, which can potentially // lead to the rhs being zero. // If the rhs of a division is zero, attempting to evaluate the division will cause a compiler panic. 
@@ -343,17 +347,16 @@ fn eval_constant_binary_op( let result = function(lhs, rhs)?; // Check for overflow - let two_pow_bit_size_minus_one = 1i128 << (*bit_size - 1); + let two_pow_bit_size_minus_one = 1i128 << (bit_size - 1); if result >= two_pow_bit_size_minus_one || result < -two_pow_bit_size_minus_one { return None; } - convert_signed_integer_to_field_element(result, *bit_size) + convert_signed_integer_to_field_element(result, bit_size) } - _ => return None, }; if matches!(operator, BinaryOp::Eq | BinaryOp::Lt) { - operand_type = Type::bool(); + operand_type = NumericType::bool(); } Some((value, operand_type)) diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index a8db5e2ff94..02be0910a13 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -15,13 +15,13 @@ use crate::ssa::{ dfg::{CallStack, DataFlowGraph}, instruction::Intrinsic, map::Id, - types::Type, + types::{NumericType, Type}, value::{Value, ValueId}, }, opt::flatten_cfg::value_merger::ValueMerger, }; -use super::{Binary, BinaryOp, Endian, Instruction, SimplifyResult}; +use super::{Binary, BinaryOp, Endian, Hint, Instruction, SimplifyResult}; mod blackbox; @@ -61,7 +61,13 @@ pub(super) fn simplify_call( unreachable!("ICE: Intrinsic::ToRadix return type must be array") }; constant_to_radix(endian, field, 2, limb_count, |values| { - make_constant_array(dfg, values.into_iter(), Type::bool(), block, call_stack) + make_constant_array( + dfg, + values.into_iter(), + NumericType::bool(), + block, + call_stack, + ) }) } else { SimplifyResult::None @@ -81,7 +87,7 @@ pub(super) fn simplify_call( make_constant_array( dfg, values.into_iter(), - Type::unsigned(8), + NumericType::Unsigned { bit_size: 8 }, block, call_stack, ) @@ -93,7 +99,7 @@ pub(super) fn simplify_call( Intrinsic::ArrayLen => { if let 
Some(length) = dfg.try_get_array_length(arguments[0]) { let length = FieldElement::from(length as u128); - SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::length_type())) + SimplifyResult::SimplifiedTo(dfg.make_constant(length, NumericType::length_type())) } else if matches!(dfg.type_of_value(arguments[1]), Type::Slice(_)) { SimplifyResult::SimplifiedTo(arguments[0]) } else { @@ -116,7 +122,7 @@ pub(super) fn simplify_call( ); let slice_length_value = array.len() / elements_size; let slice_length = - dfg.make_constant(slice_length_value.into(), Type::length_type()); + dfg.make_constant(slice_length_value.into(), NumericType::length_type()); let new_slice = make_array(dfg, array, Type::Slice(inner_element_types), block, call_stack); SimplifyResult::SimplifiedToMultiple(vec![slice_length, new_slice]) @@ -326,14 +332,12 @@ pub(super) fn simplify_call( SimplifyResult::None } } + Intrinsic::Hint(Hint::BlackBox) => SimplifyResult::None, Intrinsic::BlackBox(bb_func) => { simplify_black_box_func(bb_func, arguments, dfg, block, call_stack) } Intrinsic::AsField => { - let instruction = Instruction::Cast( - arguments[0], - Type::Numeric(crate::ssa::ir::types::NumericType::NativeField), - ); + let instruction = Instruction::Cast(arguments[0], NumericType::NativeField); SimplifyResult::SimplifiedToInstruction(instruction) } Intrinsic::FromField => { @@ -354,7 +358,7 @@ pub(super) fn simplify_call( ) .first(); - let instruction = Instruction::Cast(truncated_value, target_type); + let instruction = Instruction::Cast(truncated_value, target_type.unwrap_numeric()); SimplifyResult::SimplifiedToInstruction(instruction) } Intrinsic::AsWitness => SimplifyResult::None, @@ -370,7 +374,7 @@ pub(super) fn simplify_call( if let Some(constants) = constant_args { let lhs = constants[0]; let rhs = constants[1]; - let result = dfg.make_constant((lhs < rhs).into(), Type::bool()); + let result = dfg.make_constant((lhs < rhs).into(), NumericType::bool()); 
SimplifyResult::SimplifiedTo(result) } else { SimplifyResult::None @@ -406,7 +410,7 @@ fn update_slice_length( operator: BinaryOp, block: BasicBlockId, ) -> ValueId { - let one = dfg.make_constant(FieldElement::one(), Type::length_type()); + let one = dfg.make_constant(FieldElement::one(), NumericType::length_type()); let instruction = Instruction::Binary(Binary { lhs: slice_len, operator, rhs: one }); let call_stack = dfg.get_value_call_stack(slice_len); dfg.insert_instruction_and_results(instruction, block, None, call_stack).first() @@ -421,7 +425,7 @@ fn simplify_slice_push_back( call_stack: CallStack, ) -> SimplifyResult { // The capacity must be an integer so that we can compare it against the slice length - let capacity = dfg.make_constant((slice.len() as u128).into(), Type::length_type()); + let capacity = dfg.make_constant((slice.len() as u128).into(), NumericType::length_type()); let len_equals_capacity_instr = Instruction::Binary(Binary { lhs: arguments[0], operator: BinaryOp::Eq, rhs: capacity }); let len_equals_capacity = dfg @@ -476,25 +480,20 @@ fn simplify_slice_push_back( } fn simplify_slice_pop_back( - element_type: Type, + slice_type: Type, arguments: &[ValueId], dfg: &mut DataFlowGraph, block: BasicBlockId, call_stack: CallStack, ) -> SimplifyResult { - let element_types = match element_type.clone() { - Type::Slice(element_types) | Type::Array(element_types, _) => element_types, - _ => { - unreachable!("ICE: Expected slice or array, but got {element_type}"); - } - }; - - let element_count = element_type.element_size(); + let element_types = slice_type.element_types(); + let element_count = element_types.len(); let mut results = VecDeque::with_capacity(element_count + 1); let new_slice_length = update_slice_length(arguments[0], dfg, BinaryOp::Sub, block); - let element_size = dfg.make_constant((element_count as u128).into(), Type::length_type()); + let element_size = + dfg.make_constant((element_count as u128).into(), NumericType::length_type()); 
let flattened_len_instr = Instruction::binary(BinaryOp::Mul, arguments[0], element_size); let mut flattened_len = dfg .insert_instruction_and_results(flattened_len_instr, block, None, call_stack.clone()) @@ -502,14 +501,17 @@ fn simplify_slice_pop_back( flattened_len = update_slice_length(flattened_len, dfg, BinaryOp::Sub, block); // We must pop multiple elements in the case of a slice of tuples - for _ in 0..element_count { + // Iterating through element types in reverse here since we're popping from the end + for element_type in element_types.iter().rev() { let get_last_elem_instr = Instruction::ArrayGet { array: arguments[1], index: flattened_len }; + + let element_type = Some(vec![element_type.clone()]); let get_last_elem = dfg .insert_instruction_and_results( get_last_elem_instr, block, - Some(element_types.to_vec()), + element_type, call_stack.clone(), ) .first(); @@ -568,7 +570,7 @@ fn simplify_black_box_func( let result_array = make_constant_array( dfg, state_values, - Type::unsigned(64), + NumericType::Unsigned { bit_size: 64 }, block, call_stack, ); @@ -628,14 +630,14 @@ fn simplify_black_box_func( fn make_constant_array( dfg: &mut DataFlowGraph, results: impl Iterator, - typ: Type, + typ: NumericType, block: BasicBlockId, call_stack: &CallStack, ) -> ValueId { let result_constants: im::Vector<_> = - results.map(|element| dfg.make_constant(element, typ.clone())).collect(); + results.map(|element| dfg.make_constant(element, typ)).collect(); - let typ = Type::Array(Arc::new(vec![typ]), result_constants.len() as u32); + let typ = Type::Array(Arc::new(vec![Type::Numeric(typ)]), result_constants.len() as u32); make_array(dfg, result_constants, typ, block, call_stack) } @@ -651,23 +653,6 @@ fn make_array( dfg.insert_instruction_and_results(instruction, block, None, call_stack).first() } -fn make_constant_slice( - dfg: &mut DataFlowGraph, - results: Vec, - typ: Type, - block: BasicBlockId, - call_stack: &CallStack, -) -> (ValueId, ValueId) { - let 
result_constants = vecmap(results, |element| dfg.make_constant(element, typ.clone())); - - let typ = Type::Slice(Arc::new(vec![typ])); - let length = FieldElement::from(result_constants.len() as u128); - let length = dfg.make_constant(length, Type::length_type()); - - let slice = make_array(dfg, result_constants.into(), typ, block, call_stack); - (length, slice) -} - /// Returns a slice (represented by a tuple (len, slice)) of constants corresponding to the limbs of the radix decomposition. fn constant_to_radix( endian: Endian, @@ -732,8 +717,8 @@ fn simplify_hash( let hash_values = hash.iter().map(|byte| FieldElement::from_be_bytes_reduce(&[*byte])); - let result_array = - make_constant_array(dfg, hash_values, Type::unsigned(8), block, call_stack); + let u8_type = NumericType::Unsigned { bit_size: 8 }; + let result_array = make_constant_array(dfg, hash_values, u8_type, block, call_stack); SimplifyResult::SimplifiedTo(result_array) } _ => SimplifyResult::None, @@ -781,7 +766,7 @@ fn simplify_signature( signature_verifier(&hashed_message, &public_key_x, &public_key_y, &signature) .expect("Rust solvable black box function should not fail"); - let valid_signature = dfg.make_constant(valid_signature.into(), Type::bool()); + let valid_signature = dfg.make_constant(valid_signature.into(), NumericType::bool()); SimplifyResult::SimplifiedTo(valid_signature) } _ => SimplifyResult::None, @@ -811,15 +796,15 @@ fn simplify_derive_generators( num_generators, starting_index.try_to_u32().expect("argument is declared as u32"), ); - let is_infinite = dfg.make_constant(FieldElement::zero(), Type::bool()); + let is_infinite = dfg.make_constant(FieldElement::zero(), NumericType::bool()); let mut results = Vec::new(); for gen in generators { let x_big: BigUint = gen.x.into(); let x = FieldElement::from_be_bytes_reduce(&x_big.to_bytes_be()); let y_big: BigUint = gen.y.into(); let y = FieldElement::from_be_bytes_reduce(&y_big.to_bytes_be()); - results.push(dfg.make_constant(x, 
Type::field())); - results.push(dfg.make_constant(y, Type::field())); + results.push(dfg.make_constant(x, NumericType::NativeField)); + results.push(dfg.make_constant(y, NumericType::NativeField)); results.push(is_infinite); } let len = results.len() as u32; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs index b9faf1c46ec..a5de98cec7f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/call/blackbox.rs @@ -2,10 +2,12 @@ use std::sync::Arc; use acvm::{acir::AcirField, BlackBoxFunctionSolver, BlackBoxResolutionError, FieldElement}; +use crate::ssa::ir::instruction::BlackBoxFunc; +use crate::ssa::ir::types::NumericType; use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::{CallStack, DataFlowGraph}, - instruction::{Instruction, SimplifyResult}, + instruction::{Instruction, Intrinsic, SimplifyResult}, types::Type, value::ValueId, }; @@ -46,9 +48,10 @@ pub(super) fn simplify_ec_add( return SimplifyResult::None; }; - let result_x = dfg.make_constant(result_x, Type::field()); - let result_y = dfg.make_constant(result_y, Type::field()); - let result_is_infinity = dfg.make_constant(result_is_infinity, Type::field()); + let result_x = dfg.make_constant(result_x, NumericType::NativeField); + let result_y = dfg.make_constant(result_y, NumericType::NativeField); + let result_is_infinity = + dfg.make_constant(result_is_infinity, NumericType::NativeField); let typ = Type::Array(Arc::new(vec![Type::field()]), 3); @@ -70,52 +73,130 @@ pub(super) fn simplify_msm( block: BasicBlockId, call_stack: &CallStack, ) -> SimplifyResult { - // TODO: Handle MSMs where a subset of the terms are constant. 
+ let mut is_constant; + match (dfg.get_array_constant(arguments[0]), dfg.get_array_constant(arguments[1])) { (Some((points, _)), Some((scalars, _))) => { - let Some(points) = points - .into_iter() - .map(|id| dfg.get_numeric_constant(id)) - .collect::>>() - else { - return SimplifyResult::None; - }; - - let Some(scalars) = scalars - .into_iter() - .map(|id| dfg.get_numeric_constant(id)) - .collect::>>() - else { - return SimplifyResult::None; - }; + // We decompose points and scalars into constant and non-constant parts in order to simplify MSMs where a subset of the terms are constant. + let mut constant_points = vec![]; + let mut constant_scalars_lo = vec![]; + let mut constant_scalars_hi = vec![]; + let mut var_points = vec![]; + let mut var_scalars = vec![]; + let len = scalars.len() / 2; + for i in 0..len { + match ( + dfg.get_numeric_constant(scalars[2 * i]), + dfg.get_numeric_constant(scalars[2 * i + 1]), + dfg.get_numeric_constant(points[3 * i]), + dfg.get_numeric_constant(points[3 * i + 1]), + dfg.get_numeric_constant(points[3 * i + 2]), + ) { + (Some(lo), Some(hi), _, _, _) if lo.is_zero() && hi.is_zero() => { + is_constant = true; + constant_scalars_lo.push(lo); + constant_scalars_hi.push(hi); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::one()); + } + (_, _, _, _, Some(infinity)) if infinity.is_one() => { + is_constant = true; + constant_scalars_lo.push(FieldElement::zero()); + constant_scalars_hi.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::zero()); + constant_points.push(FieldElement::one()); + } + (Some(lo), Some(hi), Some(x), Some(y), Some(infinity)) => { + is_constant = true; + constant_scalars_lo.push(lo); + constant_scalars_hi.push(hi); + constant_points.push(x); + constant_points.push(y); + constant_points.push(infinity); + } + _ => { + is_constant = false; + } + } - let mut scalars_lo = 
Vec::new(); - let mut scalars_hi = Vec::new(); - for (i, scalar) in scalars.into_iter().enumerate() { - if i % 2 == 0 { - scalars_lo.push(scalar); - } else { - scalars_hi.push(scalar); + if !is_constant { + var_points.push(points[3 * i]); + var_points.push(points[3 * i + 1]); + var_points.push(points[3 * i + 2]); + var_scalars.push(scalars[2 * i]); + var_scalars.push(scalars[2 * i + 1]); } } - let Ok((result_x, result_y, result_is_infinity)) = - solver.multi_scalar_mul(&points, &scalars_lo, &scalars_hi) - else { + // If there are no constant terms, we can't simplify + if constant_scalars_lo.is_empty() { + return SimplifyResult::None; + } + let Ok((result_x, result_y, result_is_infinity)) = solver.multi_scalar_mul( + &constant_points, + &constant_scalars_lo, + &constant_scalars_hi, + ) else { return SimplifyResult::None; }; - let result_x = dfg.make_constant(result_x, Type::field()); - let result_y = dfg.make_constant(result_y, Type::field()); - let result_is_infinity = dfg.make_constant(result_is_infinity, Type::field()); + // If there are no variable term, we can directly return the constant result + if var_scalars.is_empty() { + let result_x = dfg.make_constant(result_x, NumericType::NativeField); + let result_y = dfg.make_constant(result_y, NumericType::NativeField); + let result_is_infinity = + dfg.make_constant(result_is_infinity, NumericType::NativeField); - let elements = im::vector![result_x, result_y, result_is_infinity]; - let typ = Type::Array(Arc::new(vec![Type::field()]), 3); - let instruction = Instruction::MakeArray { elements, typ }; - let result_array = - dfg.insert_instruction_and_results(instruction, block, None, call_stack.clone()); + let elements = im::vector![result_x, result_y, result_is_infinity]; + let typ = Type::Array(Arc::new(vec![Type::field()]), 3); + let instruction = Instruction::MakeArray { elements, typ }; + let result_array = dfg.insert_instruction_and_results( + instruction, + block, + None, + call_stack.clone(), + ); - 
SimplifyResult::SimplifiedTo(result_array.first()) + return SimplifyResult::SimplifiedTo(result_array.first()); + } + // If there is only one non-null constant term, we cannot simplify + if constant_scalars_lo.len() == 1 && result_is_infinity != FieldElement::one() { + return SimplifyResult::None; + } + // Add the constant part back to the non-constant part, if it is not null + let one = dfg.make_constant(FieldElement::one(), NumericType::NativeField); + let zero = dfg.make_constant(FieldElement::zero(), NumericType::NativeField); + if result_is_infinity.is_zero() { + var_scalars.push(one); + var_scalars.push(zero); + let result_x = dfg.make_constant(result_x, NumericType::NativeField); + let result_y = dfg.make_constant(result_y, NumericType::NativeField); + + // Pushing a bool here is intentional, multi_scalar_mul takes two arguments: + // `points: [(Field, Field, bool); N]` and `scalars: [(Field, Field); N]`. + let result_is_infinity = dfg.make_constant(result_is_infinity, NumericType::bool()); + + var_points.push(result_x); + var_points.push(result_y); + var_points.push(result_is_infinity); + } + // Construct the simplified MSM expression + let typ = Type::Array(Arc::new(vec![Type::field()]), var_scalars.len() as u32); + let scalars = Instruction::MakeArray { elements: var_scalars.into(), typ }; + let scalars = dfg + .insert_instruction_and_results(scalars, block, None, call_stack.clone()) + .first(); + let typ = Type::Array(Arc::new(vec![Type::field()]), var_points.len() as u32); + let points = Instruction::MakeArray { elements: var_points.into(), typ }; + let points = + dfg.insert_instruction_and_results(points, block, None, call_stack.clone()).first(); + let msm = dfg.import_intrinsic(Intrinsic::BlackBox(BlackBoxFunc::MultiScalarMul)); + SimplifyResult::SimplifiedToInstruction(Instruction::Call { + func: msm, + arguments: vec![points, scalars], + }) } _ => SimplifyResult::None, } @@ -147,7 +228,7 @@ pub(super) fn simplify_poseidon2_permutation( }; let 
new_state = new_state.into_iter(); - let typ = Type::field(); + let typ = NumericType::NativeField; let result_array = make_constant_array(dfg, new_state, typ, block, call_stack); SimplifyResult::SimplifiedTo(result_array) @@ -172,7 +253,7 @@ pub(super) fn simplify_hash( let hash_values = hash.iter().map(|byte| FieldElement::from_be_bytes_reduce(&[*byte])); - let u8_type = Type::unsigned(8); + let u8_type = NumericType::Unsigned { bit_size: 8 }; let result_array = make_constant_array(dfg, hash_values, u8_type, block, call_stack); SimplifyResult::SimplifiedTo(result_array) } @@ -222,9 +303,99 @@ pub(super) fn simplify_signature( signature_verifier(&hashed_message, &public_key_x, &public_key_y, &signature) .expect("Rust solvable black box function should not fail"); - let valid_signature = dfg.make_constant(valid_signature.into(), Type::bool()); + let valid_signature = dfg.make_constant(valid_signature.into(), NumericType::bool()); SimplifyResult::SimplifiedTo(valid_signature) } _ => SimplifyResult::None, } } + +#[cfg(feature = "bn254")] +#[cfg(test)] +mod test { + use crate::ssa::opt::assert_normalized_ssa_equals; + use crate::ssa::Ssa; + + #[cfg(feature = "bn254")] + #[test] + fn full_constant_folding() { + let src = r#" + acir(inline) fn main f0 { + b0(): + v0 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] + v1 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] + v2 = call multi_scalar_mul (v1, v0) -> [Field; 3] + return v2 + }"#; + let ssa = Ssa::from_str(src).unwrap(); + + let expected_src = r#" + acir(inline) fn main f0 { + b0(): + v3 = make_array [Field 2, Field 3, Field 5, Field 5] : [Field; 4] + v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 6] + v10 = 
make_array [Field 1478523918288173385110236399861791147958001875200066088686689589556927843200, Field 700144278551281040379388961242974992655630750193306467120985766322057145630, Field 0] : [Field; 3] + return v10 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } + + #[cfg(feature = "bn254")] + #[test] + fn simplify_zero() { + let src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] + v3 = make_array [ + Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] + v4 = call multi_scalar_mul (v3, v2) -> [Field; 3] + + return v4 + + }"#; + let ssa = Ssa::from_str(src).unwrap(); + //First point is zero, second scalar is zero, so we should be left with the scalar mul of the last point. + let expected_src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v3 = make_array [v0, Field 0, Field 0, Field 0, v0, Field 0] : [Field; 6] + v5 = make_array [Field 0, Field 0, Field 1, v0, v1, Field 0, Field 1, v0, Field 0] : [Field; 9] + v6 = make_array [v0, Field 0] : [Field; 2] + v7 = make_array [Field 1, v0, Field 0] : [Field; 3] + v9 = call multi_scalar_mul(v7, v6) -> [Field; 3] + return v9 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } + + #[cfg(feature = "bn254")] + #[test] + fn partial_constant_folding() { + let src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v2 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] + v3 = make_array [ + Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] + v4 = call multi_scalar_mul (v3, v2) -> [Field; 3] + return v4 + }"#; + let ssa = Ssa::from_str(src).unwrap(); + //First and last scalar/point are constant, so we should be left with the msm of the middle point and the folded constant point + let 
expected_src = r#" + acir(inline) fn main f0 { + b0(v0: Field, v1: Field): + v5 = make_array [Field 1, Field 0, v0, Field 0, Field 2, Field 0] : [Field; 6] + v7 = make_array [Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0, v0, v1, Field 0, Field 1, Field 17631683881184975370165255887551781615748388533673675138860, Field 0] : [Field; 9] + v8 = make_array [v0, Field 0, Field 1, Field 0] : [Field; 4] + v12 = make_array [v0, v1, Field 0, Field -3227352362257037263902424173275354266044964400219754872043023745437788450996, Field 8902249110305491597038405103722863701255802573786510474664632793109847672620, u1 0] : [Field; 6] + v14 = call multi_scalar_mul(v12, v8) -> [Field; 3] + return v14 + } + "#; + assert_normalized_ssa_equals(ssa, expected_src); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs index ed588def1d7..ee2ab43aa5d 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/cast.rs @@ -7,7 +7,7 @@ use super::{DataFlowGraph, Instruction, NumericType, SimplifyResult, Type, Value /// that value is returned. Otherwise None is returned. pub(super) fn simplify_cast( value: ValueId, - dst_typ: &Type, + dst_typ: NumericType, dfg: &mut DataFlowGraph, ) -> SimplifyResult { use SimplifyResult::*; @@ -15,60 +15,55 @@ pub(super) fn simplify_cast( if let Value::Instruction { instruction, .. 
} = &dfg[value] { if let Instruction::Cast(original_value, _) = &dfg[*instruction] { - return SimplifiedToInstruction(Instruction::Cast(*original_value, dst_typ.clone())); + return SimplifiedToInstruction(Instruction::Cast(*original_value, dst_typ)); } } if let Some(constant) = dfg.get_numeric_constant(value) { - let src_typ = dfg.type_of_value(value); + let src_typ = dfg.type_of_value(value).unwrap_numeric(); match (src_typ, dst_typ) { - (Type::Numeric(NumericType::NativeField), Type::Numeric(NumericType::NativeField)) => { + (NumericType::NativeField, NumericType::NativeField) => { // Field -> Field: use src value SimplifiedTo(value) } ( - Type::Numeric(NumericType::Unsigned { .. } | NumericType::Signed { .. }), - Type::Numeric(NumericType::NativeField), + NumericType::Unsigned { .. } | NumericType::Signed { .. }, + NumericType::NativeField, ) => { // Unsigned/Signed -> Field: redefine same constant as Field - SimplifiedTo(dfg.make_constant(constant, dst_typ.clone())) + SimplifiedTo(dfg.make_constant(constant, dst_typ)) } ( - Type::Numeric( - NumericType::NativeField - | NumericType::Unsigned { .. } - | NumericType::Signed { .. }, - ), - Type::Numeric(NumericType::Unsigned { bit_size }), + NumericType::NativeField + | NumericType::Unsigned { .. } + | NumericType::Signed { .. }, + NumericType::Unsigned { bit_size }, ) => { // Field/Unsigned -> unsigned: truncate - let integer_modulus = BigUint::from(2u128).pow(*bit_size); + let integer_modulus = BigUint::from(2u128).pow(bit_size); let constant: BigUint = BigUint::from_bytes_be(&constant.to_be_bytes()); let truncated = constant % integer_modulus; let truncated = FieldElement::from_be_bytes_reduce(&truncated.to_bytes_be()); - SimplifiedTo(dfg.make_constant(truncated, dst_typ.clone())) + SimplifiedTo(dfg.make_constant(truncated, dst_typ)) } ( - Type::Numeric( - NumericType::NativeField - | NumericType::Unsigned { .. } - | NumericType::Signed { .. 
}, - ), - Type::Numeric(NumericType::Signed { bit_size }), + NumericType::NativeField + | NumericType::Unsigned { .. } + | NumericType::Signed { .. }, + NumericType::Signed { bit_size }, ) => { // Field/Unsigned -> signed // We only simplify to signed when we are below the maximum signed integer of the destination type. - let integer_modulus = BigUint::from(2u128).pow(*bit_size - 1); + let integer_modulus = BigUint::from(2u128).pow(bit_size - 1); let constant_uint: BigUint = BigUint::from_bytes_be(&constant.to_be_bytes()); if constant_uint < integer_modulus { - SimplifiedTo(dfg.make_constant(constant, dst_typ.clone())) + SimplifiedTo(dfg.make_constant(constant, dst_typ)) } else { None } } - _ => None, } - } else if *dst_typ == dfg.type_of_value(value) { + } else if Type::Numeric(dst_typ) == dfg.type_of_value(value) { SimplifiedTo(value) } else { None diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs index 66f50440d64..5ae6a642a57 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction/constrain.rs @@ -1,5 +1,7 @@ use acvm::{acir::AcirField, FieldElement}; +use crate::ssa::ir::types::NumericType; + use super::{Binary, BinaryOp, ConstrainError, DataFlowGraph, Instruction, Type, Value, ValueId}; /// Try to decompose this constrain instruction. This constraint will be broken down such that it instead constrains @@ -20,7 +22,7 @@ pub(super) fn decompose_constrain( match (&dfg[lhs], &dfg[rhs]) { (Value::NumericConstant { constant, typ }, Value::Instruction { instruction, .. }) | (Value::Instruction { instruction, .. 
}, Value::NumericConstant { constant, typ }) - if *typ == Type::bool() => + if *typ == NumericType::bool() => { match dfg[*instruction] { Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Eq }) @@ -61,7 +63,7 @@ pub(super) fn decompose_constrain( // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it // will likely be removed through dead instruction elimination. let one = FieldElement::one(); - let one = dfg.make_constant(one, Type::bool()); + let one = dfg.make_constant(one, NumericType::bool()); [ decompose_constrain(lhs, one, msg, dfg), @@ -89,7 +91,7 @@ pub(super) fn decompose_constrain( // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it // will likely be removed through dead instruction elimination. let zero = FieldElement::zero(); - let zero = dfg.make_constant(zero, dfg.type_of_value(lhs)); + let zero = dfg.make_constant(zero, dfg.type_of_value(lhs).unwrap_numeric()); [ decompose_constrain(lhs, zero, msg, dfg), @@ -112,7 +114,8 @@ pub(super) fn decompose_constrain( // Note that this doesn't remove the value `v1` as it may be used in other instructions, but it // will likely be removed through dead instruction elimination. 
let reversed_constant = FieldElement::from(!constant.is_one()); - let reversed_constant = dfg.make_constant(reversed_constant, Type::bool()); + let reversed_constant = + dfg.make_constant(reversed_constant, NumericType::bool()); decompose_constrain(value, reversed_constant, msg, dfg) } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/list.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/list.rs new file mode 100644 index 00000000000..9a84d304444 --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/list.rs @@ -0,0 +1,187 @@ +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +/// A shared linked list type intended to be cloned +#[derive(Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct List { + head: Arc>, + len: usize, +} + +#[derive(Default, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +enum Node { + #[default] + Nil, + Cons(T, Arc>), +} + +impl Default for List { + fn default() -> Self { + List { head: Arc::new(Node::Nil), len: 0 } + } +} + +impl List { + pub fn new() -> Self { + Self::default() + } + + /// This is actually a push_front since we just create a new head for the + /// list. This is done so that the tail of the list can still be shared. + /// In the case of call stacks, the last node will be main, while the top + /// of the call stack will be the head of this list. + pub fn push_back(&mut self, value: T) { + self.len += 1; + self.head = Arc::new(Node::Cons(value, self.head.clone())); + } + + /// It is more efficient to iterate from the head of the list towards the tail. + /// For callstacks this means from the top of the call stack towards main. 
+ fn iter_rev(&self) -> IterRev { + IterRev { head: &self.head, len: self.len } + } + + pub fn clear(&mut self) { + *self = Self::default(); + } + + pub fn append(&mut self, other: Self) + where + T: Copy + std::fmt::Debug, + { + let other = other.into_iter().collect::>(); + + for item in other { + self.push_back(item); + } + } + + pub fn len(&self) -> usize { + self.len + } + + pub fn is_empty(&self) -> bool { + self.len == 0 + } + + fn pop_front(&mut self) -> Option + where + T: Copy, + { + match self.head.as_ref() { + Node::Nil => None, + Node::Cons(value, rest) => { + let value = *value; + self.head = rest.clone(); + self.len -= 1; + Some(value) + } + } + } + + pub fn truncate(&mut self, len: usize) + where + T: Copy, + { + if self.len > len { + for _ in 0..self.len - len { + self.pop_front(); + } + } + } + + pub fn unit(item: T) -> Self { + let mut this = Self::default(); + this.push_back(item); + this + } + + pub fn back(&self) -> Option<&T> { + match self.head.as_ref() { + Node::Nil => None, + Node::Cons(item, _) => Some(item), + } + } +} + +pub struct IterRev<'a, T> { + head: &'a Node, + len: usize, +} + +impl IntoIterator for List +where + T: Copy + std::fmt::Debug, +{ + type Item = T; + + type IntoIter = std::iter::Rev>; + + fn into_iter(self) -> Self::IntoIter { + let items: Vec<_> = self.iter_rev().copied().collect(); + items.into_iter().rev() + } +} + +impl<'a, T> IntoIterator for &'a List { + type Item = &'a T; + + type IntoIter = std::iter::Rev< as IntoIterator>::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + let items: Vec<_> = self.iter_rev().collect(); + items.into_iter().rev() + } +} + +impl<'a, T> Iterator for IterRev<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option { + match self.head { + Node::Nil => None, + Node::Cons(value, rest) => { + self.head = rest; + Some(value) + } + } + } + + fn size_hint(&self) -> (usize, Option) { + (0, Some(self.len)) + } +} + +impl<'a, T> ExactSizeIterator for IterRev<'a, T> {} + +impl 
std::fmt::Debug for List +where + T: std::fmt::Debug, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "[")?; + for (i, item) in self.iter_rev().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{item:?}")?; + } + write!(f, "]") + } +} + +impl std::fmt::Display for List +where + T: std::fmt::Display, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "[")?; + for (i, item) in self.iter_rev().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{item}")?; + } + write!(f, "]") + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs index 23f5380f030..0fb02f19b14 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/map.rs @@ -4,7 +4,7 @@ use std::{ collections::BTreeMap, hash::Hash, str::FromStr, - sync::atomic::{AtomicUsize, Ordering}, + sync::atomic::{AtomicU32, Ordering}, }; use thiserror::Error; @@ -18,7 +18,7 @@ use thiserror::Error; /// another map where it will likely be invalid. #[derive(Serialize, Deserialize)] pub(crate) struct Id { - index: usize, + index: u32, // If we do not skip this field it will simply serialize as `"_marker":null` which is useless extra data #[serde(skip)] _marker: std::marker::PhantomData, @@ -26,14 +26,15 @@ pub(crate) struct Id { impl Id { /// Constructs a new Id for the given index. - /// This constructor is deliberately private to prevent - /// constructing invalid IDs. - pub(crate) fn new(index: usize) -> Self { + /// + /// This is private so that we can guarantee ids created from this function + /// point to valid T values in their external maps. + fn new(index: u32) -> Self { Self { index, _marker: std::marker::PhantomData } } /// Returns the underlying index of this Id. 
- pub(crate) fn to_usize(self) -> usize { + pub(crate) fn to_u32(self) -> u32 { self.index } @@ -43,7 +44,7 @@ impl Id { /// as unlike DenseMap::push and SparseMap::push, the Ids created /// here are likely invalid for any particularly map. #[cfg(test)] - pub(crate) fn test_new(index: usize) -> Self { + pub(crate) fn test_new(index: u32) -> Self { Self::new(index) } } @@ -187,7 +188,7 @@ impl DenseMap { /// Adds an element to the map. /// Returns the identifier/reference to that element. pub(crate) fn insert(&mut self, element: T) -> Id { - let id = Id::new(self.storage.len()); + let id = Id::new(self.storage.len().try_into().unwrap()); self.storage.push(element); id } @@ -195,7 +196,7 @@ impl DenseMap { /// Given the Id of the element being created, adds the element /// returned by the given function to the map pub(crate) fn insert_with_id(&mut self, f: impl FnOnce(Id) -> T) -> Id { - let id = Id::new(self.storage.len()); + let id = Id::new(self.storage.len().try_into().unwrap()); self.storage.push(f(id)); id } @@ -204,7 +205,7 @@ impl DenseMap { /// /// The id-element pairs are ordered by the numeric values of the ids. pub(crate) fn iter(&self) -> impl ExactSizeIterator, &T)> { - let ids_iter = (0..self.storage.len()).map(|idx| Id::new(idx)); + let ids_iter = (0..self.storage.len() as u32).map(|idx| Id::new(idx)); ids_iter.zip(self.storage.iter()) } } @@ -219,13 +220,13 @@ impl std::ops::Index> for DenseMap { type Output = T; fn index(&self, id: Id) -> &Self::Output { - &self.storage[id.index] + &self.storage[id.index as usize] } } impl std::ops::IndexMut> for DenseMap { fn index_mut(&mut self, id: Id) -> &mut Self::Output { - &mut self.storage[id.index] + &mut self.storage[id.index as usize] } } @@ -253,7 +254,7 @@ impl SparseMap { /// Adds an element to the map. /// Returns the identifier/reference to that element. 
pub(crate) fn insert(&mut self, element: T) -> Id { - let id = Id::new(self.storage.len()); + let id = Id::new(self.storage.len().try_into().unwrap()); self.storage.insert(id, element); id } @@ -261,7 +262,7 @@ impl SparseMap { /// Given the Id of the element being created, adds the element /// returned by the given function to the map pub(crate) fn insert_with_id(&mut self, f: impl FnOnce(Id) -> T) -> Id { - let id = Id::new(self.storage.len()); + let id = Id::new(self.storage.len().try_into().unwrap()); self.storage.insert(id, f(id)); id } @@ -365,7 +366,7 @@ impl std::ops::Index<&K> for TwoWayMap { /// This type wraps an AtomicUsize so it can safely be used across threads. #[derive(Debug, Serialize, Deserialize)] pub(crate) struct AtomicCounter { - next: AtomicUsize, + next: AtomicU32, _marker: std::marker::PhantomData, } @@ -373,7 +374,7 @@ impl AtomicCounter { /// Create a new counter starting after the given Id. /// Use AtomicCounter::default() to start at zero. pub(crate) fn starting_after(id: Id) -> Self { - Self { next: AtomicUsize::new(id.index + 1), _marker: Default::default() } + Self { next: AtomicU32::new(id.index + 1), _marker: Default::default() } } /// Return the next fresh id diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/mod.rs index 3ef680dda0f..89ba22e8b79 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/mod.rs @@ -5,6 +5,7 @@ pub(crate) mod dom; pub(crate) mod function; pub(crate) mod function_inserter; pub(crate) mod instruction; +pub mod list; pub(crate) mod map; pub(crate) mod post_order; pub(crate) mod printer; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs index aa2952d5abc..29e79728303 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs +++ 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/printer.rs @@ -1,8 +1,5 @@ //! This file is for pretty-printing the SSA IR in a human-readable form for debugging. -use std::{ - collections::HashSet, - fmt::{Formatter, Result}, -}; +use std::fmt::{Formatter, Result}; use acvm::acir::AcirField; use im::Vector; @@ -21,28 +18,10 @@ use super::{ /// Helper function for Function's Display impl to pretty-print the function with the given formatter. pub(crate) fn display_function(function: &Function, f: &mut Formatter) -> Result { writeln!(f, "{} fn {} {} {{", function.runtime(), function.name(), function.id())?; - display_block_with_successors(function, function.entry_block(), &mut HashSet::new(), f)?; - write!(f, "}}") -} - -/// Displays a block followed by all of its successors recursively. -/// This uses a HashSet to keep track of the visited blocks. Otherwise -/// there would be infinite recursion for any loops in the IR. -pub(crate) fn display_block_with_successors( - function: &Function, - block_id: BasicBlockId, - visited: &mut HashSet, - f: &mut Formatter, -) -> Result { - display_block(function, block_id, f)?; - visited.insert(block_id); - - for successor in function.dfg[block_id].successors() { - if !visited.contains(&successor) { - display_block_with_successors(function, successor, visited, f)?; - } + for block_id in function.reachable_blocks() { + display_block(function, block_id, f)?; } - Ok(()) + write!(f, "}}") } /// Display a single block. This will not display the block's successors. 
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs index 4e4f7e8aa62..0dd7fd92ee5 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -30,6 +30,31 @@ impl NumericType { } } + /// Creates a NumericType::Signed type + pub(crate) fn signed(bit_size: u32) -> NumericType { + NumericType::Signed { bit_size } + } + + /// Creates a NumericType::Unsigned type + pub(crate) fn unsigned(bit_size: u32) -> NumericType { + NumericType::Unsigned { bit_size } + } + + /// Creates the u1 type + pub(crate) fn bool() -> NumericType { + NumericType::Unsigned { bit_size: 1 } + } + + /// Creates the char type, represented as u8. + pub(crate) fn char() -> NumericType { + NumericType::Unsigned { bit_size: 8 } + } + + /// Creates the type of an array's length. + pub(crate) fn length_type() -> NumericType { + NumericType::Unsigned { bit_size: SSA_WORD_SIZE } + } + /// Returns None if the given Field value is within the numeric limits /// for the current NumericType. Otherwise returns a string describing /// the limits, as a range. @@ -63,6 +88,10 @@ impl NumericType { NumericType::NativeField => None, } } + + pub(crate) fn is_unsigned(&self) -> bool { + matches!(self, NumericType::Unsigned { .. }) + } } /// All types representable in the IR. @@ -125,6 +154,14 @@ impl Type { Type::unsigned(SSA_WORD_SIZE) } + /// Returns the inner NumericType if this is one, or panics otherwise + pub(crate) fn unwrap_numeric(&self) -> NumericType { + match self { + Type::Numeric(numeric) => *numeric, + other => panic!("Expected NumericType, found {other}"), + } + } + /// Returns the bit size of the provided numeric type. 
/// /// # Panics diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/value.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/value.rs index ef494200308..ec7a8e25246 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/value.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/value.rs @@ -1,3 +1,5 @@ +use std::borrow::Cow; + use acvm::FieldElement; use serde::{Deserialize, Serialize}; @@ -7,7 +9,7 @@ use super::{ function::FunctionId, instruction::{InstructionId, Intrinsic}, map::Id, - types::Type, + types::{NumericType, Type}, }; pub(crate) type ValueId = Id; @@ -34,7 +36,7 @@ pub(crate) enum Value { Param { block: BasicBlockId, position: usize, typ: Type }, /// This Value originates from a numeric constant - NumericConstant { constant: FieldElement, typ: Type }, + NumericConstant { constant: FieldElement, typ: NumericType }, /// This Value refers to a function in the IR. /// Functions always have the type Type::Function. @@ -55,14 +57,13 @@ pub(crate) enum Value { impl Value { /// Retrieves the type of this Value - pub(crate) fn get_type(&self) -> &Type { + pub(crate) fn get_type(&self) -> Cow { match self { - Value::Instruction { typ, .. } => typ, - Value::Param { typ, .. } => typ, - Value::NumericConstant { typ, .. } => typ, - Value::Function { .. } => &Type::Function, - Value::Intrinsic { .. } => &Type::Function, - Value::ForeignFunction { .. } => &Type::Function, + Value::Instruction { typ, .. } | Value::Param { typ, .. } => Cow::Borrowed(typ), + Value::NumericConstant { typ, .. } => Cow::Owned(Type::Numeric(*typ)), + Value::Function { .. } | Value::Intrinsic { .. } | Value::ForeignFunction { .. 
} => { + Cow::Owned(Type::Function) + } } } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs index 96de22600a4..09339cf0797 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/array_set.rs @@ -209,6 +209,8 @@ mod tests { b1(v0: u32): v8 = lt v0, u32 5 jmpif v8 then: b3, else: b2 + b2(): + return b3(): v9 = eq v0, u32 5 jmpif v9 then: b4, else: b5 @@ -224,8 +226,6 @@ mod tests { store v15 at v4 v17 = add v0, u32 1 jmp b1(v17) - b2(): - return } "; let ssa = Ssa::from_str(src).unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs index 75cdea349b7..c6cdffd3bc3 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/as_slice_length.rs @@ -2,7 +2,7 @@ use crate::ssa::{ ir::{ function::Function, instruction::{Instruction, InstructionId, Intrinsic}, - types::Type, + types::{NumericType, Type}, value::Value, }, ssa_gen::Ssa, @@ -71,7 +71,7 @@ fn replace_known_slice_lengths( // This isn't strictly necessary as a new result will be defined the next time for which the instruction // is reinserted but this avoids leaving the program in an invalid state. 
func.dfg.replace_result(instruction_id, original_slice_length); - let known_length = func.dfg.make_constant(known_length.into(), Type::length_type()); + let known_length = func.dfg.make_constant(known_length.into(), NumericType::length_type()); func.dfg.set_value_from_id(original_slice_length, known_length); }); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 93ca428c6d0..e2379043541 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -43,7 +43,7 @@ use crate::{ dom::DominatorTree, function::{Function, FunctionId, RuntimeType}, instruction::{Instruction, InstructionId}, - types::Type, + types::{NumericType, Type}, value::{Value, ValueId}, }, ssa_gen::Ssa, @@ -125,11 +125,13 @@ impl Ssa { } // The ones that remain are never called: let's remove them. - for func_id in brillig_functions.keys() { + for (func_id, func) in &brillig_functions { // We never want to remove the main function (it could be `unconstrained` or it // could have been turned into brillig if `--force-brillig` was given). // We also don't want to remove entry points. - if self.main_id == *func_id || self.entry_point_to_generated_index.contains_key(func_id) + let runtime = func.runtime(); + if self.main_id == *func_id + || (runtime.is_entry_point() && matches!(runtime, RuntimeType::Acir(_))) { continue; } @@ -274,7 +276,7 @@ impl<'brillig> Context<'brillig> { // Default side effect condition variable with an enabled state. 
let mut side_effects_enabled_var = - function.dfg.make_constant(FieldElement::one(), Type::bool()); + function.dfg.make_constant(FieldElement::one(), NumericType::bool()); for instruction_id in instructions { self.fold_constants_into_instruction( @@ -655,7 +657,7 @@ impl<'brillig> Context<'brillig> { dfg: &mut DataFlowGraph, ) -> ValueId { match typ { - Type::Numeric(_) => { + Type::Numeric(typ) => { let memory = memory_values[*memory_index]; *memory_index += 1; @@ -829,7 +831,10 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, - ir::{map::Id, types::Type}, + ir::{ + map::Id, + types::{NumericType, Type}, + }, opt::assert_normalized_ssa_equals, Ssa, }; @@ -853,7 +858,7 @@ mod test { assert_eq!(instructions.len(), 2); // The final return is not counted let v0 = main.parameters()[0]; - let two = main.dfg.make_constant(2_u128.into(), Type::field()); + let two = main.dfg.make_constant(2_u128.into(), NumericType::NativeField); main.dfg.set_value_from_id(v0, two); @@ -889,7 +894,7 @@ mod test { // Note that this constant guarantees that `v0/constant < 2^8`. We then do not need to truncate the result. let constant = 2_u128.pow(8); - let constant = main.dfg.make_constant(constant.into(), Type::unsigned(16)); + let constant = main.dfg.make_constant(constant.into(), NumericType::unsigned(16)); main.dfg.set_value_from_id(v1, constant); @@ -927,7 +932,7 @@ mod test { // Note that this constant does not guarantee that `v0/constant < 2^8`. We must then truncate the result. 
let constant = 2_u128.pow(8) - 1; - let constant = main.dfg.make_constant(constant.into(), Type::unsigned(16)); + let constant = main.dfg.make_constant(constant.into(), NumericType::unsigned(16)); main.dfg.set_value_from_id(v1, constant); @@ -1148,7 +1153,7 @@ mod test { // Compiling main let mut builder = FunctionBuilder::new("main".into(), main_id); let v0 = builder.add_parameter(Type::unsigned(64)); - let zero = builder.numeric_constant(0u128, Type::unsigned(64)); + let zero = builder.numeric_constant(0u128, NumericType::unsigned(64)); let typ = Type::Array(Arc::new(vec![Type::unsigned(64)]), 25); let array_contents = im::vector![ @@ -1521,18 +1526,18 @@ mod test { b0(v0: u32): v2 = eq v0, u32 0 jmpif v2 then: b4, else: b1 - b4(): - v5 = sub v0, u32 1 - jmp b5() - b5(): - return b1(): jmpif v0 then: b3, else: b2 + b2(): + jmp b5() b3(): v4 = sub v0, u32 1 // We can't hoist this because v0 is zero here and it will lead to an underflow jmp b5() - b2(): + b4(): + v5 = sub v0, u32 1 jmp b5() + b5(): + return } "; let ssa = Ssa::from_str(src).unwrap(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index cfeb8751f25..7d7798fd30a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -130,13 +130,14 @@ impl DefunctionalizationContext { // Change the type of all the values that are not call targets to NativeField let value_ids = vecmap(func.dfg.values_iter(), |(id, _)| id); for value_id in value_ids { - if let Type::Function = &func.dfg[value_id].get_type() { + if let Type::Function = func.dfg[value_id].get_type().as_ref() { match &func.dfg[value_id] { // If the value is a static function, transform it to the function id Value::Function(id) => { if !call_target_values.contains(&value_id) { + let field = NumericType::NativeField; let new_value = - 
func.dfg.make_constant(function_id_to_field(*id), Type::field()); + func.dfg.make_constant(function_id_to_field(*id), field); func.dfg.set_value_from_id(value_id, new_value); } } @@ -267,7 +268,7 @@ fn create_apply_functions( } fn function_id_to_field(function_id: FunctionId) -> FieldElement { - (function_id.to_usize() as u128).into() + (function_id.to_u32() as u128).into() } /// Creates an apply function for the given signature and variants @@ -287,10 +288,8 @@ fn create_apply_function( let is_last = index == function_ids.len() - 1; let mut next_function_block = None; - let function_id_constant = function_builder.numeric_constant( - function_id_to_field(*function_id), - Type::Numeric(NumericType::NativeField), - ); + let function_id_constant = function_builder + .numeric_constant(function_id_to_field(*function_id), NumericType::NativeField); // If it's not the last function to dispatch, create an if statement if !is_last { diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs index f7ac6f7b313..675d7fd854e 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -1,23 +1,23 @@ //! Dead Instruction Elimination (DIE) pass: Removes any instruction without side-effects for //! which the results are unused. 
use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; -use im::Vector; -use noirc_errors::Location; use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; use crate::ssa::{ ir::{ basic_block::{BasicBlock, BasicBlockId}, - dfg::DataFlowGraph, + dfg::{CallStack, DataFlowGraph}, function::Function, instruction::{BinaryOp, Instruction, InstructionId, Intrinsic}, post_order::PostOrder, - types::Type, + types::{NumericType, Type}, value::{Value, ValueId}, }, - ssa_gen::{Ssa, SSA_WORD_SIZE}, + ssa_gen::Ssa, }; +use super::rc::{pop_rc_for, RcInstruction}; + impl Ssa { /// Performs Dead Instruction Elimination (DIE) to remove any instructions with /// unused results. @@ -106,6 +106,8 @@ impl Context { let instructions_len = block.instructions().len(); + let mut rc_tracker = RcTracker::default(); + // Indexes of instructions that might be out of bounds. // We'll remove those, but before that we'll insert bounds checks for them. let mut possible_index_out_of_bounds_indexes = Vec::new(); @@ -133,8 +135,13 @@ impl Context { }); } } + + rc_tracker.track_inc_rcs_to_remove(*instruction_id, function); } + self.instructions_to_remove.extend(rc_tracker.get_non_mutated_arrays(&function.dfg)); + self.instructions_to_remove.extend(rc_tracker.rc_pairs_to_remove); + // If there are some instructions that might trigger an out of bounds error, // first add constrain checks. Then run the DIE pass again, which will remove those // but leave the constrains (any any value needed by those constrains) @@ -285,25 +292,25 @@ impl Context { let (lhs, rhs) = if function.dfg.get_numeric_constant(*index).is_some() { // If we are here it means the index is known but out of bounds. That's always an error! 
- let false_const = function.dfg.make_constant(false.into(), Type::bool()); - let true_const = function.dfg.make_constant(true.into(), Type::bool()); + let false_const = function.dfg.make_constant(false.into(), NumericType::bool()); + let true_const = function.dfg.make_constant(true.into(), NumericType::bool()); (false_const, true_const) } else { // `index` will be relative to the flattened array length, so we need to take that into account let array_length = function.dfg.type_of_value(*array).flattened_size(); // If we are here it means the index is dynamic, so let's add a check that it's less than length + let length_type = NumericType::length_type(); let index = function.dfg.insert_instruction_and_results( - Instruction::Cast(*index, Type::unsigned(SSA_WORD_SIZE)), + Instruction::Cast(*index, length_type), block_id, None, call_stack.clone(), ); let index = index.first(); - let array_typ = Type::unsigned(SSA_WORD_SIZE); let array_length = - function.dfg.make_constant((array_length as u128).into(), array_typ); + function.dfg.make_constant((array_length as u128).into(), length_type); let is_index_out_of_bounds = function.dfg.insert_instruction_and_results( Instruction::binary(BinaryOp::Lt, index, array_length), block_id, @@ -311,7 +318,7 @@ impl Context { call_stack.clone(), ); let is_index_out_of_bounds = is_index_out_of_bounds.first(); - let true_const = function.dfg.make_constant(true.into(), Type::bool()); + let true_const = function.dfg.make_constant(true.into(), NumericType::bool()); (is_index_out_of_bounds, true_const) }; @@ -484,7 +491,7 @@ fn apply_side_effects( rhs: ValueId, function: &mut Function, block_id: BasicBlockId, - call_stack: Vector, + call_stack: CallStack, ) -> (ValueId, ValueId) { // See if there's an active "enable side effects" condition let Some(condition) = side_effects_condition else { @@ -495,12 +502,9 @@ fn apply_side_effects( // Condition needs to be cast to argument type in order to multiply them together. 
// In our case, lhs is always a boolean. - let casted_condition = dfg.insert_instruction_and_results( - Instruction::Cast(condition, Type::bool()), - block_id, - None, - call_stack.clone(), - ); + let cast = Instruction::Cast(condition, NumericType::bool()); + let casted_condition = + dfg.insert_instruction_and_results(cast, block_id, None, call_stack.clone()); let casted_condition = casted_condition.first(); let lhs = dfg.insert_instruction_and_results( @@ -522,6 +526,112 @@ fn apply_side_effects( (lhs, rhs) } +#[derive(Default)] +struct RcTracker { + // We can track IncrementRc instructions per block to determine whether they are useless. + // IncrementRc and DecrementRc instructions are normally side effectual instructions, but we remove + // them if their value is not used anywhere in the function. However, even when their value is used, their existence + // is pointless logic if there is no array set between the increment and the decrement of the reference counter. + // We track per block whether an IncrementRc instruction has a paired DecrementRc instruction + // with the same value but no array set in between. + // If we see an inc/dec RC pair within a block we can safely remove both instructions. + rcs_with_possible_pairs: HashMap>, + rc_pairs_to_remove: HashSet, + // We also separately track all IncrementRc instructions and all array types which have been mutably borrowed. + // If an array is the same type as one of those non-mutated array types, we can safely remove all IncrementRc instructions on that array. + inc_rcs: HashMap>, + mutated_array_types: HashSet, + // The SSA often creates patterns where after simplifications we end up with repeat + // IncrementRc instructions on the same value. We track whether the previous instruction was an IncrementRc, + // and if the current instruction is also an IncrementRc on the same value we remove the current instruction. 
+ // `None` if the previous instruction was anything other than an IncrementRc + previous_inc_rc: Option, +} + +impl RcTracker { + fn track_inc_rcs_to_remove(&mut self, instruction_id: InstructionId, function: &Function) { + let instruction = &function.dfg[instruction_id]; + + if let Instruction::IncrementRc { value } = instruction { + if let Some(previous_value) = self.previous_inc_rc { + if previous_value == *value { + self.rc_pairs_to_remove.insert(instruction_id); + } + } + self.previous_inc_rc = Some(*value); + } else { + self.previous_inc_rc = None; + } + + // DIE loops over a block in reverse order, so we insert an RC instruction for possible removal + // when we see a DecrementRc and check whether it was possibly mutated when we see an IncrementRc. + match instruction { + Instruction::IncrementRc { value } => { + if let Some(inc_rc) = + pop_rc_for(*value, function, &mut self.rcs_with_possible_pairs) + { + if !inc_rc.possibly_mutated { + self.rc_pairs_to_remove.insert(inc_rc.id); + self.rc_pairs_to_remove.insert(instruction_id); + } + } + + self.inc_rcs.entry(*value).or_default().insert(instruction_id); + } + Instruction::DecrementRc { value } => { + let typ = function.dfg.type_of_value(*value); + + // We assume arrays aren't mutated until we find an array_set + let dec_rc = + RcInstruction { id: instruction_id, array: *value, possibly_mutated: false }; + self.rcs_with_possible_pairs.entry(typ).or_default().push(dec_rc); + } + Instruction::ArraySet { array, .. } => { + let typ = function.dfg.type_of_value(*array); + if let Some(dec_rcs) = self.rcs_with_possible_pairs.get_mut(&typ) { + for dec_rc in dec_rcs { + dec_rc.possibly_mutated = true; + } + } + + self.mutated_array_types.insert(typ); + } + Instruction::Store { value, .. } => { + // We are very conservative and say that any store of an array type means it has the potential to be mutated. + let typ = function.dfg.type_of_value(*value); + if matches!(&typ, Type::Array(..) 
| Type::Slice(..)) { + self.mutated_array_types.insert(typ); + } + } + Instruction::Call { arguments, .. } => { + for arg in arguments { + let typ = function.dfg.type_of_value(*arg); + if matches!(&typ, Type::Array(..) | Type::Slice(..)) { + self.mutated_array_types.insert(typ); + } + } + } + _ => {} + } + } + + fn get_non_mutated_arrays(&self, dfg: &DataFlowGraph) -> HashSet { + self.inc_rcs + .keys() + .filter_map(|value| { + let typ = dfg.type_of_value(*value); + if !self.mutated_array_types.contains(&typ) { + Some(&self.inc_rcs[value]) + } else { + None + } + }) + .flatten() + .copied() + .collect() + } +} + #[cfg(test)] mod test { use std::sync::Arc; @@ -530,7 +640,10 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, - ir::{map::Id, types::Type}, + ir::{ + map::Id, + types::{NumericType, Type}, + }, opt::assert_normalized_ssa_equals, Ssa, }; @@ -602,6 +715,30 @@ mod test { assert_normalized_ssa_equals(ssa, expected); } + #[test] + fn remove_useless_paired_rcs_even_when_used() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 2]): + inc_rc v0 + v2 = array_get v0, index u32 0 -> Field + dec_rc v0 + return v2 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + let expected = " + acir(inline) fn main f0 { + b0(v0: [Field; 2]): + v2 = array_get v0, index u32 0 -> Field + return v2 + } + "; + let ssa = ssa.dead_instruction_elimination(); + assert_normalized_ssa_equals(ssa, expected); + } + #[test] fn keep_paired_rcs_with_array_set() { let src = " @@ -639,7 +776,7 @@ mod test { // Compiling main let mut builder = FunctionBuilder::new("main".into(), main_id); - let zero = builder.numeric_constant(0u128, Type::unsigned(32)); + let zero = builder.numeric_constant(0u128, NumericType::unsigned(32)); let array_type = Type::Array(Arc::new(vec![Type::unsigned(32)]), 2); let v1 = builder.insert_make_array(vector![zero, zero], array_type.clone()); let v2 = builder.insert_allocate(array_type.clone()); @@ -652,7 +789,7 @@ mod test { 
builder.switch_to_block(b1); let v3 = builder.insert_load(v2, array_type); - let one = builder.numeric_constant(1u128, Type::unsigned(32)); + let one = builder.numeric_constant(1u128, NumericType::unsigned(32)); let v5 = builder.insert_array_set(v3, zero, one); builder.terminate_with_return(vec![v5]); @@ -671,6 +808,49 @@ mod test { assert_eq!(main.dfg[b1].instructions().len(), 2); } + #[test] + fn keep_inc_rc_on_borrowed_array_set() { + // acir(inline) fn main f0 { + // b0(v0: [u32; 2]): + // inc_rc v0 + // v3 = array_set v0, index u32 0, value u32 1 + // inc_rc v0 + // inc_rc v0 + // inc_rc v0 + // v4 = array_get v3, index u32 1 + // return v4 + // } + let src = " + acir(inline) fn main f0 { + b0(v0: [u32; 2]): + inc_rc v0 + v3 = array_set v0, index u32 0, value u32 1 + inc_rc v0 + inc_rc v0 + inc_rc v0 + v4 = array_get v3, index u32 1 -> u32 + return v4 + } + "; + let ssa = Ssa::from_str(src).unwrap(); + + // We expect the output to be unchanged + // Except for the repeated inc_rc instructions + let expected = " + acir(inline) fn main f0 { + b0(v0: [u32; 2]): + inc_rc v0 + v3 = array_set v0, index u32 0, value u32 1 + inc_rc v0 + v4 = array_get v3, index u32 1 -> u32 + return v4 + } + "; + + let ssa = ssa.dead_instruction_elimination(); + assert_normalized_ssa_equals(ssa, expected); + } + #[test] fn does_not_remove_inc_or_dec_rc_of_if_they_are_loaded_from_a_reference() { let src = " @@ -691,4 +871,69 @@ mod test { let ssa = ssa.dead_instruction_elimination(); assert_normalized_ssa_equals(ssa, src); } + + #[test] + fn remove_inc_rcs_that_are_never_mutably_borrowed() { + let src = " + acir(inline) fn main f0 { + b0(v0: [Field; 2]): + inc_rc v0 + inc_rc v0 + inc_rc v0 + v2 = array_get v0, index u32 0 -> Field + inc_rc v0 + return v2 + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + let main = ssa.main(); + + // The instruction count never includes the terminator instruction + assert_eq!(main.dfg[main.entry_block()].instructions().len(), 5); + + let expected = " 
+ acir(inline) fn main f0 { + b0(v0: [Field; 2]): + v2 = array_get v0, index u32 0 -> Field + return v2 + } + "; + + let ssa = ssa.dead_instruction_elimination(); + assert_normalized_ssa_equals(ssa, expected); + } + + #[test] + fn do_not_remove_inc_rc_if_used_as_call_arg() { + // We do not want to remove inc_rc instructions on values + // that are passed as call arguments. + // + // We could have previously inlined a function which does the following: + // - Accepts a mutable array as an argument + // - Writes to that array + // - Passes the new array to another call + // + // It is possible then that the mutation gets simplified out after inlining. + // If we then remove the inc_rc as we see no mutations to that array in the block, + // we may end up with an the incorrect reference count. + let src = " + brillig(inline) fn main f0 { + b0(v0: Field): + v4 = make_array [Field 0, Field 1, Field 2] : [Field; 3] + inc_rc v4 + v6 = call f1(v4) -> Field + constrain v0 == v6 + return + } + brillig(inline) fn foo f1 { + b0(v0: [Field; 3]): + return u32 1 + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + let ssa = ssa.dead_instruction_elimination(); + assert_normalized_ssa_equals(ssa, src); + } } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index 3fbccf93ec9..9c3fd72f281 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -144,7 +144,7 @@ use crate::ssa::{ function::{Function, FunctionId, RuntimeType}, function_inserter::FunctionInserter, instruction::{BinaryOp, Instruction, InstructionId, Intrinsic, TerminatorInstruction}, - types::Type, + types::{NumericType, Type}, value::{Value, ValueId}, }, ssa_gen::Ssa, @@ -332,15 +332,12 @@ impl<'f> Context<'f> { for instruction in instructions.iter() { if self.is_no_predicate(no_predicates, instruction) { // disable side 
effect for no_predicate functions - let one = self - .inserter - .function - .dfg - .make_constant(FieldElement::one(), Type::unsigned(1)); + let bool_type = NumericType::bool(); + let one = self.inserter.function.dfg.make_constant(FieldElement::one(), bool_type); self.insert_instruction_with_typevars( Instruction::EnableSideEffectsIf { condition: one }, None, - im::Vector::new(), + CallStack::new(), ); self.push_instruction(*instruction); self.insert_current_side_effects_enabled(); @@ -540,7 +537,7 @@ impl<'f> Context<'f> { let else_condition = if let Some(branch) = cond_context.else_branch { branch.condition } else { - self.inserter.function.dfg.make_constant(FieldElement::zero(), Type::bool()) + self.inserter.function.dfg.make_constant(FieldElement::zero(), NumericType::bool()) }; let block = self.inserter.function.entry_block(); @@ -606,7 +603,7 @@ impl<'f> Context<'f> { let condition = match self.get_last_condition() { Some(cond) => cond, None => { - self.inserter.function.dfg.make_constant(FieldElement::one(), Type::unsigned(1)) + self.inserter.function.dfg.make_constant(FieldElement::one(), NumericType::bool()) } }; let enable_side_effects = Instruction::EnableSideEffectsIf { condition }; @@ -653,13 +650,9 @@ impl<'f> Context<'f> { // Condition needs to be cast to argument type in order to multiply them together. 
let argument_type = self.inserter.function.dfg.type_of_value(lhs); - // Sanity check that we're not constraining non-primitive types - assert!(matches!(argument_type, Type::Numeric(_))); - let casted_condition = self.insert_instruction( - Instruction::Cast(condition, argument_type), - call_stack.clone(), - ); + let cast = Instruction::Cast(condition, argument_type.unwrap_numeric()); + let casted_condition = self.insert_instruction(cast, call_stack.clone()); let lhs = self.insert_instruction( Instruction::binary(BinaryOp::Mul, lhs, casted_condition), @@ -708,10 +701,8 @@ impl<'f> Context<'f> { // Condition needs to be cast to argument type in order to multiply them together. let argument_type = self.inserter.function.dfg.type_of_value(value); - let casted_condition = self.insert_instruction( - Instruction::Cast(condition, argument_type), - call_stack.clone(), - ); + let cast = Instruction::Cast(condition, argument_type.unwrap_numeric()); + let casted_condition = self.insert_instruction(cast, call_stack.clone()); let value = self.insert_instruction( Instruction::binary(BinaryOp::Mul, value, casted_condition), @@ -725,10 +716,8 @@ impl<'f> Context<'f> { let field = arguments[0]; let argument_type = self.inserter.function.dfg.type_of_value(field); - let casted_condition = self.insert_instruction( - Instruction::Cast(condition, argument_type), - call_stack.clone(), - ); + let cast = Instruction::Cast(condition, argument_type.unwrap_numeric()); + let casted_condition = self.insert_instruction(cast, call_stack.clone()); let field = self.insert_instruction( Instruction::binary(BinaryOp::Mul, field, casted_condition), call_stack.clone(), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs index 6ea235b9414..c2b071a9c9a 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs +++ 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg/value_merger.rs @@ -5,7 +5,7 @@ use crate::ssa::ir::{ basic_block::BasicBlockId, dfg::{CallStack, DataFlowGraph, InsertInstructionResult}, instruction::{BinaryOp, Instruction}, - types::Type, + types::{NumericType, Type}, value::{Value, ValueId}, }; @@ -95,8 +95,8 @@ impl<'a> ValueMerger<'a> { then_value: ValueId, else_value: ValueId, ) -> ValueId { - let then_type = dfg.type_of_value(then_value); - let else_type = dfg.type_of_value(else_value); + let then_type = dfg.type_of_value(then_value).unwrap_numeric(); + let else_type = dfg.type_of_value(else_value).unwrap_numeric(); assert_eq!( then_type, else_type, "Expected values merged to be of the same type but found {then_type} and {else_type}" @@ -112,22 +112,13 @@ impl<'a> ValueMerger<'a> { let call_stack = if then_call_stack.is_empty() { else_call_stack } else { then_call_stack }; // We must cast the bool conditions to the actual numeric type used by each value. - let then_condition = dfg - .insert_instruction_and_results( - Instruction::Cast(then_condition, then_type), - block, - None, - call_stack.clone(), - ) - .first(); - let else_condition = dfg - .insert_instruction_and_results( - Instruction::Cast(else_condition, else_type), - block, - None, - call_stack.clone(), - ) - .first(); + let cast = Instruction::Cast(then_condition, then_type); + let then_condition = + dfg.insert_instruction_and_results(cast, block, None, call_stack.clone()).first(); + + let cast = Instruction::Cast(else_condition, else_type); + let else_condition = + dfg.insert_instruction_and_results(cast, block, None, call_stack.clone()).first(); let mul = Instruction::binary(BinaryOp::Mul, then_condition, then_value); let then_value = @@ -175,7 +166,7 @@ impl<'a> ValueMerger<'a> { for (element_index, element_type) in element_types.iter().enumerate() { let index = ((i * element_types.len() as u32 + element_index as u32) as u128).into(); - let index = self.dfg.make_constant(index, 
Type::field()); + let index = self.dfg.make_constant(index, NumericType::NativeField); let typevars = Some(vec![element_type.clone()]); @@ -243,7 +234,7 @@ impl<'a> ValueMerger<'a> { for (element_index, element_type) in element_types.iter().enumerate() { let index_u32 = i * element_types.len() as u32 + element_index as u32; let index_value = (index_u32 as u128).into(); - let index = self.dfg.make_constant(index_value, Type::field()); + let index = self.dfg.make_constant(index_value, NumericType::NativeField); let typevars = Some(vec![element_type.clone()]); @@ -295,7 +286,7 @@ impl<'a> ValueMerger<'a> { match typ { Type::Numeric(numeric_type) => { let zero = FieldElement::zero(); - self.dfg.make_constant(zero, Type::Numeric(*numeric_type)) + self.dfg.make_constant(zero, *numeric_type) } Type::Array(element_types, len) => { let mut array = im::Vector::new(); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs new file mode 100644 index 00000000000..567a0795edc --- /dev/null +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/hint.rs @@ -0,0 +1,105 @@ +#[cfg(test)] +mod tests { + use acvm::acir::circuit::ExpressionWidth; + + use crate::{ + errors::RuntimeError, + ssa::{ + opt::assert_normalized_ssa_equals, optimize_all, Ssa, SsaBuilder, SsaEvaluatorOptions, + SsaLogging, + }, + }; + + fn run_all_passes(ssa: Ssa) -> Result { + let options = &SsaEvaluatorOptions { + ssa_logging: SsaLogging::None, + enable_brillig_logging: false, + force_brillig_output: false, + print_codegen_timings: false, + expression_width: ExpressionWidth::default(), + emit_ssa: None, + skip_underconstrained_check: true, + skip_brillig_constraints_check: true, + inliner_aggressiveness: 0, + max_bytecode_increase_percent: None, + }; + + let builder = SsaBuilder { + ssa, + ssa_logging: options.ssa_logging.clone(), + print_codegen_timings: false, + }; + + optimize_all(builder, options) + } + + /// Test that the 
`std::hint::black_box` function prevents some of the optimizations. + #[test] + fn test_black_box_hint() { + // fn main(sum: u32) { + // // This version simplifies into a single `constraint 50 == sum` + // assert_eq(loop(5, 10), sum); + // // This should preserve additions because `k` is opaque, as if it came from an input. + // assert_eq(loop(5, std::hint::black_box(10)), sum); + // } + // fn loop(n: u32, k: u32) -> u32 { + // let mut sum = 0; + // for _ in 0..n { + // sum = sum + k; + // } + // sum + // } + + // Initial SSA: + let src = " + acir(inline) fn main f0 { + b0(v0: u32): + v4 = call f1(u32 5, u32 10) -> u32 + v5 = eq v4, v0 + constrain v4 == v0 + v7 = call black_box(u32 10) -> u32 + v9 = call f1(u32 5, v7) -> u32 + v10 = eq v9, v0 + constrain v9 == v0 + return + } + acir(inline) fn loop f1 { + b0(v0: u32, v1: u32): + v3 = allocate -> &mut u32 + store u32 0 at v3 + jmp b1(u32 0) + b1(v2: u32): + v5 = lt v2, v0 + jmpif v5 then: b3, else: b2 + b3(): + v7 = load v3 -> u32 + v8 = add v7, v1 + store v8 at v3 + v10 = add v2, u32 1 + jmp b1(v10) + b2(): + v6 = load v3 -> u32 + return v6 + } + "; + + // After Array Set Optimizations: + let expected = " + acir(inline) fn main f0 { + b0(v0: u32): + constrain u32 50 == v0 + v4 = call black_box(u32 10) -> u32 + v5 = add v4, v4 + v6 = add v5, v4 + v7 = add v6, v4 + v8 = add v7, v4 + constrain v8 == u32 50 + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + let ssa = run_all_passes(ssa).unwrap(); + assert_normalized_ssa_equals(ssa, expected); + } +} diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index f91487fd73e..37659ec7c98 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -469,7 +469,7 @@ impl<'function> PerFunctionContext<'function> { unreachable!("All Value::Params should already be known from previous calls 
to translate_block. Unknown value {id} = {value:?}") } Value::NumericConstant { constant, typ } => { - self.context.builder.numeric_constant(*constant, typ.clone()) + self.context.builder.numeric_constant(*constant, *typ) } Value::Function(function) => self.context.builder.import_function(*function), Value::Intrinsic(intrinsic) => self.context.builder.import_intrinsic_id(*intrinsic), @@ -1062,10 +1062,10 @@ mod test { let join_block = builder.insert_block(); builder.terminate_with_jmpif(inner2_cond, then_block, else_block); builder.switch_to_block(then_block); - let one = builder.numeric_constant(FieldElement::one(), Type::field()); + let one = builder.field_constant(FieldElement::one()); builder.terminate_with_jmp(join_block, vec![one]); builder.switch_to_block(else_block); - let two = builder.numeric_constant(FieldElement::from(2_u128), Type::field()); + let two = builder.field_constant(FieldElement::from(2_u128)); builder.terminate_with_jmp(join_block, vec![two]); let join_param = builder.add_block_parameter(join_block, Type::field()); builder.switch_to_block(join_block); @@ -1177,17 +1177,16 @@ mod test { builder.terminate_with_return(v0); builder.new_brillig_function("bar".into(), bar_id, InlineType::default()); - let bar_v0 = - builder.numeric_constant(1_usize, Type::Numeric(NumericType::Unsigned { bit_size: 1 })); + let bar_v0 = builder.numeric_constant(1_usize, NumericType::bool()); let then_block = builder.insert_block(); let else_block = builder.insert_block(); let join_block = builder.insert_block(); builder.terminate_with_jmpif(bar_v0, then_block, else_block); builder.switch_to_block(then_block); - let one = builder.numeric_constant(FieldElement::one(), Type::field()); + let one = builder.field_constant(FieldElement::one()); builder.terminate_with_jmp(join_block, vec![one]); builder.switch_to_block(else_block); - let two = builder.numeric_constant(FieldElement::from(2_u128), Type::field()); + let two = builder.field_constant(FieldElement::from(2_u128)); 
builder.terminate_with_jmp(join_block, vec![two]); let join_param = builder.add_block_parameter(join_block, Type::field()); builder.switch_to_block(join_block); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs index 290d2a33846..7e4546083b8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/loop_invariant.rs @@ -13,7 +13,7 @@ use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::ssa::{ ir::{ basic_block::BasicBlockId, - function::{Function, RuntimeType}, + function::Function, function_inserter::FunctionInserter, instruction::{Instruction, InstructionId}, types::Type, @@ -27,12 +27,7 @@ use super::unrolling::{Loop, Loops}; impl Ssa { #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn loop_invariant_code_motion(mut self) -> Ssa { - let brillig_functions = self - .functions - .iter_mut() - .filter(|(_, func)| matches!(func.runtime(), RuntimeType::Brillig(_))); - - for (_, function) in brillig_functions { + for function in self.functions.values_mut() { function.loop_invariant_code_motion(); } @@ -63,6 +58,7 @@ impl Loops { } context.map_dependent_instructions(); + context.inserter.map_data_bus_in_place(); } } @@ -113,6 +109,22 @@ impl<'f> LoopInvariantContext<'f> { if hoist_invariant { self.inserter.push_instruction(instruction_id, pre_header); + + // If we are hoisting a MakeArray instruction, + // we need to issue an extra inc_rc in case they are mutated afterward. + if matches!( + self.inserter.function.dfg[instruction_id], + Instruction::MakeArray { .. 
} + ) { + let result = + self.inserter.function.dfg.instruction_results(instruction_id)[0]; + let inc_rc = Instruction::IncrementRc { value: result }; + let call_stack = self.inserter.function.dfg.get_call_stack(instruction_id); + self.inserter + .function + .dfg + .insert_instruction_and_results(inc_rc, *block, None, call_stack); + } } else { self.inserter.push_instruction(instruction_id, *block); } @@ -190,6 +202,7 @@ impl<'f> LoopInvariantContext<'f> { }); let can_be_deduplicated = instruction.can_be_deduplicated(self.inserter.function, false) + || matches!(instruction, Instruction::MakeArray { .. }) || self.can_be_deduplicated_from_upper_bound(&instruction); is_loop_invariant && can_be_deduplicated @@ -251,13 +264,13 @@ mod test { b1(v2: u32): v5 = lt v2, u32 4 jmpif v5 then: b3, else: b2 + b2(): + return b3(): v6 = mul v0, v1 constrain v6 == u32 6 v8 = add v2, u32 1 jmp b1(v8) - b2(): - return } "; @@ -276,12 +289,12 @@ mod test { b1(v2: u32): v6 = lt v2, u32 4 jmpif v6 then: b3, else: b2 + b2(): + return b3(): constrain v3 == u32 6 v9 = add v2, u32 1 jmp b1(v9) - b2(): - return } "; @@ -300,21 +313,21 @@ mod test { b1(v2: u32): v6 = lt v2, u32 4 jmpif v6 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v7 = lt v3, u32 4 jmpif v7 then: b6, else: b5 + b5(): + v9 = add v2, u32 1 + jmp b1(v9) b6(): v10 = mul v0, v1 constrain v10 == u32 6 v12 = add v3, u32 1 jmp b4(v12) - b5(): - v9 = add v2, u32 1 - jmp b1(v9) - b2(): - return } "; @@ -333,20 +346,20 @@ mod test { b1(v2: u32): v7 = lt v2, u32 4 jmpif v7 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v8 = lt v3, u32 4 jmpif v8 then: b6, else: b5 + b5(): + v10 = add v2, u32 1 + jmp b1(v10) b6(): constrain v4 == u32 6 v12 = add v3, u32 1 jmp b4(v12) - b5(): - v10 = add v2, u32 1 - jmp b1(v10) - b2(): - return } "; @@ -374,6 +387,8 @@ mod test { b1(v2: u32): v5 = lt v2, u32 4 jmpif v5 then: b3, else: b2 + b2(): + return b3(): v6 = mul v0, v1 v7 = mul v6, v0 @@ -381,8 +396,6 
@@ mod test { constrain v7 == u32 12 v9 = add v2, u32 1 jmp b1(v9) - b2(): - return } "; @@ -402,12 +415,12 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): constrain v4 == u32 12 v11 = add v2, u32 1 jmp b1(v11) - b2(): - return } "; @@ -431,17 +444,17 @@ mod test { b1(v2: u32): v7 = lt v2, u32 4 jmpif v7 then: b3, else: b2 + b2(): + v8 = load v5 -> [u32; 5] + v10 = array_get v8, index u32 2 -> u32 + constrain v10 == u32 3 + return b3(): v12 = load v5 -> [u32; 5] v13 = array_set v12, index v0, value v1 store v13 at v5 v15 = add v2, u32 1 jmp b1(v15) - b2(): - v8 = load v5 -> [u32; 5] - v10 = array_get v8, index u32 2 -> u32 - constrain v10 == u32 3 - return } "; @@ -485,16 +498,24 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): jmp b4(u32 0) b4(v3: u32): v10 = lt v3, u32 4 jmpif v10 then: b6, else: b5 + b5(): + v12 = add v2, u32 1 + jmp b1(v12) b6(): jmp b7(u32 0) b7(v4: u32): v13 = lt v4, u32 4 jmpif v13 then: b9, else: b8 + b8(): + v14 = add v3, u32 1 + jmp b4(v14) b9(): v15 = array_get v6, index v2 -> u32 v16 = eq v15, v0 @@ -504,14 +525,6 @@ mod test { constrain v17 == v0 v19 = add v4, u32 1 jmp b7(v19) - b8(): - v14 = add v3, u32 1 - jmp b4(v14) - b5(): - v12 = add v2, u32 1 - jmp b1(v12) - b2(): - return } "; @@ -526,6 +539,8 @@ mod test { b1(v2: u32): v9 = lt v2, u32 4 jmpif v9 then: b3, else: b2 + b2(): + return b3(): v10 = array_get v6, index v2 -> u32 v11 = eq v10, v0 @@ -533,6 +548,9 @@ mod test { b4(v3: u32): v12 = lt v3, u32 4 jmpif v12 then: b6, else: b5 + b5(): + v14 = add v2, u32 1 + jmp b1(v14) b6(): v15 = array_get v6, index v3 -> u32 v16 = eq v15, v0 @@ -540,18 +558,103 @@ mod test { b7(v4: u32): v17 = lt v4, u32 4 jmpif v17 then: b9, else: b8 + b8(): + v18 = add v3, u32 1 + jmp b4(v18) b9(): constrain v10 == v0 constrain v15 == v0 v19 = add v4, u32 1 jmp b7(v19) - b8(): - v18 = add v3, u32 1 - jmp b4(v18) - b5(): - v14 = add v2, u32 1 - jmp b1(v14) + } 
+ "; + + let ssa = ssa.loop_invariant_code_motion(); + assert_normalized_ssa_equals(ssa, expected); + } + + #[test] + fn insert_inc_rc_when_moving_make_array() { + // SSA for the following program: + // + // unconstrained fn main(x: u32, y: u32) { + // let mut a1 = [1, 2, 3, 4, 5]; + // a1[x] = 64; + // for i in 0 .. 5 { + // let mut a2 = [1, 2, 3, 4, 5]; + // a2[y + i] = 128; + // foo(a2); + // } + // foo(a1); + // } + // + // We want to make sure move a loop invariant make_array instruction, + // to account for whether that array has been marked as mutable. + // To do so, we increment the reference counter on the array we are moving. + // In the SSA below, we want to move `v42` out of the loop. + let src = " + brillig(inline) fn main f0 { + b0(v0: u32, v1: u32): + v8 = make_array [Field 1, Field 2, Field 3, Field 4, Field 5] : [Field; 5] + v9 = allocate -> &mut [Field; 5] + v11 = array_set v8, index v0, value Field 64 + v13 = add v0, u32 1 + store v11 at v9 + jmp b1(u32 0) + b1(v2: u32): + v16 = lt v2, u32 5 + jmpif v16 then: b3, else: b2 + b2(): + v17 = load v9 -> [Field; 5] + call f1(v17) + return + b3(): + v19 = make_array [Field 1, Field 2, Field 3, Field 4, Field 5] : [Field; 5] + v20 = allocate -> &mut [Field; 5] + v21 = add v1, v2 + v23 = array_set v19, index v21, value Field 128 + call f1(v23) + v25 = add v2, u32 1 + jmp b1(v25) + } + brillig(inline) fn foo f1 { + b0(v0: [Field; 5]): + return + } + "; + + let ssa = Ssa::from_str(src).unwrap(); + + // We expect the `make_array` at the top of `b3` to be replaced with an `inc_rc` + // of the newly hoisted `make_array` at the end of `b0`. 
+ let expected = " + brillig(inline) fn main f0 { + b0(v0: u32, v1: u32): + v8 = make_array [Field 1, Field 2, Field 3, Field 4, Field 5] : [Field; 5] + v9 = allocate -> &mut [Field; 5] + v11 = array_set v8, index v0, value Field 64 + v13 = add v0, u32 1 + store v11 at v9 + v14 = make_array [Field 1, Field 2, Field 3, Field 4, Field 5] : [Field; 5] + jmp b1(u32 0) + b1(v2: u32): + v17 = lt v2, u32 5 + jmpif v17 then: b3, else: b2 b2(): + v18 = load v9 -> [Field; 5] + call f1(v18) + return + b3(): + inc_rc v14 + v20 = allocate -> &mut [Field; 5] + v21 = add v1, v2 + v23 = array_set v14, index v21, value Field 128 + call f1(v23) + v25 = add v2, u32 1 + jmp b1(v25) + } + brillig(inline) fn foo f1 { + b0(v0: [Field; 5]): return } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index 53a31ae57c1..77ad53df9cf 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -1121,11 +1121,6 @@ mod tests { b1(v0: Field): v4 = eq v0, Field 0 jmpif v4 then: b3, else: b2 - b3(): - v11 = load v3 -> &mut Field - store Field 2 at v11 - v13 = add v0, Field 1 - jmp b1(v13) b2(): v5 = load v1 -> Field v7 = eq v5, Field 2 @@ -1135,6 +1130,11 @@ mod tests { v10 = eq v9, Field 2 constrain v9 == Field 2 return + b3(): + v11 = load v3 -> &mut Field + store Field 2 at v11 + v13 = add v0, Field 1 + jmp b1(v13) } "; @@ -1157,11 +1157,6 @@ mod tests { b1(v0: Field): v4 = eq v0, Field 0 jmpif v4 then: b3, else: b2 - b3(): - v13 = load v3 -> &mut Field - store Field 2 at v13 - v15 = add v0, Field 1 - jmp b1(v15) b2(): v5 = load v1 -> Field v7 = eq v5, Field 2 @@ -1173,6 +1168,11 @@ mod tests { v12 = eq v11, Field 2 constrain v11 == Field 2 return + b3(): + v13 = load v3 -> &mut Field + store Field 2 at v13 + v15 = add v0, Field 1 + jmp b1(v15) } acir(inline) fn foo f1 { b0(v0: &mut Field): @@ -1195,6 +1195,10 @@ mod 
tests { acir(inline) fn main f0 { b0(v0: u1): jmpif v0 then: b2, else: b1 + b1(): + v4 = allocate -> &mut Field + store Field 1 at v4 + jmp b3(v4, v4, v4) b2(): v6 = allocate -> &mut Field store Field 0 at v6 @@ -1212,10 +1216,6 @@ mod tests { constrain v11 == Field 1 constrain v13 == Field 3 return - b1(): - v4 = allocate -> &mut Field - store Field 1 at v4 - jmp b3(v4, v4, v4) } "; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 06481a12f60..bd0c86570e2 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -11,6 +11,7 @@ mod constant_folding; mod defunctionalize; mod die; pub(crate) mod flatten_cfg; +mod hint; mod inlining; mod loop_invariant; mod mem2reg; diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs index a5b60fb5fcd..f5e96224260 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs @@ -177,7 +177,7 @@ impl IdMaps { } Value::NumericConstant { constant, typ } => { - new_function.dfg.make_constant(*constant, typ.clone()) + new_function.dfg.make_constant(*constant, *typ) } Value::Intrinsic(intrinsic) => new_function.dfg.import_intrinsic(*intrinsic), Value::ForeignFunction(name) => new_function.dfg.import_foreign_function(name), diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/rc.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/rc.rs index ffe4ada39b7..64f6e2ddfea 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/rc.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/rc.rs @@ -160,8 +160,12 @@ mod test { use crate::ssa::{ function_builder::FunctionBuilder, ir::{ - basic_block::BasicBlockId, dfg::DataFlowGraph, 
function::RuntimeType, - instruction::Instruction, map::Id, types::Type, + basic_block::BasicBlockId, + dfg::DataFlowGraph, + function::RuntimeType, + instruction::Instruction, + map::Id, + types::{NumericType, Type}, }, }; @@ -251,7 +255,7 @@ mod test { builder.insert_inc_rc(v0); let v2 = builder.insert_load(v1, array_type); - let zero = builder.numeric_constant(0u128, Type::unsigned(64)); + let zero = builder.numeric_constant(0u128, NumericType::unsigned(64)); let five = builder.field_constant(5u128); let v7 = builder.insert_array_set(v2, zero, five); @@ -302,7 +306,7 @@ mod test { builder.insert_store(v0, v1); let v2 = builder.insert_load(v1, array_type.clone()); - let zero = builder.numeric_constant(0u128, Type::unsigned(64)); + let zero = builder.numeric_constant(0u128, NumericType::unsigned(64)); let five = builder.field_constant(5u128); let v7 = builder.insert_array_set(v2, zero, five); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs index ccf5bd9d9f8..872c7920a77 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_bit_shifts.rs @@ -100,7 +100,7 @@ impl Context<'_> { bit_size: u32, ) -> ValueId { let base = self.field_constant(FieldElement::from(2_u128)); - let typ = self.function.dfg.type_of_value(lhs); + let typ = self.function.dfg.type_of_value(lhs).unwrap_numeric(); let (max_bit, pow) = if let Some(rhs_constant) = self.function.dfg.get_numeric_constant(rhs) { // Happy case is that we know precisely by how many bits the integer will @@ -115,29 +115,29 @@ impl Context<'_> { return InsertInstructionResult::SimplifiedTo(zero).first(); } } - let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ.clone()); + let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); let max_lhs_bits = 
self.function.dfg.get_value_max_num_bits(lhs); (max_lhs_bits + bit_shift_size, pow) } else { // we use a predicate to nullify the result in case of overflow - let bit_size_var = - self.numeric_constant(FieldElement::from(bit_size as u128), Type::unsigned(8)); + let u8_type = NumericType::unsigned(8); + let bit_size_var = self.numeric_constant(FieldElement::from(bit_size as u128), u8_type); let overflow = self.insert_binary(rhs, BinaryOp::Lt, bit_size_var); - let predicate = self.insert_cast(overflow, typ.clone()); + let predicate = self.insert_cast(overflow, typ); // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value - let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let rhs_unsigned = self.insert_cast(rhs, NumericType::unsigned(bit_size)); let pow = self.pow(base, rhs_unsigned); - let pow = self.insert_cast(pow, typ.clone()); + let pow = self.insert_cast(pow, typ); (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) }; if max_bit <= bit_size { self.insert_binary(lhs, BinaryOp::Mul, pow) } else { - let lhs_field = self.insert_cast(lhs, Type::field()); - let pow_field = self.insert_cast(pow, Type::field()); + let lhs_field = self.insert_cast(lhs, NumericType::NativeField); + let pow_field = self.insert_cast(pow, NumericType::NativeField); let result = self.insert_binary(lhs_field, BinaryOp::Mul, pow_field); let result = self.insert_truncate(result, bit_size, max_bit); self.insert_cast(result, typ) @@ -153,7 +153,7 @@ impl Context<'_> { rhs: ValueId, bit_size: u32, ) -> ValueId { - let lhs_typ = self.function.dfg.type_of_value(lhs); + let lhs_typ = self.function.dfg.type_of_value(lhs).unwrap_numeric(); let base = self.field_constant(FieldElement::from(2_u128)); let pow = self.pow(base, rhs); if lhs_typ.is_unsigned() { @@ -161,14 +161,14 @@ impl Context<'_> { self.insert_binary(lhs, BinaryOp::Div, pow) } else { // Get the sign of the operand; positive signed operand will 
just do a division as well - let zero = self.numeric_constant(FieldElement::zero(), Type::signed(bit_size)); + let zero = self.numeric_constant(FieldElement::zero(), NumericType::signed(bit_size)); let lhs_sign = self.insert_binary(lhs, BinaryOp::Lt, zero); - let lhs_sign_as_field = self.insert_cast(lhs_sign, Type::field()); - let lhs_as_field = self.insert_cast(lhs, Type::field()); + let lhs_sign_as_field = self.insert_cast(lhs_sign, NumericType::NativeField); + let lhs_as_field = self.insert_cast(lhs, NumericType::NativeField); // For negative numbers, convert to 1-complement using wrapping addition of a + 1 let one_complement = self.insert_binary(lhs_sign_as_field, BinaryOp::Add, lhs_as_field); let one_complement = self.insert_truncate(one_complement, bit_size, bit_size + 1); - let one_complement = self.insert_cast(one_complement, Type::signed(bit_size)); + let one_complement = self.insert_cast(one_complement, NumericType::signed(bit_size)); // Performs the division on the 1-complement (or the operand if positive) let shifted_complement = self.insert_binary(one_complement, BinaryOp::Div, pow); // Convert back to 2-complement representation if operand is negative @@ -203,8 +203,8 @@ impl Context<'_> { let idx = self.field_constant(FieldElement::from((bit_size - i) as i128)); let b = self.insert_array_get(rhs_bits, idx, Type::bool()); let not_b = self.insert_not(b); - let b = self.insert_cast(b, Type::field()); - let not_b = self.insert_cast(not_b, Type::field()); + let b = self.insert_cast(b, NumericType::NativeField); + let not_b = self.insert_cast(not_b, NumericType::NativeField); let r1 = self.insert_binary(a, BinaryOp::Mul, b); let r2 = self.insert_binary(r_squared, BinaryOp::Mul, not_b); r = self.insert_binary(r1, BinaryOp::Add, r2); @@ -216,14 +216,14 @@ impl Context<'_> { } pub(crate) fn field_constant(&mut self, constant: FieldElement) -> ValueId { - self.function.dfg.make_constant(constant, Type::field()) + self.function.dfg.make_constant(constant, 
NumericType::NativeField) } /// Insert a numeric constant into the current function pub(crate) fn numeric_constant( &mut self, value: impl Into, - typ: Type, + typ: NumericType, ) -> ValueId { self.function.dfg.make_constant(value.into(), typ) } @@ -260,7 +260,7 @@ impl Context<'_> { /// Insert a cast instruction at the end of the current block. /// Returns the result of the cast instruction. - pub(crate) fn insert_cast(&mut self, value: ValueId, typ: Type) -> ValueId { + pub(crate) fn insert_cast(&mut self, value: ValueId, typ: NumericType) -> ValueId { self.insert_instruction(Instruction::Cast(value, typ), None).first() } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs index f735d9300ce..e85e2c4a441 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_enable_side_effects.rs @@ -17,8 +17,8 @@ use crate::ssa::{ basic_block::BasicBlockId, dfg::DataFlowGraph, function::{Function, RuntimeType}, - instruction::{BinaryOp, Instruction, Intrinsic}, - types::Type, + instruction::{BinaryOp, Hint, Instruction, Intrinsic}, + types::NumericType, value::Value, }, ssa_gen::Ssa, @@ -70,7 +70,8 @@ impl Context { ) { let instructions = function.dfg[block].take_instructions(); - let mut active_condition = function.dfg.make_constant(FieldElement::one(), Type::bool()); + let one = FieldElement::one(); + let mut active_condition = function.dfg.make_constant(one, NumericType::bool()); let mut last_side_effects_enabled_instruction = None; let mut new_instructions = Vec::with_capacity(instructions.len()); @@ -174,6 +175,7 @@ impl Context { | Intrinsic::ToBits(_) | Intrinsic::ToRadix(_) | Intrinsic::BlackBox(_) + | Intrinsic::Hint(Hint::BlackBox) | Intrinsic::FromField | Intrinsic::AsField | Intrinsic::AsSlice @@ -202,7 +204,7 @@ mod test { ir::{ 
instruction::{BinaryOp, Instruction}, map::Id, - types::Type, + types::{NumericType, Type}, }, }; @@ -233,9 +235,9 @@ mod test { let mut builder = FunctionBuilder::new("main".into(), main_id); let v0 = builder.add_parameter(Type::field()); - let two = builder.numeric_constant(2u128, Type::field()); + let two = builder.field_constant(2u128); - let one = builder.numeric_constant(1u128, Type::bool()); + let one = builder.numeric_constant(1u128, NumericType::bool()); builder.insert_enable_side_effects_if(one); builder.insert_binary(v0, BinaryOp::Mul, two); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs index 02191801fcd..45b7f9072d8 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/remove_if_else.rs @@ -4,6 +4,8 @@ use acvm::{acir::AcirField, FieldElement}; use fxhash::FxHashMap as HashMap; use crate::ssa::ir::function::RuntimeType; +use crate::ssa::ir::instruction::Hint; +use crate::ssa::ir::types::NumericType; use crate::ssa::ir::value::ValueId; use crate::ssa::{ ir::{ @@ -62,7 +64,8 @@ impl Context { fn remove_if_else(&mut self, function: &mut Function) { let block = function.entry_block(); let instructions = function.dfg[block].take_instructions(); - let mut current_conditional = function.dfg.make_constant(FieldElement::one(), Type::bool()); + let one = FieldElement::one(); + let mut current_conditional = function.dfg.make_constant(one, NumericType::bool()); for instruction in instructions { match &function.dfg[instruction] { @@ -231,6 +234,7 @@ fn slice_capacity_change( | Intrinsic::ArrayAsStrUnchecked | Intrinsic::StrAsBytes | Intrinsic::BlackBox(_) + | Intrinsic::Hint(Hint::BlackBox) | Intrinsic::FromField | Intrinsic::AsField | Intrinsic::AsWitness diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs index 3d40c88d704..87e680932c6 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/resolve_is_unconstrained.rs @@ -2,12 +2,11 @@ use crate::ssa::{ ir::{ function::{Function, RuntimeType}, instruction::{Instruction, Intrinsic}, - types::Type, + types::NumericType, value::Value, }, ssa_gen::Ssa, }; -use acvm::FieldElement; use fxhash::FxHashSet as HashSet; impl Ssa { @@ -47,10 +46,9 @@ impl Function { // We replace the result with a fresh id. This will be unused, so the DIE pass will remove the leftover intrinsic call. self.dfg.replace_result(instruction_id, original_return_id); - let is_within_unconstrained = self.dfg.make_constant( - FieldElement::from(matches!(self.runtime(), RuntimeType::Brillig(_))), - Type::bool(), - ); + let is_unconstrained = matches!(self.runtime(), RuntimeType::Brillig(_)).into(); + let is_within_unconstrained = + self.dfg.make_constant(is_unconstrained, NumericType::bool()); // Replace all uses of the original return value with the constant self.dfg.set_value_from_id(original_return_id, is_within_unconstrained); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index c282e2df451..e7f8d227d28 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -442,14 +442,14 @@ mod test { store Field 0 at v1 v3 = not v0 jmpif v0 then: b2, else: b1 + b1(): + store Field 2 at v1 + jmp b2() b2(): v5 = load v1 -> Field v6 = eq v5, Field 2 constrain v5 == Field 2 return - b1(): - store Field 2 at v1 - jmp b2() }"; assert_normalized_ssa_equals(ssa.simplify_cfg(), expected); } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs 
b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 1a13acc5435..7ef793a350b 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -279,10 +279,10 @@ impl Loop { &self, function: &Function, cfg: &ControlFlowGraph, - ) -> Result, CallStack> { - let pre_header = self.get_pre_header(function, cfg)?; - let jump_value = get_induction_variable(function, pre_header)?; - Ok(function.dfg.get_numeric_constant(jump_value)) + ) -> Option { + let pre_header = self.get_pre_header(function, cfg).ok()?; + let jump_value = get_induction_variable(function, pre_header).ok()?; + function.dfg.get_numeric_constant(jump_value) } /// Find the upper bound of the loop in the loop header and return it @@ -302,6 +302,11 @@ impl Loop { pub(super) fn get_const_upper_bound(&self, function: &Function) -> Option { let block = &function.dfg[self.header]; let instructions = block.instructions(); + if instructions.is_empty() { + // If the loop condition is constant time, the loop header will be + // simplified to a simple jump. + return None; + } assert_eq!( instructions.len(), 1, @@ -327,14 +332,10 @@ impl Loop { &self, function: &Function, cfg: &ControlFlowGraph, - ) -> Result, CallStack> { - let Some(lower) = self.get_const_lower_bound(function, cfg)? else { - return Ok(None); - }; - let Some(upper) = self.get_const_upper_bound(function) else { - return Ok(None); - }; - Ok(Some((lower, upper))) + ) -> Option<(FieldElement, FieldElement)> { + let lower = self.get_const_lower_bound(function, cfg)?; + let upper = self.get_const_upper_bound(function)?; + Some((lower, upper)) } /// Unroll a single loop in the function. @@ -547,9 +548,9 @@ impl Loop { &self, function: &Function, cfg: &ControlFlowGraph, - ) -> Result, CallStack> { + ) -> Option> { // We need to traverse blocks from the pre-header up to the block entry point. 
- let pre_header = self.get_pre_header(function, cfg)?; + let pre_header = self.get_pre_header(function, cfg).ok()?; let function_entry = function.entry_block(); // The algorithm in `find_blocks_in_loop` expects to collect the blocks between the header and the back-edge of the loop, @@ -557,22 +558,19 @@ impl Loop { let blocks = Self::find_blocks_in_loop(function_entry, pre_header, cfg).blocks; // Collect allocations in all blocks above the header. - let allocations = blocks.iter().flat_map(|b| { - function.dfg[*b] - .instructions() - .iter() + let allocations = blocks.iter().flat_map(|block| { + let instructions = function.dfg[*block].instructions().iter(); + instructions .filter(|i| matches!(&function.dfg[**i], Instruction::Allocate)) - .map(|i| { - // Get the value into which the allocation was stored. - function.dfg.instruction_results(*i)[0] - }) + // Get the value into which the allocation was stored. + .map(|i| function.dfg.instruction_results(*i)[0]) }); // Collect reference parameters of the function itself. let params = function.parameters().iter().filter(|p| function.dfg.value_is_reference(**p)).copied(); - Ok(params.chain(allocations).collect()) + Some(params.chain(allocations).collect()) } /// Count the number of load and store instructions of specific variables in the loop. @@ -603,13 +601,11 @@ impl Loop { /// Count the number of instructions in the loop, including the terminating jumps. fn count_all_instructions(&self, function: &Function) -> usize { - self.blocks - .iter() - .map(|block| { - let block = &function.dfg[*block]; - block.instructions().len() + block.terminator().map(|_| 1).unwrap_or_default() - }) - .sum() + let iter = self.blocks.iter().map(|block| { + let block = &function.dfg[*block]; + block.instructions().len() + block.terminator().is_some() as usize + }); + iter.sum() } /// Count the number of increments to the induction variable. 
@@ -640,18 +636,11 @@ impl Loop { function: &Function, cfg: &ControlFlowGraph, ) -> Option { - let Ok(Some((lower, upper))) = self.get_const_bounds(function, cfg) else { - return None; - }; - let Some(lower) = lower.try_to_u64() else { - return None; - }; - let Some(upper) = upper.try_to_u64() else { - return None; - }; - let Ok(refs) = self.find_pre_header_reference_values(function, cfg) else { - return None; - }; + let (lower, upper) = self.get_const_bounds(function, cfg)?; + let lower = lower.try_to_u64()?; + let upper = upper.try_to_u64()?; + let refs = self.find_pre_header_reference_values(function, cfg)?; + let (loads, stores) = self.count_loads_and_stores(function, &refs); let increments = self.count_induction_increments(function); let all_instructions = self.count_all_instructions(function); @@ -1142,7 +1131,6 @@ mod tests { let (lower, upper) = loops.yet_to_unroll[0] .get_const_bounds(function, &loops.cfg) - .expect("should find bounds") .expect("bounds are numeric const"); assert_eq!(lower, FieldElement::from(0u32)); @@ -1158,7 +1146,7 @@ mod tests { let refs = loop0.find_pre_header_reference_values(function, &loops.cfg).unwrap(); assert_eq!(refs.len(), 1); - assert!(refs.contains(&ValueId::new(2))); + assert!(refs.contains(&ValueId::test_new(2))); let (loads, stores) = loop0.count_loads_and_stores(function, &refs); assert_eq!(loads, 1); @@ -1337,12 +1325,15 @@ mod tests { b2(): v7 = eq v0, u32 2 jmpif v7 then: b7, else: b3 - b7(): - v18 = add v0, u32 1 - jmp b1(v18) b3(): v9 = eq v0, u32 5 jmpif v9 then: b5, else: b4 + b4(): + v10 = load v1 -> Field + v12 = add v10, Field 1 + store v12 at v1 + v14 = add v0, u32 1 + jmp b1(v14) b5(): jmp b6() b6(): @@ -1350,12 +1341,9 @@ mod tests { v17 = eq v15, Field 4 constrain v15 == Field 4 return - b4(): - v10 = load v1 -> Field - v12 = add v10, Field 1 - store v12 at v1 - v14 = add v0, u32 1 - jmp b1(v14) + b7(): + v18 = add v0, u32 1 + jmp b1(v18) } "; let ssa = Ssa::from_str(src).unwrap(); diff --git 
a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs index e78cbbd75a1..7c7e977c6ce 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/into_ssa.rs @@ -57,7 +57,7 @@ impl Translator { // A FunctionBuilder must be created with a main Function, so here wer remove it // from the parsed SSA to avoid adding it twice later on. let main_function = parsed_ssa.functions.remove(0); - let main_id = FunctionId::new(0); + let main_id = FunctionId::test_new(0); let mut builder = FunctionBuilder::new(main_function.external_name.clone(), main_id); builder.set_runtime(main_function.runtime_type); @@ -65,7 +65,7 @@ impl Translator { let mut function_id_counter = 1; let mut functions = HashMap::new(); for function in &parsed_ssa.functions { - let function_id = FunctionId::new(function_id_counter); + let function_id = FunctionId::test_new(function_id_counter); function_id_counter += 1; functions.insert(function.internal_name.clone(), function_id); @@ -207,7 +207,7 @@ impl Translator { } ParsedInstruction::Cast { target, lhs, typ } => { let lhs = self.translate_value(lhs)?; - let value_id = self.builder.insert_cast(lhs, typ); + let value_id = self.builder.insert_cast(lhs, typ.unwrap_numeric()); self.define_variable(target, value_id)?; } ParsedInstruction::Constrain { lhs, rhs, assert_message } => { @@ -290,7 +290,7 @@ impl Translator { fn translate_value(&mut self, value: ParsedValue) -> Result { match value { ParsedValue::NumericConstant { constant, typ } => { - Ok(self.builder.numeric_constant(constant, typ)) + Ok(self.builder.numeric_constant(constant, typ.unwrap_numeric())) } ParsedValue::Variable(identifier) => self.lookup_variable(identifier), } diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs index 
6318f9dc56e..dab96dfa04f 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/parser/tests.rs @@ -143,10 +143,10 @@ fn test_jmpif() { acir(inline) fn main f0 { b0(v0: Field): jmpif v0 then: b2, else: b1 - b2(): - return b1(): return + b2(): + return } "; assert_ssa_roundtrip(src); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 116e0de4ecd..7807658dabb 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -19,7 +19,6 @@ use crate::ssa::ir::types::{NumericType, Type}; use crate::ssa::ir::value::ValueId; use super::value::{Tree, Value, Values}; -use super::SSA_WORD_SIZE; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; /// The FunctionContext is the main context object for translating a @@ -281,37 +280,33 @@ impl<'a> FunctionContext<'a> { &mut self, value: impl Into, negative: bool, - typ: Type, + numeric_type: NumericType, ) -> Result { let value = value.into(); - if let Type::Numeric(numeric_type) = typ { - if let Some(range) = numeric_type.value_is_outside_limits(value, negative) { - let call_stack = self.builder.get_call_stack(); - return Err(RuntimeError::IntegerOutOfBounds { - value: if negative { -value } else { value }, - typ: numeric_type, - range, - call_stack, - }); - } + if let Some(range) = numeric_type.value_is_outside_limits(value, negative) { + let call_stack = self.builder.get_call_stack(); + return Err(RuntimeError::IntegerOutOfBounds { + value: if negative { -value } else { value }, + typ: numeric_type, + range, + call_stack, + }); + } - let value = if negative { - match numeric_type { - NumericType::NativeField => -value, - NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => { - let base = 1_u128 << bit_size; - FieldElement::from(base) 
- value - } + let value = if negative { + match numeric_type { + NumericType::NativeField => -value, + NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => { + let base = 1_u128 << bit_size; + FieldElement::from(base) - value } - } else { - value - }; - - Ok(self.builder.numeric_constant(value, typ)) + } } else { - panic!("Expected type for numeric constant to be a numeric type, found {typ}"); - } + value + }; + + Ok(self.builder.numeric_constant(value, numeric_type)) } /// helper function which add instructions to the block computing the absolute value of the @@ -320,16 +315,16 @@ impl<'a> FunctionContext<'a> { assert_eq!(self.builder.type_of_value(sign), Type::bool()); // We compute the absolute value of lhs - let bit_width = - self.builder.numeric_constant(FieldElement::from(2_i128.pow(bit_size)), Type::field()); + let bit_width = FieldElement::from(2_i128.pow(bit_size)); + let bit_width = self.builder.numeric_constant(bit_width, NumericType::NativeField); let sign_not = self.builder.insert_not(sign); // We use unsafe casts here, this is fine as we're casting to a `field` type. 
- let as_field = self.builder.insert_cast(input, Type::field()); - let sign_field = self.builder.insert_cast(sign, Type::field()); + let as_field = self.builder.insert_cast(input, NumericType::NativeField); + let sign_field = self.builder.insert_cast(sign, NumericType::NativeField); let positive_predicate = self.builder.insert_binary(sign_field, BinaryOp::Mul, as_field); let two_complement = self.builder.insert_binary(bit_width, BinaryOp::Sub, as_field); - let sign_not_field = self.builder.insert_cast(sign_not, Type::field()); + let sign_not_field = self.builder.insert_cast(sign_not, NumericType::NativeField); let negative_predicate = self.builder.insert_binary(sign_not_field, BinaryOp::Mul, two_complement); self.builder.insert_binary(positive_predicate, BinaryOp::Add, negative_predicate) @@ -354,15 +349,18 @@ impl<'a> FunctionContext<'a> { operator: BinaryOpKind, location: Location, ) -> ValueId { - let result_type = self.builder.current_function.dfg.type_of_value(result); + let result_type = self.builder.current_function.dfg.type_of_value(result).unwrap_numeric(); match result_type { - Type::Numeric(NumericType::Signed { bit_size }) => { + NumericType::Signed { bit_size } => { match operator { BinaryOpKind::Add | BinaryOpKind::Subtract => { // Result is computed modulo the bit size let result = self.builder.insert_truncate(result, bit_size, bit_size + 1); - let result = - self.insert_safe_cast(result, Type::unsigned(bit_size), location); + let result = self.insert_safe_cast( + result, + NumericType::unsigned(bit_size), + location, + ); self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); self.insert_safe_cast(result, result_type, location) @@ -370,7 +368,7 @@ impl<'a> FunctionContext<'a> { BinaryOpKind::Multiply => { // Result is computed modulo the bit size let mut result = - self.builder.insert_cast(result, Type::unsigned(2 * bit_size)); + self.builder.insert_cast(result, NumericType::unsigned(2 * bit_size)); result = 
self.builder.insert_truncate(result, bit_size, 2 * bit_size); self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); @@ -382,7 +380,7 @@ impl<'a> FunctionContext<'a> { _ => unreachable!("operator {} should not overflow", operator), } } - Type::Numeric(NumericType::Unsigned { bit_size }) => { + NumericType::Unsigned { bit_size } => { let dfg = &self.builder.current_function.dfg; let max_lhs_bits = dfg.get_value_max_num_bits(lhs); @@ -410,7 +408,7 @@ impl<'a> FunctionContext<'a> { result } - _ => result, + NumericType::NativeField => result, } } @@ -425,11 +423,11 @@ impl<'a> FunctionContext<'a> { bit_size: u32, location: Location, ) -> ValueId { - let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); + let one = self.builder.numeric_constant(FieldElement::one(), NumericType::bool()); assert!(self.builder.current_function.dfg.type_of_value(rhs) == Type::unsigned(8)); - let max = - self.builder.numeric_constant(FieldElement::from(bit_size as i128), Type::unsigned(8)); + let bit_size_field = FieldElement::from(bit_size as i128); + let max = self.builder.numeric_constant(bit_size_field, NumericType::unsigned(8)); let overflow = self.builder.insert_binary(rhs, BinaryOp::Lt, max); self.builder.set_location(location).insert_constrain( overflow, @@ -463,11 +461,11 @@ impl<'a> FunctionContext<'a> { let is_sub = operator == BinaryOpKind::Subtract; let half_width = self.builder.numeric_constant( FieldElement::from(2_i128.pow(bit_size - 1)), - Type::unsigned(bit_size), + NumericType::unsigned(bit_size), ); // We compute the sign of the operands. 
The overflow checks for signed integers depends on these signs - let lhs_as_unsigned = self.insert_safe_cast(lhs, Type::unsigned(bit_size), location); - let rhs_as_unsigned = self.insert_safe_cast(rhs, Type::unsigned(bit_size), location); + let lhs_as_unsigned = self.insert_safe_cast(lhs, NumericType::unsigned(bit_size), location); + let rhs_as_unsigned = self.insert_safe_cast(rhs, NumericType::unsigned(bit_size), location); let lhs_sign = self.builder.insert_binary(lhs_as_unsigned, BinaryOp::Lt, half_width); let mut rhs_sign = self.builder.insert_binary(rhs_as_unsigned, BinaryOp::Lt, half_width); let message = if is_sub { @@ -505,18 +503,19 @@ impl<'a> FunctionContext<'a> { bit_size, Some("attempt to multiply with overflow".to_string()), ); - let product = self.builder.insert_cast(product_field, Type::unsigned(bit_size)); + let product = + self.builder.insert_cast(product_field, NumericType::unsigned(bit_size)); // Then we check the signed product fits in a signed integer of bit_size-bits let not_same = self.builder.insert_not(same_sign); let not_same_sign_field = - self.insert_safe_cast(not_same, Type::unsigned(bit_size), location); + self.insert_safe_cast(not_same, NumericType::unsigned(bit_size), location); let positive_maximum_with_offset = self.builder.insert_binary(half_width, BinaryOp::Add, not_same_sign_field); let product_overflow_check = self.builder.insert_binary(product, BinaryOp::Lt, positive_maximum_with_offset); - let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); + let one = self.builder.numeric_constant(FieldElement::one(), NumericType::bool()); self.builder.set_location(location).insert_constrain( product_overflow_check, one, @@ -595,7 +594,7 @@ impl<'a> FunctionContext<'a> { pub(super) fn insert_safe_cast( &mut self, mut value: ValueId, - typ: Type, + typ: NumericType, location: Location, ) -> ValueId { self.builder.set_location(location); @@ -614,7 +613,8 @@ impl<'a> FunctionContext<'a> { /// Create a const offset of an 
address for an array load or store pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { if offset != 0 { - let offset = self.builder.numeric_constant(offset, self.builder.type_of_value(address)); + let typ = self.builder.type_of_value(address).unwrap_numeric(); + let offset = self.builder.numeric_constant(offset, typ); address = self.builder.insert_binary(address, BinaryOp::Add, offset); } address @@ -622,7 +622,7 @@ impl<'a> FunctionContext<'a> { /// Array indexes are u32. This function casts values used as indexes to u32. pub(super) fn make_array_index(&mut self, index: ValueId) -> ValueId { - self.builder.insert_cast(index, Type::unsigned(SSA_WORD_SIZE)) + self.builder.insert_cast(index, NumericType::length_type()) } /// Define a local variable to be some Values that can later be retrieved @@ -870,12 +870,12 @@ impl<'a> FunctionContext<'a> { ) -> ValueId { let index = self.make_array_index(index); let element_size = - self.builder.numeric_constant(self.element_size(array), Type::unsigned(SSA_WORD_SIZE)); + self.builder.numeric_constant(self.element_size(array), NumericType::length_type()); // The actual base index is the user's index * the array element type's size let mut index = self.builder.set_location(location).insert_binary(index, BinaryOp::Mul, element_size); - let one = self.builder.numeric_constant(FieldElement::one(), Type::unsigned(SSA_WORD_SIZE)); + let one = self.builder.numeric_constant(FieldElement::one(), NumericType::length_type()); new_value.for_each(|value| { let value = value.eval(self); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 2fe0a38af00..536f2cdb477 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -2,6 +2,7 @@ pub(crate) mod context; mod program; mod value; +use noirc_frontend::token::FmtStrFragment; 
pub(crate) use program::Ssa; use context::SharedContext; @@ -22,6 +23,7 @@ use self::{ }; use super::ir::instruction::ErrorType; +use super::ir::types::NumericType; use super::{ function_builder::data_bus::DataBus, ir::{ @@ -222,18 +224,34 @@ impl<'a> FunctionContext<'a> { } ast::Literal::Integer(value, negative, typ, location) => { self.builder.set_location(*location); - let typ = Self::convert_non_tuple_type(typ); + let typ = Self::convert_non_tuple_type(typ).unwrap_numeric(); self.checked_numeric_constant(*value, *negative, typ).map(Into::into) } ast::Literal::Bool(value) => { // Don't need to call checked_numeric_constant here since `value` can only be true or false - Ok(self.builder.numeric_constant(*value as u128, Type::bool()).into()) + Ok(self.builder.numeric_constant(*value as u128, NumericType::bool()).into()) } ast::Literal::Str(string) => Ok(self.codegen_string(string)), - ast::Literal::FmtStr(string, number_of_fields, fields) => { + ast::Literal::FmtStr(fragments, number_of_fields, fields) => { + let mut string = String::new(); + for fragment in fragments { + match fragment { + FmtStrFragment::String(value) => { + // Escape curly braces in non-interpolations + let value = value.replace('{', "{{").replace('}', "}}"); + string.push_str(&value); + } + FmtStrFragment::Interpolation(value, _span) => { + string.push('{'); + string.push_str(value); + string.push('}'); + } + } + } + // A caller needs multiple pieces of information to make use of a format string // The message string, the number of fields to be formatted, and the fields themselves - let string = self.codegen_string(string); + let string = self.codegen_string(&string); let field_count = self.builder.length_constant(*number_of_fields as u128); let fields = self.codegen_expression(fields)?; @@ -255,7 +273,7 @@ impl<'a> FunctionContext<'a> { fn codegen_string(&mut self, string: &str) -> Values { let elements = vecmap(string.as_bytes(), |byte| { - let char = self.builder.numeric_constant(*byte as 
u128, Type::unsigned(8)); + let char = self.builder.numeric_constant(*byte as u128, NumericType::char()); (char.into(), false) }); let typ = Self::convert_non_tuple_type(&ast::Type::String(elements.len() as u32)); @@ -332,7 +350,7 @@ impl<'a> FunctionContext<'a> { UnaryOp::Minus => { let rhs = self.codegen_expression(&unary.rhs)?; let rhs = rhs.into_leaf().eval(self); - let typ = self.builder.type_of_value(rhs); + let typ = self.builder.type_of_value(rhs).unwrap_numeric(); let zero = self.builder.numeric_constant(0u128, typ); Ok(self.insert_binary( zero, @@ -426,7 +444,7 @@ impl<'a> FunctionContext<'a> { let index = self.make_array_index(index); let type_size = Self::convert_type(element_type).size_of_type(); let type_size = - self.builder.numeric_constant(type_size as u128, Type::unsigned(SSA_WORD_SIZE)); + self.builder.numeric_constant(type_size as u128, NumericType::length_type()); let base_index = self.builder.set_location(location).insert_binary(index, BinaryOp::Mul, type_size); @@ -465,7 +483,7 @@ impl<'a> FunctionContext<'a> { .make_array_index(length.expect("ICE: a length must be supplied for indexing slices")); let is_offset_out_of_bounds = self.builder.insert_binary(index, BinaryOp::Lt, array_len); - let true_const = self.builder.numeric_constant(true, Type::bool()); + let true_const = self.builder.numeric_constant(true, NumericType::bool()); self.builder.insert_constrain( is_offset_out_of_bounds, @@ -476,7 +494,7 @@ impl<'a> FunctionContext<'a> { fn codegen_cast(&mut self, cast: &ast::Cast) -> Result { let lhs = self.codegen_non_tuple_expression(&cast.lhs)?; - let typ = Self::convert_non_tuple_type(&cast.r#type); + let typ = Self::convert_non_tuple_type(&cast.r#type).unwrap_numeric(); Ok(self.insert_safe_cast(lhs, typ, cast.location).into()) } @@ -685,7 +703,9 @@ impl<'a> FunctionContext<'a> { // Don't mutate the reference count if we're assigning an array literal to a Let: // `let mut foo = [1, 2, 3];` // we consider the array to be moved, so we should 
have an initial rc of just 1. - let should_inc_rc = !let_expr.expression.is_array_or_slice_literal(); + // + // TODO: this exception breaks #6763 + let should_inc_rc = true; // !let_expr.expression.is_array_or_slice_literal(); values = values.map(|value| { let value = value.eval(self); @@ -713,7 +733,7 @@ impl<'a> FunctionContext<'a> { assert_payload: &Option>, ) -> Result { let expr = self.codegen_non_tuple_expression(expr)?; - let true_literal = self.builder.numeric_constant(true, Type::bool()); + let true_literal = self.builder.numeric_constant(true, NumericType::bool()); // Set the location here for any errors that may occur when we codegen the assert message self.builder.set_location(location); diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs index 3dba6dc0a98..de01a4596ad 100644 --- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs +++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ssa_gen/program.rs @@ -25,7 +25,7 @@ pub(crate) struct Ssa { /// This mapping is necessary to use the correct function pointer for an ACIR call, /// as the final program artifact will be a list of only entry point functions. #[serde(skip)] - pub(crate) entry_point_to_generated_index: BTreeMap, + entry_point_to_generated_index: BTreeMap, // We can skip serializing this field as the error selector types end up as part of the // ABI not the actual SSA IR. 
#[serde(skip)] @@ -47,25 +47,11 @@ impl Ssa { (f.id(), f) }); - let entry_point_to_generated_index = btree_map( - functions - .iter() - .filter(|(_, func)| { - let runtime = func.runtime(); - match func.runtime() { - RuntimeType::Acir(_) => runtime.is_entry_point() || func.id() == main_id, - RuntimeType::Brillig(_) => false, - } - }) - .enumerate(), - |(i, (id, _))| (*id, i as u32), - ); - Self { functions, main_id, next_id: AtomicCounter::starting_after(max_id), - entry_point_to_generated_index, + entry_point_to_generated_index: BTreeMap::new(), error_selector_to_type: error_types, } } @@ -98,6 +84,33 @@ impl Ssa { self.functions.insert(new_id, function); new_id } + pub(crate) fn generate_entry_point_index(mut self) -> Self { + self.entry_point_to_generated_index = btree_map( + self.functions + .iter() + .filter(|(_, func)| { + let runtime = func.runtime(); + match func.runtime() { + RuntimeType::Acir(_) => { + runtime.is_entry_point() || func.id() == self.main_id + } + RuntimeType::Brillig(_) => false, + } + }) + .enumerate(), + |(i, (id, _))| (*id, i as u32), + ); + self + } + + pub(crate) fn get_entry_point_index(&self, func_id: &FunctionId) -> Option { + // Ensure the map has been initialized + assert!( + !self.entry_point_to_generated_index.is_empty(), + "Trying to read uninitialized entry point index" + ); + self.entry_point_to_generated_index.get(func_id).copied() + } } impl Display for Ssa { diff --git a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml index 5d1520af54f..5f8f02689c8 100644 --- a/noir/noir-repo/compiler/noirc_frontend/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_frontend/Cargo.toml @@ -25,7 +25,6 @@ num-bigint.workspace = true num-traits.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" -regex = "1.9.1" cfg-if.workspace = true tracing.workspace = true petgraph = "0.6" diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs 
b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs index 2c8a9b6508d..ae622f46686 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/expression.rs @@ -10,7 +10,7 @@ use crate::ast::{ use crate::node_interner::{ ExprId, InternedExpressionKind, InternedStatementKind, QuotedTypeId, StructId, }; -use crate::token::{Attributes, FunctionAttribute, Token, Tokens}; +use crate::token::{Attributes, FmtStrFragment, FunctionAttribute, Token, Tokens}; use crate::{Kind, Type}; use acvm::{acir::AcirField, FieldElement}; use iter_extended::vecmap; @@ -210,8 +210,8 @@ impl ExpressionKind { ExpressionKind::Literal(Literal::RawStr(contents, hashes)) } - pub fn format_string(contents: String) -> ExpressionKind { - ExpressionKind::Literal(Literal::FmtStr(contents)) + pub fn format_string(fragments: Vec, length: u32) -> ExpressionKind { + ExpressionKind::Literal(Literal::FmtStr(fragments, length)) } pub fn constructor( @@ -434,7 +434,7 @@ pub enum Literal { Integer(FieldElement, /*sign*/ bool), // false for positive integer and true for negative Str(String), RawStr(String, u8), - FmtStr(String), + FmtStr(Vec, u32 /* length */), Unit, } @@ -669,7 +669,13 @@ impl Display for Literal { std::iter::once('#').cycle().take(*num_hashes as usize).collect(); write!(f, "r{hashes}\"{string}\"{hashes}") } - Literal::FmtStr(string) => write!(f, "f\"{string}\""), + Literal::FmtStr(fragments, _length) => { + write!(f, "f\"")?; + for fragment in fragments { + fragment.fmt(f)?; + } + write!(f, "\"") + } Literal::Unit => write!(f, "()"), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs index f149c998eca..2f60532980a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/ast/visitor.rs @@ -16,7 +16,7 @@ use crate::{ InternedUnresolvedTypeData, QuotedTypeId, }, 
parser::{Item, ItemKind, ParsedSubModule}, - token::{MetaAttribute, SecondaryAttribute, Tokens}, + token::{FmtStrFragment, MetaAttribute, SecondaryAttribute, Tokens}, ParsedModule, QuotedType, }; @@ -172,7 +172,7 @@ pub trait Visitor { fn visit_literal_raw_str(&mut self, _: &str, _: u8) {} - fn visit_literal_fmt_str(&mut self, _: &str) {} + fn visit_literal_fmt_str(&mut self, _: &[FmtStrFragment], _length: u32) {} fn visit_literal_unit(&mut self) {} @@ -900,7 +900,7 @@ impl Literal { Literal::Integer(value, negative) => visitor.visit_literal_integer(*value, *negative), Literal::Str(str) => visitor.visit_literal_str(str), Literal::RawStr(str, length) => visitor.visit_literal_raw_str(str, *length), - Literal::FmtStr(str) => visitor.visit_literal_fmt_str(str), + Literal::FmtStr(fragments, length) => visitor.visit_literal_fmt_str(fragments, *length), Literal::Unit => visitor.visit_literal_unit(), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs index 962356d6dd9..fe8c8338b32 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/comptime.rs @@ -39,6 +39,8 @@ struct AttributeContext { attribute_module: LocalModuleId, } +type CollectedAttributes = Vec<(FuncId, Value, Vec, AttributeContext, Span)>; + impl AttributeContext { fn new(file: FileId, module: LocalModuleId) -> Self { Self { file, module, attribute_file: file, attribute_module: module } @@ -131,41 +133,37 @@ impl<'context> Elaborator<'context> { } } - fn run_comptime_attributes_on_item( + fn collect_comptime_attributes_on_item( &mut self, attributes: &[SecondaryAttribute], item: Value, - span: Span, attribute_context: AttributeContext, - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { for attribute in attributes { - self.run_comptime_attribute_on_item( + 
self.collect_comptime_attribute_on_item( attribute, &item, - span, attribute_context, - generated_items, + attributes_to_run, ); } } - fn run_comptime_attribute_on_item( + fn collect_comptime_attribute_on_item( &mut self, attribute: &SecondaryAttribute, item: &Value, - span: Span, attribute_context: AttributeContext, - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { if let SecondaryAttribute::Meta(attribute) = attribute { self.elaborate_in_comptime_context(|this| { - if let Err(error) = this.run_comptime_attribute_name_on_item( + if let Err(error) = this.collect_comptime_attribute_name_on_item( attribute, item.clone(), - span, attribute_context, - generated_items, + attributes_to_run, ) { this.errors.push(error); } @@ -173,22 +171,19 @@ impl<'context> Elaborator<'context> { } } - fn run_comptime_attribute_name_on_item( + /// Resolve an attribute to the function it refers to and add it to `attributes_to_run` + fn collect_comptime_attribute_name_on_item( &mut self, attribute: &MetaAttribute, item: Value, - span: Span, attribute_context: AttributeContext, - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) -> Result<(), (CompilationError, FileId)> { self.file = attribute_context.attribute_file; self.local_module = attribute_context.attribute_module; + let span = attribute.span; - let location = Location::new(attribute.span, self.file); - let function = Expression { - kind: ExpressionKind::Variable(attribute.name.clone()), - span: attribute.span, - }; + let function = Expression { kind: ExpressionKind::Variable(attribute.name.clone()), span }; let arguments = attribute.arguments.clone(); // Elaborate the function, rolling back any errors generated in case it is unknown @@ -200,32 +195,34 @@ impl<'context> Elaborator<'context> { let definition_id = match self.interner.expression(&function) { HirExpression::Ident(ident, _) => ident.id, _ => { - return Err(( - 
ResolverError::AttributeFunctionIsNotAPath { - function: function_string, - span: attribute.span, - } - .into(), - self.file, - )) + let error = + ResolverError::AttributeFunctionIsNotAPath { function: function_string, span }; + return Err((error.into(), self.file)); } }; let Some(definition) = self.interner.try_definition(definition_id) else { - return Err(( - ResolverError::AttributeFunctionNotInScope { - name: function_string, - span: attribute.span, - } - .into(), - self.file, - )); + let error = ResolverError::AttributeFunctionNotInScope { name: function_string, span }; + return Err((error.into(), self.file)); }; let DefinitionKind::Function(function) = definition.kind else { return Err((ResolverError::NonFunctionInAnnotation { span }.into(), self.file)); }; + attributes_to_run.push((function, item, arguments, attribute_context, span)); + Ok(()) + } + + fn run_attribute( + &mut self, + attribute_context: AttributeContext, + function: FuncId, + arguments: Vec, + item: Value, + location: Location, + generated_items: &mut CollectedItems, + ) -> Result<(), (CompilationError, FileId)> { self.file = attribute_context.file; self.local_module = attribute_context.module; @@ -237,10 +234,7 @@ impl<'context> Elaborator<'context> { arguments, location, ) - .map_err(|error| { - let file = error.get_location().file; - (error.into(), file) - })?; + .map_err(|error| error.into_compilation_error_pair())?; arguments.insert(0, (item, location)); @@ -496,65 +490,91 @@ impl<'context> Elaborator<'context> { } } - /// Run all the attributes on each item. The ordering is unspecified to users but currently - /// we run trait attributes first to (e.g.) register derive handlers before derive is - /// called on structs. - /// Returns any new items generated by attributes. + /// Run all the attributes on each item in the crate in source-order. + /// Source-order is defined as running all child modules before their parent modules are run. 
+ /// Child modules of a parent are run in order of their `mod foo;` declarations in the parent. pub(super) fn run_attributes( &mut self, traits: &BTreeMap, types: &BTreeMap, functions: &[UnresolvedFunctions], module_attributes: &[ModuleAttribute], - ) -> CollectedItems { - let mut generated_items = CollectedItems::default(); + ) { + let mut attributes_to_run = Vec::new(); for (trait_id, trait_) in traits { let attributes = &trait_.trait_def.attributes; let item = Value::TraitDefinition(*trait_id); - let span = trait_.trait_def.span; let context = AttributeContext::new(trait_.file_id, trait_.module_id); - self.run_comptime_attributes_on_item( + self.collect_comptime_attributes_on_item( attributes, item, - span, context, - &mut generated_items, + &mut attributes_to_run, ); } for (struct_id, struct_def) in types { let attributes = &struct_def.struct_def.attributes; let item = Value::StructDefinition(*struct_id); - let span = struct_def.struct_def.span; let context = AttributeContext::new(struct_def.file_id, struct_def.module_id); - self.run_comptime_attributes_on_item( + self.collect_comptime_attributes_on_item( attributes, item, - span, context, - &mut generated_items, + &mut attributes_to_run, ); } - self.run_attributes_on_functions(functions, &mut generated_items); + self.collect_attributes_on_functions(functions, &mut attributes_to_run); + self.collect_attributes_on_modules(module_attributes, &mut attributes_to_run); + + self.sort_attributes_by_run_order(&mut attributes_to_run); - self.run_attributes_on_modules(module_attributes, &mut generated_items); + // run + for (attribute, item, args, context, span) in attributes_to_run { + let location = Location::new(span, context.attribute_file); - generated_items + let mut generated_items = CollectedItems::default(); + self.elaborate_in_comptime_context(|this| { + if let Err(error) = this.run_attribute( + context, + attribute, + args, + item, + location, + &mut generated_items, + ) { + this.errors.push(error); + } + }); 
+ + if !generated_items.is_empty() { + self.elaborate_items(generated_items); + } + } } - fn run_attributes_on_modules( + fn sort_attributes_by_run_order(&self, attributes: &mut CollectedAttributes) { + let module_order = self.def_maps[&self.crate_id].get_module_topological_order(); + + // Sort each attribute by (module, location in file) so that we can execute in + // the order they were defined in, running attributes in child modules first. + attributes.sort_by_key(|(_, _, _, ctx, span)| { + (module_order[&ctx.attribute_module], span.start()) + }); + } + + fn collect_attributes_on_modules( &mut self, module_attributes: &[ModuleAttribute], - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { for module_attribute in module_attributes { let local_id = module_attribute.module_id; let module_id = ModuleId { krate: self.crate_id, local_id }; let item = Value::ModuleDefinition(module_id); let attribute = &module_attribute.attribute; - let span = Span::default(); let context = AttributeContext { file: module_attribute.file_id, @@ -563,14 +583,14 @@ impl<'context> Elaborator<'context> { attribute_module: module_attribute.attribute_module_id, }; - self.run_comptime_attribute_on_item(attribute, &item, span, context, generated_items); + self.collect_comptime_attribute_on_item(attribute, &item, context, attributes_to_run); } } - fn run_attributes_on_functions( + fn collect_attributes_on_functions( &mut self, function_sets: &[UnresolvedFunctions], - generated_items: &mut CollectedItems, + attributes_to_run: &mut CollectedAttributes, ) { for function_set in function_sets { self.self_type = function_set.self_type.clone(); @@ -579,13 +599,11 @@ impl<'context> Elaborator<'context> { let context = AttributeContext::new(function_set.file_id, *local_module); let attributes = function.secondary_attributes(); let item = Value::FunctionDefinition(*function_id); - let span = function.span(); - self.run_comptime_attributes_on_item( + 
self.collect_comptime_attributes_on_item( attributes, item, - span, context, - generated_items, + attributes_to_run, ); } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs index f801c1817ef..b4ea06f1030 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/expressions.rs @@ -1,7 +1,6 @@ use acvm::{AcirField, FieldElement}; use iter_extended::vecmap; use noirc_errors::{Location, Span}; -use regex::Regex; use rustc_hash::FxHashSet as HashSet; use crate::{ @@ -29,7 +28,7 @@ use crate::{ traits::{ResolvedTraitBound, TraitConstraint}, }, node_interner::{DefinitionKind, ExprId, FuncId, InternedStatementKind, TraitMethodId}, - token::Tokens, + token::{FmtStrFragment, Tokens}, Kind, QuotedType, Shared, StructType, Type, }; @@ -167,7 +166,7 @@ impl<'context> Elaborator<'context> { let len = Type::Constant(str.len().into(), Kind::u32()); (Lit(HirLiteral::Str(str)), Type::String(Box::new(len))) } - Literal::FmtStr(str) => self.elaborate_fmt_string(str, span), + Literal::FmtStr(fragments, length) => self.elaborate_fmt_string(fragments, length), Literal::Array(array_literal) => { self.elaborate_array_literal(array_literal, span, true) } @@ -234,53 +233,50 @@ impl<'context> Elaborator<'context> { (HirExpression::Literal(constructor(expr)), typ) } - fn elaborate_fmt_string(&mut self, str: String, call_expr_span: Span) -> (HirExpression, Type) { - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}") - .expect("ICE: an invalid regex pattern was used for checking format strings"); - + fn elaborate_fmt_string( + &mut self, + fragments: Vec, + length: u32, + ) -> (HirExpression, Type) { let mut fmt_str_idents = Vec::new(); let mut capture_types = Vec::new(); - for field in re.find_iter(&str) { - let matched_str = field.as_str(); - let ident_name = &matched_str[1..(matched_str.len() - 1)]; - - let 
scope_tree = self.scopes.current_scope_tree(); - let variable = scope_tree.find(ident_name); - - let hir_ident = if let Some((old_value, _)) = variable { - old_value.num_times_used += 1; - old_value.ident.clone() - } else if let Ok((definition_id, _)) = - self.lookup_global(Path::from_single(ident_name.to_string(), call_expr_span)) - { - HirIdent::non_trait_method(definition_id, Location::new(call_expr_span, self.file)) - } else if ident_name.parse::().is_ok() { - self.push_err(ResolverError::NumericConstantInFormatString { - name: ident_name.to_owned(), - span: call_expr_span, - }); - continue; - } else { - self.push_err(ResolverError::VariableNotDeclared { - name: ident_name.to_owned(), - span: call_expr_span, - }); - continue; - }; + for fragment in &fragments { + if let FmtStrFragment::Interpolation(ident_name, string_span) = fragment { + let scope_tree = self.scopes.current_scope_tree(); + let variable = scope_tree.find(ident_name); + + let hir_ident = if let Some((old_value, _)) = variable { + old_value.num_times_used += 1; + old_value.ident.clone() + } else if let Ok((definition_id, _)) = + self.lookup_global(Path::from_single(ident_name.to_string(), *string_span)) + { + HirIdent::non_trait_method( + definition_id, + Location::new(*string_span, self.file), + ) + } else { + self.push_err(ResolverError::VariableNotDeclared { + name: ident_name.to_owned(), + span: *string_span, + }); + continue; + }; - let hir_expr = HirExpression::Ident(hir_ident.clone(), None); - let expr_id = self.interner.push_expr(hir_expr); - self.interner.push_expr_location(expr_id, call_expr_span, self.file); - let typ = self.type_check_variable(hir_ident, expr_id, None); - self.interner.push_expr_type(expr_id, typ.clone()); - capture_types.push(typ); - fmt_str_idents.push(expr_id); + let hir_expr = HirExpression::Ident(hir_ident.clone(), None); + let expr_id = self.interner.push_expr(hir_expr); + self.interner.push_expr_location(expr_id, *string_span, self.file); + let typ = 
self.type_check_variable(hir_ident, expr_id, None); + self.interner.push_expr_type(expr_id, typ.clone()); + capture_types.push(typ); + fmt_str_idents.push(expr_id); + } } - let len = Type::Constant(str.len().into(), Kind::u32()); + let len = Type::Constant(length.into(), Kind::u32()); let typ = Type::FmtString(Box::new(len), Box::new(Type::Tuple(capture_types))); - (HirExpression::Literal(HirLiteral::FmtStr(str, fmt_str_idents)), typ) + (HirExpression::Literal(HirLiteral::FmtStr(fragments, fmt_str_idents, length)), typ) } fn elaborate_prefix(&mut self, prefix: PrefixExpression, span: Span) -> (ExprId, Type) { @@ -350,6 +346,10 @@ impl<'context> Elaborator<'context> { Type::Array(_, base_type) => *base_type, Type::Slice(base_type) => *base_type, Type::Error => Type::Error, + Type::TypeVariable(_) => { + self.push_err(TypeCheckError::TypeAnnotationsNeededForIndex { span: lhs_span }); + Type::Error + } typ => { self.push_err(TypeCheckError::TypeMismatch { expected_typ: "Array".to_owned(), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs index 478504a79be..fe1d1e38e1a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/mod.rs @@ -307,23 +307,13 @@ impl<'context> Elaborator<'context> { // We have to run any comptime attributes on functions before the function is elaborated // since the generated items are checked beforehand as well. - let generated_items = self.run_attributes( + self.run_attributes( &items.traits, &items.types, &items.functions, &items.module_attributes, ); - // After everything is collected, we can elaborate our generated items. - // It may be better to inline these within `items` entirely since elaborating them - // all here means any globals will not see these. 
Inlining them completely within `items` - // means we must be more careful about missing any additional items that need to be already - // elaborated. E.g. if a new struct is created, we've already passed the code path to - // elaborate them. - if !generated_items.is_empty() { - self.elaborate_items(generated_items); - } - for functions in items.functions { self.elaborate_functions(functions); } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs index 6ed8fee753c..93009f49071 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/statements.rs @@ -183,20 +183,20 @@ impl<'context> Elaborator<'context> { } pub(super) fn elaborate_assign(&mut self, assign: AssignStatement) -> (HirStatement, Type) { - let span = assign.expression.span; + let expr_span = assign.expression.span; let (expression, expr_type) = self.elaborate_expression(assign.expression); - let (lvalue, lvalue_type, mutable) = self.elaborate_lvalue(assign.lvalue, span); + let (lvalue, lvalue_type, mutable) = self.elaborate_lvalue(assign.lvalue); if !mutable { let (name, span) = self.get_lvalue_name_and_span(&lvalue); self.push_err(TypeCheckError::VariableMustBeMutable { name, span }); } - self.unify_with_coercions(&expr_type, &lvalue_type, expression, span, || { + self.unify_with_coercions(&expr_type, &lvalue_type, expression, expr_span, || { TypeCheckError::TypeMismatchWithSource { actual: expr_type.clone(), expected: lvalue_type.clone(), - span, + span: expr_span, source: Source::Assignment, } }); @@ -296,7 +296,7 @@ impl<'context> Elaborator<'context> { } } - fn elaborate_lvalue(&mut self, lvalue: LValue, assign_span: Span) -> (HirLValue, Type, bool) { + fn elaborate_lvalue(&mut self, lvalue: LValue) -> (HirLValue, Type, bool) { match lvalue { LValue::Ident(ident) => { let mut mutable = true; @@ -330,7 +330,7 @@ 
impl<'context> Elaborator<'context> { (HirLValue::Ident(ident.clone(), typ.clone()), typ, mutable) } LValue::MemberAccess { object, field_name, span } => { - let (object, lhs_type, mut mutable) = self.elaborate_lvalue(*object, assign_span); + let (object, lhs_type, mut mutable) = self.elaborate_lvalue(*object); let mut object = Box::new(object); let field_name = field_name.clone(); @@ -374,8 +374,7 @@ impl<'context> Elaborator<'context> { expr_span, }); - let (mut lvalue, mut lvalue_type, mut mutable) = - self.elaborate_lvalue(*array, assign_span); + let (mut lvalue, mut lvalue_type, mut mutable) = self.elaborate_lvalue(*array); // Before we check that the lvalue is an array, try to dereference it as many times // as needed to unwrap any &mut wrappers. @@ -397,12 +396,15 @@ impl<'context> Elaborator<'context> { self.push_err(TypeCheckError::StringIndexAssign { span: lvalue_span }); Type::Error } + Type::TypeVariable(_) => { + self.push_err(TypeCheckError::TypeAnnotationsNeededForIndex { span }); + Type::Error + } other => { - // TODO: Need a better span here self.push_err(TypeCheckError::TypeMismatch { expected_typ: "array".to_string(), expr_typ: other.to_string(), - expr_span: assign_span, + expr_span: span, }); Type::Error } @@ -413,7 +415,7 @@ impl<'context> Elaborator<'context> { (HirLValue::Index { array, index, typ, location }, array_type, mutable) } LValue::Dereference(lvalue, span) => { - let (lvalue, reference_type, _) = self.elaborate_lvalue(*lvalue, assign_span); + let (lvalue, reference_type, _) = self.elaborate_lvalue(*lvalue); let lvalue = Box::new(lvalue); let location = Location::new(span, self.file); @@ -423,7 +425,7 @@ impl<'context> Elaborator<'context> { self.unify(&reference_type, &expected_type, || TypeCheckError::TypeMismatch { expected_typ: expected_type.to_string(), expr_typ: reference_type.to_string(), - expr_span: assign_span, + expr_span: span, }); // Dereferences are always mutable since we already type checked against a &mut T @@ 
-433,7 +435,7 @@ impl<'context> Elaborator<'context> { } LValue::Interned(id, span) => { let lvalue = self.interner.get_lvalue(id, span).clone(); - self.elaborate_lvalue(lvalue, assign_span) + self.elaborate_lvalue(lvalue) } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs index 0404ae3c2c0..2e4809f3511 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/elaborator/types.rs @@ -576,7 +576,7 @@ impl<'context> Elaborator<'context> { fn resolve_trait_static_method(&mut self, path: &Path) -> Option { let path_resolution = self.resolve_path(path.clone()).ok()?; let func_id = path_resolution.item.function_id()?; - let meta = self.interner.function_meta(&func_id); + let meta = self.interner.try_function_meta(&func_id)?; let the_trait = self.interner.get_trait(meta.trait_id?); let method = the_trait.find_method(path.last_name())?; let constraint = the_trait.as_constraint(path.span); diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs index 560d11cfa2e..29d1448f07e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/display.rs @@ -661,7 +661,7 @@ fn remove_interned_in_literal(interner: &NodeInterner, literal: Literal) -> Lite | Literal::Integer(_, _) | Literal::Str(_) | Literal::RawStr(_, _) - | Literal::FmtStr(_) + | Literal::FmtStr(_, _) | Literal::Unit => literal, } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs index 446c4dae2d3..3df20b39209 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/errors.rs @@ -240,6 +240,9 @@ pub enum 
InterpreterError { err: Box, location: Location, }, + CannotInterpretFormatStringWithErrors { + location: Location, + }, // These cases are not errors, they are just used to prevent us from running more code // until the loop can be resumed properly. These cases will never be displayed to users. @@ -315,7 +318,8 @@ impl InterpreterError { | InterpreterError::TypeAnnotationsNeededForMethodCall { location } | InterpreterError::CannotResolveExpression { location, .. } | InterpreterError::CannotSetFunctionBody { location, .. } - | InterpreterError::UnknownArrayLength { location, .. } => *location, + | InterpreterError::UnknownArrayLength { location, .. } + | InterpreterError::CannotInterpretFormatStringWithErrors { location } => *location, InterpreterError::FailedToParseMacro { error, file, .. } => { Location::new(error.span(), *file) @@ -664,6 +668,12 @@ impl<'a> From<&'a InterpreterError> for CustomDiagnostic { let secondary = format!("Evaluating the length failed with: `{err}`"); CustomDiagnostic::simple_error(msg, secondary, location.span) } + InterpreterError::CannotInterpretFormatStringWithErrors { location } => { + let msg = "Cannot interpret format string with errors".to_string(); + let secondary = + "Some of the variables to interpolate could not be evaluated".to_string(); + CustomDiagnostic::simple_error(msg, secondary, location.span) + } } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs index 5540a199cec..9338c0fc37f 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/hir_to_display_ast.rs @@ -121,9 +121,9 @@ impl HirExpression { HirExpression::Literal(HirLiteral::Str(string)) => { ExpressionKind::Literal(Literal::Str(string.clone())) } - HirExpression::Literal(HirLiteral::FmtStr(string, _exprs)) => { + 
HirExpression::Literal(HirLiteral::FmtStr(fragments, _exprs, length)) => { // TODO: Is throwing away the exprs here valid? - ExpressionKind::Literal(Literal::FmtStr(string.clone())) + ExpressionKind::Literal(Literal::FmtStr(fragments.clone(), *length)) } HirExpression::Literal(HirLiteral::Unit) => ExpressionKind::Literal(Literal::Unit), HirExpression::Block(expr) => ExpressionKind::Block(expr.to_display_ast(interner)), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs index 49fd86b73bb..dfa55a9d79b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter.rs @@ -20,7 +20,7 @@ use crate::monomorphization::{ perform_impl_bindings, perform_instantiation_bindings, resolve_trait_method, undo_instantiation_bindings, }; -use crate::token::Tokens; +use crate::token::{FmtStrFragment, Tokens}; use crate::TypeVariable; use crate::{ hir_def::{ @@ -623,8 +623,8 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { self.evaluate_integer(value, is_negative, id) } HirLiteral::Str(string) => Ok(Value::String(Rc::new(string))), - HirLiteral::FmtStr(string, captures) => { - self.evaluate_format_string(string, captures, id) + HirLiteral::FmtStr(fragments, captures, _length) => { + self.evaluate_format_string(fragments, captures, id) } HirLiteral::Array(array) => self.evaluate_array(array, id), HirLiteral::Slice(array) => self.evaluate_slice(array, id), @@ -633,7 +633,7 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { fn evaluate_format_string( &mut self, - string: String, + fragments: Vec, captures: Vec, id: ExprId, ) -> IResult { @@ -644,13 +644,12 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { let mut values: VecDeque<_> = captures.into_iter().map(|capture| self.evaluate(capture)).collect::>()?; - for character in string.chars() { - match character { 
- '\\' => escaped = true, - '{' if !escaped => consuming = true, - '}' if !escaped && consuming => { - consuming = false; - + for fragment in fragments { + match fragment { + FmtStrFragment::String(string) => { + result.push_str(&string); + } + FmtStrFragment::Interpolation(_, span) => { if let Some(value) = values.pop_front() { // When interpolating a quoted value inside a format string, we don't include the // surrounding `quote {` ... `}` as if we are unquoting the quoted value inside the string. @@ -665,13 +664,15 @@ impl<'local, 'interner> Interpreter<'local, 'interner> { } else { result.push_str(&value.display(self.elaborator.interner).to_string()); } + } else { + // If we can't find a value for this fragment it means the interpolated value was not + // found or it errored. In this case we error here as well. + let location = self.elaborator.interner.expr_location(&id); + return Err(InterpreterError::CannotInterpretFormatStringWithErrors { + location, + }); } } - other if !consuming => { - escaped = false; - result.push(other); - } - _ => (), } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs index d2611f72535..99cc11ecd2a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/comptime/interpreter/foreign.rs @@ -87,7 +87,6 @@ fn call_foreign( "sha256_compression" => sha256_compression(interner, args, location), _ => { let explanation = match name { - "schnorr_verify" => "Schnorr verification will be removed.".into(), "and" | "xor" => "It should be turned into a binary operation.".into(), "recursive_aggregation" => "A proof cannot be verified at comptime.".into(), _ => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 
51e62599b05..33dab802b21 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -119,9 +119,11 @@ pub struct ModuleAttribute { pub file_id: FileId, // The module this attribute is attached to pub module_id: LocalModuleId, + // The file where the attribute exists (it could be the same as `file_id` - // or a different one if it's an inner attribute in a different file) + // or a different one if it is an outer attribute in the parent of the module it applies to) pub attribute_file_id: FileId, + // The module where the attribute is defined (similar to `attribute_file_id`, // it could be different than `module_id` for inner attributes) pub attribute_module_id: LocalModuleId, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs index 3bb16a92fdb..d9d6e150a7a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -289,6 +289,29 @@ impl CrateDefMap { String::new() } } + + /// Return a topological ordering of each module such that any child modules + /// are before their parent modules. Sibling modules will respect the ordering + /// declared from their parent module (the `mod foo; mod bar;` declarations). 
+ pub fn get_module_topological_order(&self) -> HashMap { + let mut ordering = HashMap::default(); + self.topologically_sort_modules(self.root, &mut 0, &mut ordering); + ordering + } + + fn topologically_sort_modules( + &self, + current: LocalModuleId, + index: &mut usize, + ordering: &mut HashMap, + ) { + for child in &self.modules[current.0].child_declaration_order { + self.topologically_sort_modules(*child, index, ordering); + } + + ordering.insert(current, *index); + *index += 1; + } } /// Specifies a contract function and extra metadata that diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs index fe6fe8285d3..06188f3920b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/def_map/module_data.rs @@ -14,6 +14,11 @@ pub struct ModuleData { pub parent: Option, pub children: HashMap, + /// Each child in the order they were declared in the parent module. + /// E.g. for a module containing `mod foo; mod bar; mod baz` this would + /// be `vec![foo, bar, baz]`. + pub child_declaration_order: Vec, + /// Contains all definitions visible to the current module. This includes /// all definitions in self.definitions as well as all imported definitions. scope: ItemScope, @@ -47,6 +52,7 @@ impl ModuleData { ModuleData { parent, children: HashMap::new(), + child_declaration_order: Vec::new(), scope: ItemScope::default(), definitions: ItemScope::default(), location, @@ -73,6 +79,10 @@ impl ModuleData { ) -> Result<(), (Ident, Ident)> { self.scope.add_definition(name.clone(), visibility, item_id, trait_id)?; + if let ModuleDefId::ModuleId(child) = item_id { + self.child_declaration_order.push(child.local_id); + } + // definitions is a subset of self.scope so it is expected if self.scope.define_func_def // returns without error, so will self.definitions.define_func_def. 
self.definitions.add_definition(name, visibility, item_id, trait_id) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs index 5c8e0a1b53e..774836f8992 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -77,8 +77,6 @@ pub enum ResolverError { MutableReferenceToImmutableVariable { variable: String, span: Span }, #[error("Mutable references to array indices are unsupported")] MutableReferenceToArrayElement { span: Span }, - #[error("Numeric constants should be printed without formatting braces")] - NumericConstantInFormatString { name: String, span: Span }, #[error("Closure environment must be a tuple or unit type")] InvalidClosureEnvironment { typ: Type, span: Span }, #[error("Nested slices, i.e. slices within an array or slice, are not supported")] @@ -378,11 +376,6 @@ impl<'a> From<&'a ResolverError> for Diagnostic { ResolverError::MutableReferenceToArrayElement { span } => { Diagnostic::simple_error("Mutable references to array elements are currently unsupported".into(), "Try storing the element in a fresh variable first".into(), *span) }, - ResolverError::NumericConstantInFormatString { name, span } => Diagnostic::simple_error( - format!("cannot find `{name}` in this scope "), - "Numeric constants should be printed without formatting braces".to_string(), - *span, - ), ResolverError::InvalidClosureEnvironment { span, typ } => Diagnostic::simple_error( format!("{typ} is not a valid closure environment type"), "Closure environment must be a tuple or unit type".to_string(), *span), diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs index dfa431157e3..15b8d50c78b 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs +++ 
b/noir/noir-repo/compiler/noirc_frontend/src/hir/type_check/errors.rs @@ -206,6 +206,8 @@ pub enum TypeCheckError { UnspecifiedType { span: Span }, #[error("Binding `{typ}` here to the `_` inside would create a cyclic type")] CyclicType { typ: Type, span: Span }, + #[error("Type annotations required before indexing this array or slice")] + TypeAnnotationsNeededForIndex { span: Span }, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -520,6 +522,13 @@ impl<'a> From<&'a TypeCheckError> for Diagnostic { *span, ) }, + TypeCheckError::TypeAnnotationsNeededForIndex { span } => { + Diagnostic::simple_error( + "Type annotations required before indexing this array or slice".into(), + "Type annotations needed before this point, can't decide if this is an array or slice".into(), + *span, + ) + }, } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs index 5d3fe632a74..e243fc88cff 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/hir_def/expr.rs @@ -7,7 +7,7 @@ use crate::hir::type_check::generics::TraitGenerics; use crate::node_interner::{ DefinitionId, DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, TraitMethodId, }; -use crate::token::Tokens; +use crate::token::{FmtStrFragment, Tokens}; use crate::Shared; use super::stmt::HirPattern; @@ -114,7 +114,7 @@ pub enum HirLiteral { Bool(bool), Integer(FieldElement, bool), //true for negative integer and false for positive Str(String), - FmtStr(String, Vec), + FmtStr(Vec, Vec, u32 /* length */), Unit, } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs index 8d799ef35d1..f95ccba061a 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/errors.rs @@ -30,6 +30,10 @@ pub enum LexerErrorKind { UnterminatedBlockComment { span: 
Span }, #[error("Unterminated string literal")] UnterminatedStringLiteral { span: Span }, + #[error("Invalid format string: expected '}}', found {found:?}")] + InvalidFormatString { found: char, span: Span }, + #[error("Invalid format string: expected letter or underscore, found '}}'")] + EmptyFormatStringInterpolation { span: Span }, #[error( "'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character." )] @@ -68,6 +72,8 @@ impl LexerErrorKind { LexerErrorKind::LogicalAnd { span } => *span, LexerErrorKind::UnterminatedBlockComment { span } => *span, LexerErrorKind::UnterminatedStringLiteral { span } => *span, + LexerErrorKind::InvalidFormatString { span, .. } => *span, + LexerErrorKind::EmptyFormatStringInterpolation { span, .. } => *span, LexerErrorKind::InvalidEscape { span, .. } => *span, LexerErrorKind::InvalidQuoteDelimiter { delimiter } => delimiter.to_span(), LexerErrorKind::NonAsciiComment { span, .. } => *span, @@ -130,6 +136,32 @@ impl LexerErrorKind { LexerErrorKind::UnterminatedBlockComment { span } => ("Unterminated block comment".to_string(), "Unterminated block comment".to_string(), *span), LexerErrorKind::UnterminatedStringLiteral { span } => ("Unterminated string literal".to_string(), "Unterminated string literal".to_string(), *span), + LexerErrorKind::InvalidFormatString { found, span } => { + if found == &'}' { + ( + "Invalid format string: unmatched '}}' found".to_string(), + "If you intended to print '}', you can escape it using '}}'".to_string(), + *span, + ) + } else { + ( + format!("Invalid format string: expected '}}', found {found:?}"), + if found == &'.' 
{ + "Field access isn't supported in format strings".to_string() + } else { + "If you intended to print '{', you can escape it using '{{'".to_string() + }, + *span, + ) + } + } + LexerErrorKind::EmptyFormatStringInterpolation { span } => { + ( + "Invalid format string: expected letter or underscore, found '}}'".to_string(), + "If you intended to print '{' or '}', you can escape them using '{{' and '}}' respectively".to_string(), + *span, + ) + } LexerErrorKind::InvalidEscape { escaped, span } => (format!("'\\{escaped}' is not a valid escape sequence. Use '\\' for a literal backslash character."), "Invalid escape sequence".to_string(), *span), LexerErrorKind::InvalidQuoteDelimiter { delimiter } => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs index 68dc142ff10..a5c4b2cd772 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/lexer.rs @@ -2,7 +2,7 @@ use crate::token::DocStyle; use super::{ errors::LexerErrorKind, - token::{IntType, Keyword, SpannedToken, Token, Tokens}, + token::{FmtStrFragment, IntType, Keyword, SpannedToken, Token, Tokens}, }; use acvm::{AcirField, FieldElement}; use noirc_errors::{Position, Span}; @@ -411,51 +411,190 @@ impl<'a> Lexer<'a> { let start = self.position; let mut string = String::new(); - while let Some(next) = self.next_char() { - let char = match next { - '"' => break, - '\\' => match self.next_char() { - Some('r') => '\r', - Some('n') => '\n', - Some('t') => '\t', - Some('0') => '\0', - Some('"') => '"', - Some('\\') => '\\', - Some(escaped) => { - let span = Span::inclusive(start, self.position); - return Err(LexerErrorKind::InvalidEscape { escaped, span }); - } - None => { - let span = Span::inclusive(start, self.position); - return Err(LexerErrorKind::UnterminatedStringLiteral { span }); - } - }, - other => other, - }; + loop { + if let Some(next) = self.next_char() { + 
let char = match next { + '"' => break, + '\\' => match self.next_char() { + Some('r') => '\r', + Some('n') => '\n', + Some('t') => '\t', + Some('0') => '\0', + Some('"') => '"', + Some('\\') => '\\', + Some(escaped) => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::InvalidEscape { escaped, span }); + } + None => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + }, + other => other, + }; - string.push(char); + string.push(char); + } else { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } } let str_literal_token = Token::Str(string); - let end = self.position; Ok(str_literal_token.into_span(start, end)) } - // This differs from `eat_string_literal` in that we want the leading `f` to be captured in the Span fn eat_fmt_string(&mut self) -> SpannedTokenResult { let start = self.position; - self.next_char(); - let str_literal = self.eat_while(None, |ch| ch != '"'); + let mut fragments = Vec::new(); + let mut length = 0; + + loop { + // String fragment until '{' or '"' + let mut string = String::new(); + let mut found_curly = false; + + loop { + if let Some(next) = self.next_char() { + let char = match next { + '"' => break, + '\\' => match self.next_char() { + Some('r') => '\r', + Some('n') => '\n', + Some('t') => '\t', + Some('0') => '\0', + Some('"') => '"', + Some('\\') => '\\', + Some(escaped) => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::InvalidEscape { escaped, span }); + } + None => { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + }, + '{' if self.peek_char_is('{') => { + self.next_char(); + '{' + } + '}' if self.peek_char_is('}') => { + self.next_char(); + '}' + } + '}' => { + let error_position = self.position; + + // Keep consuming chars until we find the closing 
double quote + self.skip_until_string_end(); + + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::InvalidFormatString { found: '}', span }); + } + '{' => { + found_curly = true; + break; + } + other => other, + }; + + string.push(char); + length += 1; + + if char == '{' || char == '}' { + // This might look a bit strange, but if there's `{{` or `}}` in the format string + // then it will be `{` and `}` in the string fragment respectively, but on the codegen + // phase it will be translated back to `{{` and `}}` to avoid executing an interpolation, + // thus the actual length of the codegen'd string will be one more than what we get here. + // + // We could just make the fragment include the double curly braces, but then the interpreter + // would need to undo the curly braces, so it's simpler to add them during codegen. + length += 1; + } + } else { + let span = Span::inclusive(start, self.position); + return Err(LexerErrorKind::UnterminatedStringLiteral { span }); + } + } + + if !string.is_empty() { + fragments.push(FmtStrFragment::String(string)); + } + + if !found_curly { + break; + } + + length += 1; // for the curly brace + + // Interpolation fragment until '}' or '"' + let mut string = String::new(); + let interpolation_start = self.position + 1; // + 1 because we are at '{' + let mut first_char = true; + while let Some(next) = self.next_char() { + let char = match next { + '}' => { + if string.is_empty() { + let error_position = self.position; + + // Keep consuming chars until we find the closing double quote + self.skip_until_string_end(); + + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::EmptyFormatStringInterpolation { span }); + } + + break; + } + other => { + let is_valid_char = if first_char { + other.is_ascii_alphabetic() || other == '_' + } else { + other.is_ascii_alphanumeric() || other == '_' + }; + if !is_valid_char { + let error_position = self.position; + + // 
Keep consuming chars until we find the closing double quote + // (unless we bumped into a double quote now, in which case we are done) + if other != '"' { + self.skip_until_string_end(); + } - let str_literal_token = Token::FmtStr(str_literal); + let span = Span::inclusive(error_position, error_position); + return Err(LexerErrorKind::InvalidFormatString { found: other, span }); + } + first_char = false; + other + } + }; + length += 1; + string.push(char); + } + + length += 1; // for the closing curly brace - self.next_char(); // Advance past the closing quote + let interpolation_span = Span::from(interpolation_start..self.position); + fragments.push(FmtStrFragment::Interpolation(string, interpolation_span)); + } + let token = Token::FmtStr(fragments, length); let end = self.position; - Ok(str_literal_token.into_span(start, end)) + Ok(token.into_span(start, end)) + } + + fn skip_until_string_end(&mut self) { + while let Some(next) = self.next_char() { + if next == '\'' && self.peek_char_is('"') { + self.next_char(); + } else if next == '"' { + break; + } + } } fn eat_format_string_or_alpha_numeric(&mut self) -> SpannedTokenResult { @@ -962,6 +1101,155 @@ mod tests { } } + #[test] + fn test_eat_string_literal_with_escapes() { + let input = "let _word = \"hello\\n\\t\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::Str("hello\n\t".to_string()), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_string_literal_missing_double_quote() { + let input = "\"hello"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::UnterminatedStringLiteral { .. 
}) + )); + } + + #[test] + fn test_eat_fmt_string_literal_without_interpolations() { + let input = "let _word = f\"hello\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr(vec![FmtStrFragment::String("hello".to_string())], 5), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_with_escapes_without_interpolations() { + let input = "let _word = f\"hello\\n\\t{{x}}\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr(vec![FmtStrFragment::String("hello\n\t{x}".to_string())], 12), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_with_interpolations() { + let input = "let _word = f\"hello {world} and {_another} {vAr_123}\""; + + let expected = vec![ + Token::Keyword(Keyword::Let), + Token::Ident("_word".to_string()), + Token::Assign, + Token::FmtStr( + vec![ + FmtStrFragment::String("hello ".to_string()), + FmtStrFragment::Interpolation("world".to_string(), Span::from(21..26)), + FmtStrFragment::String(" and ".to_string()), + FmtStrFragment::Interpolation("_another".to_string(), Span::from(33..41)), + FmtStrFragment::String(" ".to_string()), + FmtStrFragment::Interpolation("vAr_123".to_string(), Span::from(44..51)), + ], + 38, + ), + ]; + let mut lexer = Lexer::new(input); + + for token in expected.into_iter() { + let got = lexer.next_token().unwrap().into_token(); + assert_eq!(got, token); + } + } + + #[test] + fn test_eat_fmt_string_literal_missing_double_quote() { + let input = "f\"hello"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::UnterminatedStringLiteral 
{ .. }) + )); + } + + #[test] + fn test_eat_fmt_string_literal_invalid_char_in_interpolation() { + let input = "f\"hello {foo.bar}\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_double_quote_inside_interpolation() { + let input = "f\"hello {world\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer stopped parsing the string literal when it found \" inside the interpolation + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_unmatched_closing_curly() { + let input = "f\"hello }\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!(lexer.next_token(), Err(LexerErrorKind::InvalidFormatString { .. }))); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + + #[test] + fn test_eat_fmt_string_literal_empty_interpolation() { + let input = "f\"{}\" true"; + let mut lexer = Lexer::new(input); + assert!(matches!( + lexer.next_token(), + Err(LexerErrorKind::EmptyFormatStringInterpolation { .. 
}) + )); + + // Make sure the lexer went past the ending double quote for better recovery + let token = lexer.next_token().unwrap().into_token(); + assert!(matches!(token, Token::Bool(true))); + } + #[test] fn test_eat_integer_literals() { let test_cases: Vec<(&str, Token)> = vec![ @@ -1151,7 +1439,7 @@ mod tests { format!("let s = r#####\"{s}\"#####;"), ], ), - (Some(Token::FmtStr("".to_string())), vec![format!("assert(x == y, f\"{s}\");")]), + (Some(Token::FmtStr(vec![], 0)), vec![format!("assert(x == y, f\"{s}\");")]), // expected token not found // (Some(Token::LineComment("".to_string(), None)), vec![ (None, vec![format!("//{s}"), format!("// {s}")]), @@ -1196,11 +1484,16 @@ mod tests { Err(LexerErrorKind::InvalidIntegerLiteral { .. }) | Err(LexerErrorKind::UnexpectedCharacter { .. }) | Err(LexerErrorKind::NonAsciiComment { .. }) - | Err(LexerErrorKind::UnterminatedBlockComment { .. }) => { + | Err(LexerErrorKind::UnterminatedBlockComment { .. }) + | Err(LexerErrorKind::UnterminatedStringLiteral { .. }) + | Err(LexerErrorKind::InvalidFormatString { .. }) => { expected_token_found = true; } Err(err) => { - panic!("Unexpected lexer error found: {:?}", err) + panic!( + "Unexpected lexer error found {:?} for input string {:?}", + err, blns_program_str + ) } } } diff --git a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs index 836161c7c9f..f35515045db 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/lexer/token.rs @@ -25,7 +25,7 @@ pub enum BorrowedToken<'input> { Str(&'input str), /// the u8 is the number of hashes, i.e. r###.. RawStr(&'input str, u8), - FmtStr(&'input str), + FmtStr(&'input [FmtStrFragment], u32 /* length */), Keyword(Keyword), IntType(IntType), AttributeStart { @@ -136,7 +136,7 @@ pub enum Token { Str(String), /// the u8 is the number of hashes, i.e. r###.. 
RawStr(String, u8), - FmtStr(String), + FmtStr(Vec, u32 /* length */), Keyword(Keyword), IntType(IntType), AttributeStart { @@ -255,7 +255,7 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { Token::Int(n) => BorrowedToken::Int(*n), Token::Bool(b) => BorrowedToken::Bool(*b), Token::Str(ref b) => BorrowedToken::Str(b), - Token::FmtStr(ref b) => BorrowedToken::FmtStr(b), + Token::FmtStr(ref b, length) => BorrowedToken::FmtStr(b, *length), Token::RawStr(ref b, hashes) => BorrowedToken::RawStr(b, *hashes), Token::Keyword(k) => BorrowedToken::Keyword(*k), Token::AttributeStart { is_inner, is_tag } => { @@ -312,6 +312,35 @@ pub fn token_to_borrowed_token(token: &Token) -> BorrowedToken<'_> { } } +#[derive(Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] +pub enum FmtStrFragment { + String(String), + Interpolation(String, Span), +} + +impl Display for FmtStrFragment { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + FmtStrFragment::String(string) => { + // Undo the escapes when displaying the fmt string + let string = string + .replace('{', "{{") + .replace('}', "}}") + .replace('\r', "\\r") + .replace('\n', "\\n") + .replace('\t', "\\t") + .replace('\0', "\\0") + .replace('\'', "\\'") + .replace('\"', "\\\""); + write!(f, "{}", string) + } + FmtStrFragment::Interpolation(string, _span) => { + write!(f, "{{{}}}", string) + } + } + } +} + #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] pub enum DocStyle { Outer, @@ -375,7 +404,7 @@ impl fmt::Display for Token { Token::Int(n) => write!(f, "{}", n), Token::Bool(b) => write!(f, "{b}"), Token::Str(ref b) => write!(f, "{b:?}"), - Token::FmtStr(ref b) => write!(f, "f{b:?}"), + Token::FmtStr(ref b, _length) => write!(f, "f{b:?}"), Token::RawStr(ref b, hashes) => { let h: String = std::iter::once('#').cycle().take(hashes as usize).collect(); write!(f, "r{h}{b:?}{h}") @@ -515,7 +544,7 @@ impl Token { | Token::Bool(_) | Token::Str(_) | Token::RawStr(..) 
- | Token::FmtStr(_) => TokenKind::Literal, + | Token::FmtStr(_, _) => TokenKind::Literal, Token::Keyword(_) => TokenKind::Keyword, Token::UnquoteMarker(_) => TokenKind::UnquoteMarker, Token::Quote(_) => TokenKind::Quote, diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs index 5d9b66f4f96..c9ae3438e42 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -7,11 +7,11 @@ use noirc_errors::{ Location, }; -use crate::hir_def::function::FunctionSignature; use crate::{ ast::{BinaryOpKind, IntegerBitSize, Signedness, Visibility}, token::{Attributes, FunctionAttribute}, }; +use crate::{hir_def::function::FunctionSignature, token::FmtStrFragment}; use serde::{Deserialize, Serialize}; use super::HirType; @@ -112,7 +112,7 @@ pub enum Literal { Bool(bool), Unit, Str(String), - FmtStr(String, u64, Box), + FmtStr(Vec, u64, Box), } #[derive(Debug, Clone, Hash)] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs index 050f844146a..b31a5744d09 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -12,6 +12,7 @@ use crate::ast::{FunctionKind, IntegerBitSize, Signedness, UnaryOp, Visibility}; use crate::hir::comptime::InterpreterError; use crate::hir::type_check::{NoMatchingImplFoundError, TypeCheckError}; use crate::node_interner::{ExprId, ImplSearchErrorKind}; +use crate::token::FmtStrFragment; use crate::{ debug::DebugInstrumenter, hir_def::{ @@ -417,10 +418,10 @@ impl<'interner> Monomorphizer<'interner> { let expr = match self.interner.expression(&expr) { HirExpression::Ident(ident, generics) => self.ident(ident, expr, generics)?, HirExpression::Literal(HirLiteral::Str(contents)) => 
Literal(Str(contents)), - HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { + HirExpression::Literal(HirLiteral::FmtStr(fragments, idents, _length)) => { let fields = try_vecmap(idents, |ident| self.expr(ident))?; Literal(FmtStr( - contents, + fragments, fields.len() as u64, Box::new(ast::Expression::Tuple(fields)), )) @@ -1846,7 +1847,7 @@ impl<'interner> Monomorphizer<'interner> { _ => unreachable!("ICE: format string fields should be structured in a tuple, but got a {zeroed_tuple}"), }; ast::Expression::Literal(ast::Literal::FmtStr( - "\0".repeat(*length as usize), + vec![FmtStrFragment::String("\0".repeat(*length as usize))], fields_len, Box::new(zeroed_tuple), )) diff --git a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs index b6421b26a03..9c1072a4117 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/monomorphization/printer.rs @@ -105,9 +105,11 @@ impl AstPrinter { super::ast::Literal::Integer(x, _, _, _) => x.fmt(f), super::ast::Literal::Bool(x) => x.fmt(f), super::ast::Literal::Str(s) => s.fmt(f), - super::ast::Literal::FmtStr(s, _, _) => { + super::ast::Literal::FmtStr(fragments, _, _) => { write!(f, "f\"")?; - s.fmt(f)?; + for fragment in fragments { + fragment.fmt(f)?; + } write!(f, "\"") } super::ast::Literal::Unit => { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs index c2f7b781873..fcc58c5d833 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser.rs @@ -5,7 +5,7 @@ use noirc_errors::Span; use crate::{ ast::{Ident, ItemVisibility}, lexer::{Lexer, SpannedTokenResult}, - token::{IntType, Keyword, SpannedToken, Token, TokenKind, Tokens}, + token::{FmtStrFragment, IntType, Keyword, SpannedToken, 
Token, TokenKind, Tokens}, }; use super::{labels::ParsingRuleLabel, ParsedModule, ParserError, ParserErrorReason}; @@ -294,11 +294,11 @@ impl<'a> Parser<'a> { } } - fn eat_fmt_str(&mut self) -> Option { + fn eat_fmt_str(&mut self) -> Option<(Vec, u32)> { if matches!(self.token.token(), Token::FmtStr(..)) { let token = self.bump(); match token.into_token() { - Token::FmtStr(string) => Some(string), + Token::FmtStr(fragments, length) => Some((fragments, length)), _ => unreachable!(), } } else { diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs index e1ecc972eeb..526a0c3dd6e 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/expression.rs @@ -577,7 +577,7 @@ impl<'a> Parser<'a> { /// BlockExpression = Block fn parse_literal(&mut self) -> Option { if let Some(bool) = self.eat_bool() { - return Some(ExpressionKind::Literal(Literal::Bool(bool))); + return Some(ExpressionKind::boolean(bool)); } if let Some(int) = self.eat_int() { @@ -585,15 +585,15 @@ impl<'a> Parser<'a> { } if let Some(string) = self.eat_str() { - return Some(ExpressionKind::Literal(Literal::Str(string))); + return Some(ExpressionKind::string(string)); } if let Some((string, n)) = self.eat_raw_str() { - return Some(ExpressionKind::Literal(Literal::RawStr(string, n))); + return Some(ExpressionKind::raw_string(string, n)); } - if let Some(string) = self.eat_fmt_str() { - return Some(ExpressionKind::Literal(Literal::FmtStr(string))); + if let Some((fragments, length)) = self.eat_fmt_str() { + return Some(ExpressionKind::format_string(fragments, length)); } if let Some(tokens) = self.eat_quote() { @@ -865,10 +865,11 @@ mod tests { fn parses_fmt_str() { let src = "f\"hello\""; let expr = parse_expression_no_errors(src); - let ExpressionKind::Literal(Literal::FmtStr(string)) = expr.kind else { + let 
ExpressionKind::Literal(Literal::FmtStr(fragments, length)) = expr.kind else { panic!("Expected format string literal"); }; - assert_eq!(string, "hello"); + assert_eq!(fragments[0].to_string(), "hello"); + assert_eq!(length, 5); } #[test] diff --git a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs index 0de94a89be5..884db763698 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/parser/parser/types.rs @@ -15,7 +15,7 @@ impl<'a> Parser<'a> { typ } else { self.expected_label(ParsingRuleLabel::Type); - self.unspecified_type_at_previous_token_end() + UnresolvedTypeData::Error.with_span(self.span_at_previous_token_end()) } } @@ -660,6 +660,14 @@ mod tests { assert!(unconstrained); } + #[test] + fn parses_function_type_with_colon_in_parameter() { + let src = "fn(value: T) -> Field"; + let mut parser = Parser::for_str(src); + let _ = parser.parse_type_or_error(); + assert!(!parser.errors.is_empty()); + } + #[test] fn parses_trait_as_type_no_generics() { let src = "impl foo::Bar"; diff --git a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs index cba29d58ea3..8ddf1b571e6 100644 --- a/noir/noir-repo/compiler/noirc_frontend/src/tests.rs +++ b/noir/noir-repo/compiler/noirc_frontend/src/tests.rs @@ -1209,8 +1209,6 @@ fn resolve_fmt_strings() { let string = f"this is i: {i}"; println(string); - println(f"I want to print {0}"); - let new_val = 10; println(f"random_string{new_val}{new_val}"); } @@ -1220,7 +1218,7 @@ fn resolve_fmt_strings() { "#; let errors = get_program_errors(src); - assert!(errors.len() == 5, "Expected 5 errors, got: {:?}", errors); + assert!(errors.len() == 3, "Expected 5 errors, got: {:?}", errors); for (err, _file_id) in errors { match &err { @@ -1229,21 +1227,13 @@ fn resolve_fmt_strings() { }) => { assert_eq!(name, "i"); } - 
CompilationError::ResolverError(ResolverError::NumericConstantInFormatString { - name, - .. - }) => { - assert_eq!(name, "0"); - } CompilationError::TypeError(TypeCheckError::UnusedResultError { expr_type: _, expr_span, }) => { let a = src.get(expr_span.start() as usize..expr_span.end() as usize).unwrap(); assert!( - a == "println(string)" - || a == "println(f\"I want to print {0}\")" - || a == "println(f\"random_string{new_val}{new_val}\")" + a == "println(string)" || a == "println(f\"random_string{new_val}{new_val}\")" ); } _ => unimplemented!(), diff --git a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml index 8bb56703e8a..8d0574aad64 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml +++ b/noir/noir-repo/compiler/noirc_printable_type/Cargo.toml @@ -14,7 +14,6 @@ workspace = true [dependencies] acvm.workspace = true iter-extended.workspace = true -regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true diff --git a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs index 838a2472125..6ae187da27f 100644 --- a/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs +++ b/noir/noir-repo/compiler/noirc_printable_type/src/lib.rs @@ -2,7 +2,7 @@ use std::{collections::BTreeMap, str}; use acvm::{acir::AcirField, brillig_vm::brillig::ForeignCallParam}; use iter_extended::vecmap; -use regex::{Captures, Regex}; + use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -253,24 +253,6 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Op Some(output) } -// Taken from Regex docs directly -fn replace_all( - re: &Regex, - haystack: &str, - mut replacement: impl FnMut(&Captures) -> Result, -) -> Result { - let mut new = String::with_capacity(haystack.len()); - let mut last_match = 0; - for caps in re.captures_iter(haystack) { - let m = caps.get(0).unwrap(); - 
new.push_str(&haystack[last_match..m.start()]); - new.push_str(&replacement(&caps)?); - last_match = m.end(); - } - new.push_str(&haystack[last_match..]); - Ok(new) -} - impl std::fmt::Display for PrintableValueDisplay { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { @@ -279,18 +261,74 @@ impl std::fmt::Display for PrintableValueDisplay { write!(fmt, "{output_string}") } Self::FmtString(template, values) => { - let mut display_iter = values.iter(); - let re = Regex::new(r"\{([a-zA-Z0-9_]+)\}").map_err(|_| std::fmt::Error)?; + let mut values_iter = values.iter(); + write_template_replacing_interpolations(template, fmt, || { + values_iter.next().and_then(|(value, typ)| to_string(value, typ)) + }) + } + } + } +} - let formatted_str = replace_all(&re, template, |_: &Captures| { - let (value, typ) = display_iter.next().ok_or(std::fmt::Error)?; - to_string(value, typ).ok_or(std::fmt::Error) - })?; +fn write_template_replacing_interpolations( + template: &str, + fmt: &mut std::fmt::Formatter<'_>, + mut replacement: impl FnMut() -> Option, +) -> std::fmt::Result { + let mut last_index = 0; // How far we've written from the template + let mut char_indices = template.char_indices().peekable(); + while let Some((char_index, char)) = char_indices.next() { + // If we see a '}' it must be "}}" because the ones for interpolation are handled + // when we see '{' + if char == '}' { + // Write what we've seen so far in the template, including this '}' + write!(fmt, "{}", &template[last_index..=char_index])?; + + // Skip the second '}' + let (_, closing_curly) = char_indices.next().unwrap(); + assert_eq!(closing_curly, '}'); + + last_index = char_indices.peek().map(|(index, _)| *index).unwrap_or(template.len()); + continue; + } + + // Keep going forward until we find a '{' + if char != '{' { + continue; + } + + // We'll either have to write an interpolation or '{{' if it's an escape, + // so let's write what we've seen so far in the template. 
+ write!(fmt, "{}", &template[last_index..char_index])?; - write!(fmt, "{formatted_str}") + // If it's '{{', write '{' and keep going + if char_indices.peek().map(|(_, char)| char) == Some(&'{') { + write!(fmt, "{{")?; + + // Skip the second '{' + char_indices.next().unwrap(); + + last_index = char_indices.peek().map(|(index, _)| *index).unwrap_or(template.len()); + continue; + } + + // Write the interpolation + if let Some(string) = replacement() { + write!(fmt, "{}", string)?; + } else { + return Err(std::fmt::Error); + } + + // Whatever was inside '{...}' doesn't matter, so skip until we find '}' + while let Some((_, char)) = char_indices.next() { + if char == '}' { + last_index = char_indices.peek().map(|(index, _)| *index).unwrap_or(template.len()); + break; } } } + + write!(fmt, "{}", &template[last_index..]) } /// This trims any leading zeroes. @@ -390,3 +428,41 @@ pub fn decode_string_value(field_elements: &[F]) -> String { let final_string = str::from_utf8(&string_as_slice).unwrap(); final_string.to_owned() } + +#[cfg(test)] +mod tests { + use acvm::FieldElement; + + use crate::{PrintableType, PrintableValue, PrintableValueDisplay}; + + #[test] + fn printable_value_display_to_string_without_interpolations() { + let template = "hello"; + let display = + PrintableValueDisplay::::FmtString(template.to_string(), vec![]); + assert_eq!(display.to_string(), template); + } + + #[test] + fn printable_value_display_to_string_with_curly_escapes() { + let template = "hello {{world}} {{{{double_escape}}}}"; + let expected = "hello {world} {{double_escape}}"; + let display = + PrintableValueDisplay::::FmtString(template.to_string(), vec![]); + assert_eq!(display.to_string(), expected); + } + + #[test] + fn printable_value_display_to_string_with_interpolations() { + let template = "hello {one} {{no}} {two} {{not_again}} {three} world"; + let values = vec![ + (PrintableValue::String("ONE".to_string()), PrintableType::String { length: 3 }), + 
(PrintableValue::String("TWO".to_string()), PrintableType::String { length: 3 }), + (PrintableValue::String("THREE".to_string()), PrintableType::String { length: 5 }), + ]; + let expected = "hello ONE {no} TWO {not_again} THREE world"; + let display = + PrintableValueDisplay::::FmtString(template.to_string(), values); + assert_eq!(display.to_string(), expected); + } +} diff --git a/noir/noir-repo/docs/docs/getting_started/noir_installation.md b/noir/noir-repo/docs/docs/getting_started/noir_installation.md index a5c7e649278..05f036d4f6d 100644 --- a/noir/noir-repo/docs/docs/getting_started/noir_installation.md +++ b/noir/noir-repo/docs/docs/getting_started/noir_installation.md @@ -93,7 +93,7 @@ step 2: Follow the [Noirup instructions](#installing-noirup). ## Setting up shell completions -Once `nargo` is installed, you can [set up shell completions for it](setting_up_shell_completions). +Once `nargo` is installed, you can [set up shell completions for it](setting_up_shell_completions.md). ## Uninstalling Nargo diff --git a/noir/noir-repo/docs/docs/index.mdx b/noir/noir-repo/docs/docs/index.mdx index a6bd306f91d..5c116a73b3f 100644 --- a/noir/noir-repo/docs/docs/index.mdx +++ b/noir/noir-repo/docs/docs/index.mdx @@ -21,8 +21,17 @@ sidebar_position: 0 import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - -Noir Logo +import ThemedImage from '@theme/ThemedImage'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + + Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. 
diff --git a/noir/noir-repo/docs/docs/noir/concepts/comptime.md b/noir/noir-repo/docs/docs/noir/concepts/comptime.md index 37457d47b46..9661dc1a6ca 100644 --- a/noir/noir-repo/docs/docs/noir/concepts/comptime.md +++ b/noir/noir-repo/docs/docs/noir/concepts/comptime.md @@ -41,7 +41,7 @@ Note that while in a `comptime` context, any runtime variables _local to the cur Evaluation rules of `comptime` follows the normal unconstrained evaluation rules for other Noir code. There are a few things to note though: - Certain built-in functions may not be available, although more may be added over time. -- Evaluation order of global items is currently unspecified. For example, given the following two functions we can't guarantee +- Evaluation order of `comptime {}` blocks within global items is currently unspecified. For example, given the following two functions we can't guarantee which `println` will execute first. The ordering of the two printouts will be arbitrary, but should be stable across multiple compilations with the same `nargo` version as long as the program is also unchanged. ```rust @@ -56,11 +56,14 @@ fn two() { - Since evaluation order is unspecified, care should be taken when using mutable globals so that they do not rely on a particular ordering. For example, using globals to generate unique ids should be fine but relying on certain ids always being produced (especially after edits to the program) should be avoided. -- Although most ordering of globals is unspecified, two are: +- Although the ordering of comptime code is usually unspecified, there are cases where it is: - Dependencies of a crate will always be evaluated before the dependent crate. - - Any annotations on a function will be run before the function itself is resolved. This is to allow the annotation to modify the function if necessary. Note that if the + - Any attributes on a function will be run before the function body is resolved. 
This is to allow the attribute to modify the function if necessary. Note that if the function itself was called at compile-time previously, it will already be resolved and cannot be modified. To prevent accidentally calling functions you wish to modify - at compile-time, it may be helpful to sort your `comptime` annotation functions into a different crate along with any dependencies they require. + at compile-time, it may be helpful to sort your `comptime` annotation functions into a different submodule crate along with any dependencies they require. + - Unlike raw `comptime {}` blocks, attributes on top-level items in the program do have a set evaluation order. Attributes within a module are evaluated top-down, and attributes + in different modules are evaluated submodule-first. Sibling modules to the same parent module are evaluated in order of the module declarations (`mod foo; mod bar;`) in their + parent module. ### Lowering @@ -89,7 +92,7 @@ fn main() { } ``` -Not all types of values can be lowered. For example, `Type`s and `TypeDefinition`s (among other types) cannot be lowered at all. +Not all types of values can be lowered. For example, references, `Type`s, and `TypeDefinition`s (among other types) cannot be lowered at all. ```rust fn main() { @@ -100,6 +103,19 @@ fn main() { comptime fn get_type() -> Type { ... } ``` +Values of certain types may also change type when they are lowered. For example, a comptime format string will already be +formatted, and thus lowers into a runtime string instead: + +```rust +fn main() { + let foo = comptime { + let i = 2; + f"i = {i}" + }; + assert_eq(foo, "i = 2"); +} +``` + --- ## (Quasi) Quote @@ -121,6 +137,21 @@ Calling such a function at compile-time without `!` will just return the `Quoted For those familiar with quoting from other languages (primarily lisps), Noir's `quote` is actually a _quasiquote_. This means we can escape the quoting by using the unquote operator to splice values in the middle of quoted code. 
+In addition to curly braces, you can also use square braces for the quote operator: + +```rust +comptime { + let q1 = quote { 1 }; + let q2 = quote [ 2 ]; + assert_eq(q1, q2); + + // Square braces can be used to quote mismatched curly braces if needed + let _ = quote[}]; +} +``` + +--- + ## Unquote The unquote operator `$` is usable within a `quote` expression. @@ -149,7 +180,7 @@ If it is an expression (even a parenthesized one), it will do nothing. Most like Unquoting can also be avoided by escaping the `$` with a backslash: -``` +```rust comptime { let x = quote { 1 + 2 }; @@ -158,26 +189,48 @@ comptime { } ``` +### Combining Tokens + +Note that `Quoted` is internally a series of separate tokens, and that all unquoting does is combine these token vectors. +This means that code which appears to append like a string actually appends like a vector internally: + +```rust +comptime { + let x = 3; + let q = quote { foo$x }; // This is [foo, 3], not [foo3] + + // Spaces are ignored in general, they're never part of a token + assert_eq(q, quote { foo 3 }); +} +``` + +If you do want string semantics, you can use format strings then convert back to a `Quoted` value with `.quoted_contents()`. +Note that formatting a quoted value with multiple tokens will always insert a space between each token. If this is +undesired, you'll need to only operate on quoted values containing a single token. To do this, you can iterate +over each token of a larger quoted value with `.tokens()`: + +#include_code concatenate-example noir_stdlib/src/meta/mod.nr rust + --- -## Annotations +## Attributes -Annotations provide a way to run a `comptime` function on an item in the program. -When you use an annotation, the function with the same name will be called with that item as an argument: +Attributes provide a way to run a `comptime` function on an item in the program. 
+When you use an attribute, the function with the same name will be called with that item as an argument: ```rust -#[my_struct_annotation] +#[my_struct_attribute] struct Foo {} -comptime fn my_struct_annotation(s: StructDefinition) { - println("Called my_struct_annotation!"); +comptime fn my_struct_attribute(s: StructDefinition) { + println("Called my_struct_attribute!"); } -#[my_function_annotation] +#[my_function_attribute] fn foo() {} -comptime fn my_function_annotation(f: FunctionDefinition) { - println("Called my_function_annotation!"); +comptime fn my_function_attribute(f: FunctionDefinition) { + println("Called my_function_attribute!"); } ``` @@ -190,15 +243,47 @@ For example, this is the mechanism used to insert additional trait implementatio ### Calling annotations with additional arguments -Arguments may optionally be given to annotations. -When this is done, these additional arguments are passed to the annotation function after the item argument. +Arguments may optionally be given to attributes. +When this is done, these additional arguments are passed to the attribute function after the item argument. #include_code annotation-arguments-example noir_stdlib/src/meta/mod.nr rust -We can also take any number of arguments by adding the `varargs` annotation: +We can also take any number of arguments by adding the `varargs` attribute: #include_code annotation-varargs-example noir_stdlib/src/meta/mod.nr rust +### Attribute Evaluation Order + +Unlike the evaluation order of stray `comptime {}` blocks within functions, attributes have a well-defined evaluation +order. Within a module, attributes are evaluated top to bottom. Between modules, attributes in child modules are evaluated +first. Attributes in sibling modules are resolved following the `mod foo; mod bar;` declaration order within their parent +modules. 
+ +```rust +mod foo; // attributes in foo are run first +mod bar; // followed by attributes in bar + +// followed by any attributes in the parent module +#[derive(Eq)] +struct Baz {} +``` + +Note that because of this evaluation order, you may get an error trying to derive a trait for a struct whose fields +have not yet had the trait derived already: + +```rust +// Error! `Bar` field of `Foo` does not (yet) implement Eq! +#[derive(Eq)] +struct Foo { + bar: Bar +} + +#[derive(Eq)] +struct Bar {} +``` + +In this case, the issue can be resolved by rearranging the structs. + --- ## Comptime API diff --git a/noir/noir-repo/docs/docs/noir/modules_packages_crates/workspaces.md b/noir/noir-repo/docs/docs/noir/modules_packages_crates/workspaces.md index 513497f12bf..2fbf10aec52 100644 --- a/noir/noir-repo/docs/docs/noir/modules_packages_crates/workspaces.md +++ b/noir/noir-repo/docs/docs/noir/modules_packages_crates/workspaces.md @@ -33,10 +33,14 @@ members = ["crates/a", "crates/b"] default-member = "crates/a" ``` -`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. The `--package` option can be used to limit +the scope of some commands to a specific member of the workspace; otherwise these commands run on the package nearest on the path to the +current directory where `nargo` was invoked. `default-member` indicates which package various commands process by default. Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. 
Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +Please note that nesting regular packages is not supported: certain commands work on the workspace level and will use the topmost Nargo.toml file they can find on the path; unless this is a workspace configuration with `members`, the command might run on some unintended package. diff --git a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md index d6079ab182c..e9392b20a92 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/black_box_fns.md @@ -14,7 +14,6 @@ Here is a list of the current black box functions: - [AES128](./cryptographic_primitives/ciphers.mdx#aes128) - [SHA256](./cryptographic_primitives/hashes.mdx#sha256) -- [Schnorr signature verification](./cryptographic_primitives/schnorr.mdx) - [Blake2s](./cryptographic_primitives/hashes.mdx#blake2s) - [Blake3](./cryptographic_primitives/hashes.mdx#blake3) - [Pedersen Hash](./cryptographic_primitives/hashes.mdx#pedersen_hash) diff --git a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx b/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx deleted file mode 100644 index 4c859043787..00000000000 --- a/noir/noir-repo/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx +++ /dev/null @@ -1,36 +0,0 @@ ---- -title: Schnorr Signatures -description: Learn how you can verify Schnorr signatures using Noir -keywords: [cryptographic primitives, Noir project, schnorr, signatures] -sidebar_position: 2 ---- - -import BlackBoxInfo from '@site/src/components/Notes/_blackbox'; - -## schnorr::verify_signature - -Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). 
- -#include_code schnorr_verify noir_stdlib/src/schnorr.nr rust - -where `_signature` can be generated like so using the npm package -[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) - -```js -const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); -const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); - -... - -const barretenberg = await BarretenbergWasm.new(); -const schnorr = new Schnorr(barretenberg); -const pubKey = schnorr.computePublicKey(privateKey); -const message = ... -const signature = Array.from( - schnorr.constructSignature(hash, privateKey).toBuffer() -); - -... -``` - - diff --git a/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md b/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md index 55d2d244344..90501e05fa4 100644 --- a/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md +++ b/noir/noir-repo/docs/docs/noir/standard_library/meta/op.md @@ -142,7 +142,7 @@ Represents a binary operator. 
One of `+`, `-`, `*`, `/`, `%`, `==`, `!=`, `<`, ` #### is_shift_left -#include_code is_shift_right noir_stdlib/src/meta/op.nr rust +#include_code is_shift_left noir_stdlib/src/meta/op.nr rust `true` if this operator is `<<` diff --git a/noir/noir-repo/docs/docusaurus.config.ts b/noir/noir-repo/docs/docusaurus.config.ts index c53d11e3373..eee4e7f8236 100644 --- a/noir/noir-repo/docs/docusaurus.config.ts +++ b/noir/noir-repo/docs/docusaurus.config.ts @@ -57,8 +57,8 @@ export default { navbar: { logo: { alt: 'Noir Logo', - src: 'img/logo.svg', - srcDark: 'img/logoDark.svg', + src: 'img/logoDark.svg', + srcDark: 'img/logo.svg', href: '/', }, items: [ diff --git a/noir/noir-repo/docs/static/img/favicon.ico b/noir/noir-repo/docs/static/img/favicon.ico index 1c85cef482e..0abffba22ef 100644 Binary files a/noir/noir-repo/docs/static/img/favicon.ico and b/noir/noir-repo/docs/static/img/favicon.ico differ diff --git a/noir/noir-repo/docs/static/img/homepage_header_pic.png b/noir/noir-repo/docs/static/img/homepage_header_pic.png index d629e202232..f6bdd5378f6 100644 Binary files a/noir/noir-repo/docs/static/img/homepage_header_pic.png and b/noir/noir-repo/docs/static/img/homepage_header_pic.png differ diff --git a/noir/noir-repo/docs/static/img/logo.png b/noir/noir-repo/docs/static/img/logo.png index 83d3a66f2b1..0df14cf4f00 100644 Binary files a/noir/noir-repo/docs/static/img/logo.png and b/noir/noir-repo/docs/static/img/logo.png differ diff --git a/noir/noir-repo/docs/static/img/logo.svg b/noir/noir-repo/docs/static/img/logo.svg index 8095d6a169d..f6fadf87bcb 100644 --- a/noir/noir-repo/docs/static/img/logo.svg +++ b/noir/noir-repo/docs/static/img/logo.svg @@ -1,29 +1,14 @@ - - - - - - - - - - - - - - + + + + + + + + + \ No newline at end of file diff --git a/noir/noir-repo/docs/static/img/logoDark.png b/noir/noir-repo/docs/static/img/logoDark.png new file mode 100644 index 00000000000..25d554c486c Binary files /dev/null and b/noir/noir-repo/docs/static/img/logoDark.png 
differ diff --git a/noir/noir-repo/docs/static/img/logoDark.svg b/noir/noir-repo/docs/static/img/logoDark.svg index dee9f27a6a9..ac0f9897b30 100644 --- a/noir/noir-repo/docs/static/img/logoDark.svg +++ b/noir/noir-repo/docs/static/img/logoDark.svg @@ -1,29 +1,13 @@ - - - - - - - - - - - - - - + + + + + + + + + \ No newline at end of file diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.36.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.36.0/index.mdx index a6bd306f91d..5c116a73b3f 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.36.0/index.mdx +++ b/noir/noir-repo/docs/versioned_docs/version-v0.36.0/index.mdx @@ -21,8 +21,17 @@ sidebar_position: 0 import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - -Noir Logo +import ThemedImage from '@theme/ThemedImage'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + + Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.37.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.37.0/index.mdx index a6bd306f91d..5c116a73b3f 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.37.0/index.mdx +++ b/noir/noir-repo/docs/versioned_docs/version-v0.37.0/index.mdx @@ -21,8 +21,17 @@ sidebar_position: 0 import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - -Noir Logo +import ThemedImage from '@theme/ThemedImage'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + + Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.38.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.38.0/index.mdx index a6bd306f91d..5c116a73b3f 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.38.0/index.mdx +++ b/noir/noir-repo/docs/versioned_docs/version-v0.38.0/index.mdx @@ -21,8 +21,17 @@ sidebar_position: 0 import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - -Noir Logo +import ThemedImage from '@theme/ThemedImage'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + + Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. diff --git a/noir/noir-repo/docs/versioned_docs/version-v0.39.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v0.39.0/index.mdx index a6bd306f91d..5c116a73b3f 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v0.39.0/index.mdx +++ b/noir/noir-repo/docs/versioned_docs/version-v0.39.0/index.mdx @@ -21,8 +21,17 @@ sidebar_position: 0 import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - -Noir Logo +import ThemedImage from '@theme/ThemedImage'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + + Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. 
diff --git a/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/index.mdx b/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/index.mdx index a6bd306f91d..5c116a73b3f 100644 --- a/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/index.mdx +++ b/noir/noir-repo/docs/versioned_docs/version-v1.0.0-beta.0/index.mdx @@ -21,8 +21,17 @@ sidebar_position: 0 import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; - -Noir Logo +import ThemedImage from '@theme/ThemedImage'; +import useBaseUrl from '@docusaurus/useBaseUrl'; + + Noir is an open-source Domain-Specific Language for safe and seamless construction of privacy-preserving Zero-Knowledge programs, requiring no previous knowledge on the underlying mathematics or cryptography. diff --git a/noir/noir-repo/noir-logo.png b/noir/noir-repo/noir-logo.png index eabb163ad73..0abffba22ef 100644 Binary files a/noir/noir-repo/noir-logo.png and b/noir/noir-repo/noir-logo.png differ diff --git a/noir/noir-repo/noir_stdlib/src/collections/map.nr b/noir/noir-repo/noir_stdlib/src/collections/map.nr index bcce08faab4..2b0da1b90ec 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/map.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/map.nr @@ -201,7 +201,10 @@ impl HashMap { } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}."; + let self_len = self._len; + let entries_len = entries.len(); + let msg = + f"Amount of valid elements should have been {self_len} times, but got {entries_len}."; assert(entries.len() == self._len, msg); entries @@ -236,8 +239,10 @@ impl HashMap { } } + let self_len = self._len; + let keys_len = keys.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {keys.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {keys_len}."; assert(keys.len() == self._len, msg); keys @@ -271,8 +276,10 @@ impl HashMap { } } + let self_len = self._len; + let values_len = 
values.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {values.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {values_len}."; assert(values.len() == self._len, msg); values diff --git a/noir/noir-repo/noir_stdlib/src/collections/umap.nr b/noir/noir-repo/noir_stdlib/src/collections/umap.nr index 3e074551e9d..7aebeb437cf 100644 --- a/noir/noir-repo/noir_stdlib/src/collections/umap.nr +++ b/noir/noir-repo/noir_stdlib/src/collections/umap.nr @@ -138,7 +138,10 @@ impl UHashMap { } } - let msg = f"Amount of valid elements should have been {self._len} times, but got {entries.len()}."; + let self_len = self._len; + let entries_len = entries.len(); + let msg = + f"Amount of valid elements should have been {self_len} times, but got {entries_len}."; assert(entries.len() == self._len, msg); entries @@ -158,8 +161,10 @@ impl UHashMap { } } + let self_len = self._len; + let keys_len = keys.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {keys.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {keys_len}."; assert(keys.len() == self._len, msg); keys @@ -179,8 +184,10 @@ impl UHashMap { } } + let self_len = self._len; + let values_len = values.len(); let msg = - f"Amount of valid elements should have been {self._len} times, but got {values.len()}."; + f"Amount of valid elements should have been {self_len} times, but got {values_len}."; assert(values.len() == self._len, msg); values diff --git a/noir/noir-repo/noir_stdlib/src/hint.nr b/noir/noir-repo/noir_stdlib/src/hint.nr new file mode 100644 index 00000000000..25dcc7ec56e --- /dev/null +++ b/noir/noir-repo/noir_stdlib/src/hint.nr @@ -0,0 +1,6 @@ +/// An identity function that *hints* to the compiler to be maximally pessimistic about what `black_box` could do. 
+/// +/// This can be used to block the SSA optimization passes being applied to a value, which should help to prevent +/// test programs from being optimized down to nothing and have them resemble runtime code more closely. +#[builtin(black_box)] +pub fn black_box<T>(value: T) -> T {} diff --git a/noir/noir-repo/noir_stdlib/src/lib.nr b/noir/noir-repo/noir_stdlib/src/lib.nr index 8e9dc13c13d..c3a289a8fa2 100644 --- a/noir/noir-repo/noir_stdlib/src/lib.nr +++ b/noir/noir-repo/noir_stdlib/src/lib.nr @@ -3,7 +3,6 @@ pub mod aes128; pub mod array; pub mod slice; pub mod merkle; -pub mod schnorr; pub mod ecdsa_secp256k1; pub mod ecdsa_secp256r1; pub mod embedded_curve_ops; @@ -27,6 +26,7 @@ pub mod meta; pub mod append; pub mod mem; pub mod panic; +pub mod hint; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident @@ -115,4 +115,3 @@ pub fn wrapping_mul<T>(x: T, y: T) -> T { #[builtin(as_witness)] pub fn as_witness(x: Field) {} - diff --git a/noir/noir-repo/noir_stdlib/src/meta/mod.nr b/noir/noir-repo/noir_stdlib/src/meta/mod.nr index 5102f0cf1fd..5d2164a510d 100644 --- a/noir/noir-repo/noir_stdlib/src/meta/mod.nr +++ b/noir/noir-repo/noir_stdlib/src/meta/mod.nr @@ -252,6 +252,41 @@ mod tests { fn do_nothing(_: Self) {} } + // docs:start:concatenate-example + comptime fn concatenate(q1: Quoted, q2: Quoted) -> Quoted { + assert(q1.tokens().len() <= 1); + assert(q2.tokens().len() <= 1); + + f"{q1}{q2}".quoted_contents() + } + + // The CtString type is also useful for a compile-time string of unbounded size + // so that you can append to it in a loop.
+ comptime fn double_spaced(q: Quoted) -> CtString { + let mut result = "".as_ctstring(); + + for token in q.tokens() { + if result != "".as_ctstring() { + result = result.append_str(" "); + } + result = result.append_fmtstr(f"{token}"); + } + + result + } + + #[test] + fn concatenate_test() { + comptime { + let result = concatenate(quote {foo}, quote {bar}); + assert_eq(result, quote {foobar}); + + let result = double_spaced(quote {foo bar 3}).as_quoted_str!(); + assert_eq(result, "foo bar 3"); + } + } + // docs:end:concatenate-example + // This function is just to remove unused warnings fn remove_unused_warnings() { let _: Bar = Bar { x: 1, y: [2, 3] }; diff --git a/noir/noir-repo/noir_stdlib/src/schnorr.nr b/noir/noir-repo/noir_stdlib/src/schnorr.nr deleted file mode 100644 index d9d494e3093..00000000000 --- a/noir/noir-repo/noir_stdlib/src/schnorr.nr +++ /dev/null @@ -1,95 +0,0 @@ -use crate::embedded_curve_ops::{EmbeddedCurvePoint, EmbeddedCurveScalar}; - -// docs:start:schnorr_verify -pub fn verify_signature( - public_key: EmbeddedCurvePoint, - signature: [u8; 64], - message: [u8; N], -) -> bool -// docs:end:schnorr_verify -{ - //scalar lo/hi from bytes - let sig_s = EmbeddedCurveScalar::from_bytes(signature, 0); - let sig_e = EmbeddedCurveScalar::from_bytes(signature, 32); - // pub_key is on Grumpkin curve - let mut is_ok = (public_key.y * public_key.y == public_key.x * public_key.x * public_key.x - 17) - & (!public_key.is_infinite); - - if ((sig_s.lo != 0) | (sig_s.hi != 0)) & ((sig_e.lo != 0) | (sig_e.hi != 0)) { - let (r_is_infinite, result) = - calculate_signature_challenge(public_key, sig_s, sig_e, message); - - is_ok &= !r_is_infinite; - for i in 0..32 { - is_ok &= result[i] == signature[32 + i]; - } - } else { - is_ok = false; - } - is_ok -} - -pub fn assert_valid_signature( - public_key: EmbeddedCurvePoint, - signature: [u8; 64], - message: [u8; N], -) { - //scalar lo/hi from bytes - let sig_s = EmbeddedCurveScalar::from_bytes(signature, 0); - let 
sig_e = EmbeddedCurveScalar::from_bytes(signature, 32); - - // assert pub_key is on Grumpkin curve - assert(public_key.y * public_key.y == public_key.x * public_key.x * public_key.x - 17); - assert(public_key.is_infinite == false); - // assert signature is not null - assert((sig_s.lo != 0) | (sig_s.hi != 0)); - assert((sig_e.lo != 0) | (sig_e.hi != 0)); - - let (r_is_infinite, result) = calculate_signature_challenge(public_key, sig_s, sig_e, message); - - assert(!r_is_infinite); - for i in 0..32 { - assert(result[i] == signature[32 + i]); - } -} - -fn calculate_signature_challenge( - public_key: EmbeddedCurvePoint, - sig_s: EmbeddedCurveScalar, - sig_e: EmbeddedCurveScalar, - message: [u8; N], -) -> (bool, [u8; 32]) { - let g1 = EmbeddedCurvePoint { - x: 1, - y: 17631683881184975370165255887551781615748388533673675138860, - is_infinite: false, - }; - let r = crate::embedded_curve_ops::multi_scalar_mul([g1, public_key], [sig_s, sig_e]); - // compare the _hashes_ rather than field elements modulo r - let pedersen_hash = crate::hash::pedersen_hash([r.x, public_key.x, public_key.y]); - let pde: [u8; 32] = pedersen_hash.to_be_bytes(); - - let mut hash_input = [0; N + 32]; - for i in 0..32 { - hash_input[i] = pde[i]; - } - for i in 0..N { - hash_input[32 + i] = message[i]; - } - - let result = crate::hash::blake2s(hash_input); - (r.is_infinite, result) -} - -#[test] -fn test_zero_signature() { - let public_key: EmbeddedCurvePoint = EmbeddedCurvePoint { - x: 1, - y: 17631683881184975370165255887551781615748388533673675138860, - is_infinite: false, - }; - let signature: [u8; 64] = [0; 64]; - let message: [u8; _] = [2; 64]; // every message - let verified = verify_signature(public_key, signature, message); - assert(!verified); -} diff --git a/noir/noir-repo/scripts/check-critical-libraries.sh b/noir/noir-repo/scripts/check-critical-libraries.sh new file mode 100755 index 00000000000..f8e474d23de --- /dev/null +++ b/noir/noir-repo/scripts/check-critical-libraries.sh @@ -0,0 
+1,37 @@ +#!/usr/bin/env bash +set -e + +# Run relative to repo root +cd $(dirname "$0")/../ + +if [[ -z $1 ]]; then + echo "Must specify Noir release to test against" >&2 + echo "usage: ./check-critical-libraries.sh " >&2 + exit 1 +fi +noirup -v $1 + +CRITICAL_LIBRARIES=$(grep -v "^#\|^$" ./CRITICAL_NOIR_LIBRARIES) +readarray -t REPOS_TO_CHECK < <(echo "$CRITICAL_LIBRARIES") + +getLatestReleaseTagForRepo() { + REPO_NAME=$1 + TAG=$(gh release list -R $REPO_NAME --json 'tagName,isLatest' -q '.[] | select(.isLatest == true).tagName') + if [[ -z $TAG ]]; then + echo "$REPO_NAME has no valid release" >&2 + exit 1 + fi + echo $TAG +} + +for REPO in ${REPOS_TO_CHECK[@]}; do + echo $REPO + TMP_DIR=$(mktemp -d) + + TAG=$(getLatestReleaseTagForRepo $REPO) + git clone $REPO -c advice.detachedHead=false --depth 1 --branch $TAG $TMP_DIR + + nargo test -q --program-dir $TMP_DIR + + rm -rf $TMP_DIR +done diff --git a/noir/noir-repo/scripts/install_bb.sh b/noir/noir-repo/scripts/install_bb.sh index db98f17c503..3d1dc038ab8 100755 --- a/noir/noir-repo/scripts/install_bb.sh +++ b/noir/noir-repo/scripts/install_bb.sh @@ -1,6 +1,6 @@ #!/bin/bash -VERSION="0.63.0" +VERSION="0.66.0" BBUP_PATH=~/.bb/bbup diff --git a/noir/noir-repo/test_programs/compilation_report.sh b/noir/noir-repo/test_programs/compilation_report.sh new file mode 100755 index 00000000000..d050e7c9c34 --- /dev/null +++ b/noir/noir-repo/test_programs/compilation_report.sh @@ -0,0 +1,51 @@ +#!/usr/bin/env bash +set -e + +current_dir=$(pwd) +base_path="$current_dir/execution_success" + +# Tests to be profiled for compilation report +tests_to_profile=("sha256_regression" "regression_4709" "ram_blowup_regression") + +echo "{\"compilation_reports\": [ " > $current_dir/compilation_report.json + +# If there is an argument that means we want to generate a report for only the current directory +if [ "$#" -ne 0 ]; then + base_path="$current_dir" + tests_to_profile=(".") +fi + +ITER="1" +NUM_ARTIFACTS=${#tests_to_profile[@]} + 
+for dir in ${tests_to_profile[@]}; do + if [[ " ${excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + if [[ ${CI-false} = "true" ]] && [[ " ${ci_excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + cd $base_path/$dir + + # The default package to run is the supplied list hardcoded at the top of the script + PACKAGE_NAME=$dir + # Otherwise default to the current directory as the package we want to run + if [ "$#" -ne 0 ]; then + PACKAGE_NAME=$(basename $current_dir) + fi + + COMPILE_TIME=$((time nargo compile --force --silence-warnings) 2>&1 | grep real | grep -oE '[0-9]+m[0-9]+.[0-9]+s') + echo -e " {\n \"artifact_name\":\"$PACKAGE_NAME\",\n \"time\":\"$COMPILE_TIME\"" >> $current_dir/compilation_report.json + + if (($ITER == $NUM_ARTIFACTS)); then + echo "}" >> $current_dir/compilation_report.json + else + echo "}, " >> $current_dir/compilation_report.json + fi + + ITER=$(( $ITER + 1 )) +done + +echo "]}" >> $current_dir/compilation_report.json diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/Nargo.toml new file mode 100644 index 00000000000..4471bed86dc --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "attribute_order" +type = "bin" +authors = [""] +compiler_version = ">=0.36.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a.nr new file mode 100644 index 00000000000..663643f1288 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a.nr @@ -0,0 +1,13 @@ +// This is run fifth, after each child module finishes even though +// the function comes before the module declarations in the source. 
+#[crate::assert_run_order_function(4)] +pub fn foo() {} + +mod a_child1; +mod a_child2; + +#[crate::assert_run_order_struct(5)] +pub struct Bar {} + +#[crate::assert_run_order_trait(6)] +pub trait Foo {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child1.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child1.nr new file mode 100644 index 00000000000..834bbd43fb5 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child1.nr @@ -0,0 +1,5 @@ +#[crate::assert_run_order_function(0)] +pub fn foo() {} + +#[crate::assert_run_order_struct(1)] +pub struct Foo {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child2.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child2.nr new file mode 100644 index 00000000000..3548f4450a5 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/a/a_child2.nr @@ -0,0 +1,5 @@ +#[crate::assert_run_order_trait(2)] +pub trait Foo {} + +#[crate::assert_run_order_function(3)] +pub fn foo() {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child1.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child1.nr new file mode 100644 index 00000000000..a8e7e898ec1 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child1.nr @@ -0,0 +1,4 @@ +#[crate::assert_run_order_function(8)] +pub fn foo() {} + +#![crate::assert_run_order_module(9)] diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child2.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child2.nr new file mode 100644 index 00000000000..8e6d967707a --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/b_child2.nr @@ -0,0 +1,2 @@ 
+#[crate::assert_run_order_function(7)] +pub fn foo() {} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/mod.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/mod.nr new file mode 100644 index 00000000000..77df04e15a9 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/b/mod.nr @@ -0,0 +1,25 @@ +// Declare these in reverse order to ensure the declaration +// order here is respected +mod b_child2; + +#[crate::assert_run_order_module(12)] +mod b_child1; + +#[crate::assert_run_order_function(13)] +pub fn foo() {} + +#![crate::assert_run_order_module(14)] + +// Test inline submodules as well +#[crate::assert_run_order_module(15)] +mod b_child3 { + #![crate::assert_run_order_module(10)] + + #[crate::assert_run_order_function(11)] + pub fn foo() {} +} + +#[crate::assert_run_order_function(16)] +pub fn bar() {} + +#![crate::assert_run_order_module(17)] diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/main.nr new file mode 100644 index 00000000000..9d5ba32b58e --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_order/src/main.nr @@ -0,0 +1,29 @@ +mod a; +mod b; + +#[assert_run_order_function(18)] +fn main() { + assert_eq(attributes_run, 19); +} + +comptime mut global attributes_run: Field = 0; + +pub comptime fn assert_run_order_function(_f: FunctionDefinition, order: Field) { + assert_eq(order, attributes_run); + attributes_run += 1; +} + +pub comptime fn assert_run_order_struct(_s: StructDefinition, order: Field) { + assert_eq(order, attributes_run); + attributes_run += 1; +} + +pub comptime fn assert_run_order_trait(_t: TraitDefinition, order: Field) { + assert_eq(order, attributes_run); + attributes_run += 1; +} + +pub comptime fn assert_run_order_module(_m: Module, order: Field) { + assert_eq(order, attributes_run); + 
attributes_run += 1; +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/Nargo.toml b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/Nargo.toml new file mode 100644 index 00000000000..ad71067a58f --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "attribute_sees_results_of_previous_attribute" +type = "bin" +authors = [""] +compiler_version = ">=0.34.0" + +[dependencies] diff --git a/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/src/main.nr new file mode 100644 index 00000000000..561c3e12e0c --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_empty/attribute_sees_result_of_previous_attribute/src/main.nr @@ -0,0 +1,11 @@ +#[derive(Eq)] +pub struct Foo {} + +#[check_eq_derived_for_foo] +fn main() {} + +comptime fn check_eq_derived_for_foo(_f: FunctionDefinition) { + let foo = quote {Foo}.as_type(); + let eq = quote {Eq}.as_trait_constraint(); + assert(foo.implements(eq)); +} diff --git a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr index ca337c822d8..8cdd15aaa0e 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/comptime_fmt_strings/src/main.nr @@ -6,7 +6,7 @@ fn main() { // Can't print these at compile-time here since printing to stdout while // compiling breaks the test runner. 
- let s1 = f"x is {x}, fake interpolation: \{y}, y is {y}"; + let s1 = f"x is {x}, fake interpolation: {{y}}, y is {y}"; let s2 = std::mem::zeroed::>(); (s1, s2) }; diff --git a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr index db3f78a35c6..4396169235d 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr +++ b/noir/noir-repo/test_programs/compile_success_empty/derive_impl/src/main.nr @@ -20,15 +20,17 @@ comptime fn derive_default(typ: StructDefinition) -> Quoted { } } +// Bar needs to be defined before Foo so that its impls are defined before +// Foo's are. +#[derive_default] +struct Bar {} + #[derive_default] struct Foo { x: Field, y: Bar, } -#[derive_default] -struct Bar {} - comptime fn make_field_exprs(fields: [(Quoted, Type)]) -> [Quoted] { let mut result = &[]; for my_field in fields { diff --git a/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr b/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr deleted file mode 100644 index 53b71fc3842..00000000000 --- a/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/src/main.nr +++ /dev/null @@ -1,21 +0,0 @@ -use std::embedded_curve_ops::EmbeddedCurvePoint; - -// Note: If main has any unsized types, then the verifier will never be able -// to figure out the circuit instance -fn main() { - let message = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]; - let pub_key = EmbeddedCurvePoint { - x: 0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a, - y: 0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197, - is_infinite: false, - }; - let signature = [ - 1, 13, 119, 112, 212, 39, 233, 41, 84, 235, 255, 93, 245, 172, 186, 83, 157, 253, 76, 77, - 33, 128, 178, 15, 214, 67, 105, 107, 177, 234, 77, 48, 27, 237, 155, 84, 39, 84, 247, 27, - 22, 8, 176, 230, 24, 115, 145, 220, 254, 
122, 135, 179, 171, 4, 214, 202, 64, 199, 19, 84, - 239, 138, 124, 12, - ]; - - let valid_signature = std::schnorr::verify_signature(pub_key, signature, message); - assert(valid_signature); -} diff --git a/noir/noir-repo/test_programs/compile_success_with_bug/underconstrained_value_detector_5425/Nargo.toml b/noir/noir-repo/test_programs/compile_success_with_bug/underconstrained_value_detector_5425/Nargo.toml new file mode 100644 index 00000000000..48ab5a0390b --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_with_bug/underconstrained_value_detector_5425/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "underconstrained_value_detector_5425" +type = "bin" +authors = [""] +compiler_version = ">=0.36.0" + +[dependencies] \ No newline at end of file diff --git a/noir/noir-repo/test_programs/compile_success_with_bug/underconstrained_value_detector_5425/src/main.nr b/noir/noir-repo/test_programs/compile_success_with_bug/underconstrained_value_detector_5425/src/main.nr new file mode 100644 index 00000000000..1d9269eebd5 --- /dev/null +++ b/noir/noir-repo/test_programs/compile_success_with_bug/underconstrained_value_detector_5425/src/main.nr @@ -0,0 +1,48 @@ +unconstrained fn maximum_price(options: [u32; 3]) -> u32 { + let mut maximum_option = 0; + for option in options { + if (option > maximum_option) { + maximum_option = option; + } + } + maximum_option +} + +fn main(sandwiches: pub [u32; 3], drinks: pub [u32; 3], snacks: pub [u32; 3], best_value: u32) { + unsafe { + let meal_deal_cost: u32 = 390; + let most_expensive_sandwich = maximum_price(sandwiches); + let mut sandwich_exists = false; + for sandwich_price in sandwiches { + assert(sandwich_price <= most_expensive_sandwich); + sandwich_exists |= sandwich_price == most_expensive_sandwich; + } + + // maximum_price(sandwiches) is properly constrained with this assert, + // linking the result to the arguments + assert(sandwich_exists); + + let most_expensive_drink = maximum_price(drinks); + let mut drink_exists 
= false; + for drink_price in drinks { + assert(drink_price <= most_expensive_drink); + drink_exists |= drink_price == most_expensive_drink; + } + + // maximum_price(drinks) is properly constrained with this assert, + // linking the result to the arguments + assert(drink_exists); + + let most_expensive_snack = maximum_price(snacks); + // maximum_price(snacks)'s result isn't constrained against `snacks` + // in any way, triggering the missing Brillig constraint check + + assert( + best_value + == ( + most_expensive_sandwich + most_expensive_drink + most_expensive_snack + - meal_deal_cost + ), + ); + } +} diff --git a/noir/noir-repo/test_programs/execution_success/derive/src/main.nr b/noir/noir-repo/test_programs/execution_success/derive/src/main.nr index 6900aa6aead..f7d4f6b607a 100644 --- a/noir/noir-repo/test_programs/execution_success/derive/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/derive/src/main.nr @@ -25,6 +25,14 @@ comptime fn derive_do_nothing(s: StructDefinition) -> Quoted { // Test stdlib derive fns & multiple traits // - We can derive Ord and Hash even though std::cmp::Ordering and std::hash::Hasher aren't imported +// - We need to define MyOtherOtherStruct first since MyOtherStruct references it as a field and +// attributes are run in reading order. If it were defined afterward, the derived Eq impl for MyOtherStruct +// would error that MyOtherOtherStruct doesn't (yet) implement Eq. 
+#[derive(Eq, Default, Hash, Ord)] +struct MyOtherOtherStruct { + x: T, +} + #[derive(Eq, Default, Hash, Ord)] struct MyOtherStruct { field1: A, @@ -32,11 +40,6 @@ struct MyOtherStruct { field3: MyOtherOtherStruct, } -#[derive(Eq, Default, Hash, Ord)] -struct MyOtherOtherStruct { - x: T, -} - #[derive(Eq, Default, Hash, Ord)] struct EmptyStruct {} diff --git a/noir/noir-repo/test_programs/execution_success/diamond_deps_0/Prover.toml b/noir/noir-repo/test_programs/execution_success/diamond_deps_0/Prover.toml index a713241e7dd..2bad103177c 100644 --- a/noir/noir-repo/test_programs/execution_success/diamond_deps_0/Prover.toml +++ b/noir/noir-repo/test_programs/execution_success/diamond_deps_0/Prover.toml @@ -1,3 +1,3 @@ x = 1 y = 1 -return = 5 \ No newline at end of file +return = 7 diff --git a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr index e69184b9c96..85cf60dc796 100644 --- a/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/embedded_curve_ops/src/main.nr @@ -20,4 +20,22 @@ fn main(priv_key: Field, pub_x: pub Field, pub_y: pub Field) { // The results should be double the g1 point because the scalars are 1 and we pass in g1 twice assert(double.x == res.x); + + // Tests for #6549 + let const_scalar1 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 23, hi: 0 }; + let const_scalar2 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 0, hi: 23 }; + let const_scalar3 = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 13, hi: 4 }; + let partial_mul = std::embedded_curve_ops::multi_scalar_mul( + [g1, double, pub_point, g1, g1], + [scalar, const_scalar1, scalar, const_scalar2, const_scalar3], + ); + assert(partial_mul.x == 0x2024c4eebfbc8a20018f8c95c7aab77c6f34f10cf785a6f04e97452d8708fda7); + // Check simplification by zero + let zero_point = 
std::embedded_curve_ops::EmbeddedCurvePoint { x: 0, y: 0, is_infinite: true }; + let const_zero = std::embedded_curve_ops::EmbeddedCurveScalar { lo: 0, hi: 0 }; + let partial_mul = std::embedded_curve_ops::multi_scalar_mul( + [zero_point, double, g1], + [scalar, const_zero, scalar], + ); + assert(partial_mul == g1); } diff --git a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr index cfd4e4a9136..aab531ea559 100644 --- a/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/hashmap/src/main.nr @@ -104,10 +104,11 @@ fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { hashmap.insert(entry.key, entry.value); } - assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input lenght."); + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); for entry in input { - assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + let entry_key = entry.key; + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry_key}."); } hashmap.clear(); diff --git a/noir/noir-repo/test_programs/execution_success/schnorr/Nargo.toml b/noir/noir-repo/test_programs/execution_success/hint_black_box/Nargo.toml similarity index 69% rename from noir/noir-repo/test_programs/execution_success/schnorr/Nargo.toml rename to noir/noir-repo/test_programs/execution_success/hint_black_box/Nargo.toml index aa24a2f3caf..8a49ec25494 100644 --- a/noir/noir-repo/test_programs/execution_success/schnorr/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/hint_black_box/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "schnorr" +name = "hint_black_box" type = "bin" authors = [""] diff --git a/noir/noir-repo/test_programs/execution_success/hint_black_box/Prover.toml b/noir/noir-repo/test_programs/execution_success/hint_black_box/Prover.toml new file mode 100644 index 
00000000000..67dda9c2b68 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/hint_black_box/Prover.toml @@ -0,0 +1,3 @@ +# 5 * a = b +a = 10 +b = 50 diff --git a/noir/noir-repo/test_programs/execution_success/hint_black_box/src/main.nr b/noir/noir-repo/test_programs/execution_success/hint_black_box/src/main.nr new file mode 100644 index 00000000000..1109c54301f --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/hint_black_box/src/main.nr @@ -0,0 +1,90 @@ +use std::hint::black_box; + +fn main(a: u32, b: u32) { + // This version unrolls into a number of additions + assert_eq(loop(5, a), b); + // This version simplifies into a single `constraint 50 == b` + assert_eq(loop(5, 10), b); + // This version should not simplify down to a single constraint, + // it should treat 10 as opaque: + assert_eq(loop(5, black_box(10)), b); + + // Check array handling. + let arr = [a, a, a, a, a]; + + assert_eq(array_sum(arr), b); + assert_eq(array_sum(black_box(arr)), b); + + assert_eq(slice_sum(arr.as_slice()), b); + assert_eq(slice_sum(black_box(arr).as_slice()), b); + + // This doesn't work because by calling `black_box` on a slice the compiler + // loses track of the length, and then cannot unroll the loop for ACIR. + //assert_eq(slice_sum(black_box(arr.as_slice())), b); + + // But we can pass a blackboxed slice to Brillig. + let s = unsafe { brillig_slice_sum(black_box(arr.as_slice())) }; + assert_eq(s, b); + + let mut d = b; + // This gets completely eliminated: + let mut c = 0; + set_ref(&mut c, &mut d); + assert_eq(c, b); + + // This way the constraint is preserved: + let mut c = 0; + set_ref(&mut c, &mut black_box(d)); + assert_eq(c, b); + + // A reference over the output of black box is not the original variable: + let mut c = 0; + set_ref(&mut black_box(c), &mut d); + assert_eq(c, 0); + + // This would cause a causes a crash during SSA passes unless it's a Brillig runtime: + // > Could not resolve some references to the array. 
All references must be resolved at compile time + // The SSA cannot have Allocate by the time we start generating ACIR, but `black_box` prevents them + // from being optimised away during SSA passes. + // If we use `--force-brillig` then the it doesn't crash but the assertion fails because `mem2reg` + // eliminates the storing to the reference. + //let mut c = 0; + //set_ref(black_box(&mut c), black_box(&mut d)); + //assert_eq(c, b); +} + +fn loop(n: u32, k: u32) -> u32 { + let mut sum = 0; + for _ in 0..n { + sum = sum + k; + } + sum +} + +fn array_sum(xs: [u32; N]) -> u32 { + let mut sum = 0; + for i in 0..N { + sum = sum + xs[i]; + } + sum +} + +fn slice_sum(xs: [u32]) -> u32 { + let mut sum = 0; + for x in xs { + sum = sum + x; + } + sum +} + +unconstrained fn brillig_slice_sum(xs: [u32]) -> u32 { + let mut sum = 0; + for x in xs { + sum = sum + x; + } + sum +} + +fn set_ref(c: &mut u32, b: &mut u32) { + *c = *b; +} diff --git a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr index 8de4d0f2508..68b9f2407b9 100644 --- a/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/reference_counts/src/main.nr @@ -2,7 +2,7 @@ use std::mem::array_refcount; fn main() { let mut array = [0, 1, 2]; - assert_refcount(array, 1); + assert_refcount(array, 2); borrow(array, array_refcount(array)); borrow_mut(&mut array, array_refcount(array)); diff --git a/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr index fbee6956dfa..b13b6c25a7e 100644 --- a/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/regression_6451/src/main.nr @@ -9,7 +9,7 @@ fn main(x: Field) { value += term2; value.assert_max_bit_size::<1>(); - // Regression 
test for Aztec Packages issue #6451 + // Regression test for #6447 (Aztec Packages issue #9488) let y = unsafe { empty(x + 1) }; let z = y + x + 1; let z1 = z + y; diff --git a/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6674_1/Nargo.toml similarity index 62% rename from noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/Nargo.toml rename to noir/noir-repo/test_programs/execution_success/regression_6674_1/Nargo.toml index 599f06ac3d2..ad87f9deb46 100644 --- a/noir/noir-repo/test_programs/compile_success_empty/schnorr_simplification/Nargo.toml +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_1/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "schnorr_simplification" +name = "regression_6674_1" type = "bin" authors = [""] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_1/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6674_1/src/main.nr new file mode 100644 index 00000000000..70315c16b78 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_1/src/main.nr @@ -0,0 +1,85 @@ +use std::mem::zeroed; + +pub struct BoundedVec4 { + storage: [Field; 4], + len: u32, +} + +impl BoundedVec4 { + pub fn new() -> Self { + BoundedVec4 { storage: [0; 4], len: 0 } + } + + pub fn push(&mut self, elem: Field) { + self.storage[self.len] = elem; + self.len += 1; + } +} + +pub struct PrivateKernelCircuitPublicInputs { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +pub struct FixtureBuilder { + pub public_call_requests: BoundedVec4, + pub counter: Field, +} + +impl FixtureBuilder { + pub fn new() -> Self { + FixtureBuilder { public_call_requests: zeroed(), counter: 0 } + } + + pub fn append_public_call_requests_inner(&mut self) { + self.public_call_requests.push(self.next_counter()); + } + + pub fn append_public_call_requests(&mut self) { + 
for _ in 0..4 { + // Note that here we push via a method call + self.append_public_call_requests_inner(); + } + } + + fn next_counter(&mut self) -> Field { + let counter = self.counter; + self.counter += 1; + counter + } +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +pub unconstrained fn sort_by(array: [Field; 4]) -> [Field; 4] { + let result = array; + get_sorting_index(array); + result +} + +unconstrained fn get_sorting_index(array: [Field; 4]) { + let _ = [0; 4]; + let mut a = array; + for i in 1..4 { + for j in 0..i { + a[i] = a[j]; + } + } +} + +unconstrained fn main() { + let mut previous_kernel = FixtureBuilder::new(); + previous_kernel.append_public_call_requests(); + + let mut output_composer = PrivateKernelCircuitPublicInputsComposer { + l2_to_l1_msgs: [0; 4], + public_call_requests: previous_kernel.public_call_requests.storage, + }; + output_composer.l2_to_l1_msgs = sort_by(output_composer.l2_to_l1_msgs); + output_composer.public_call_requests = sort_by(output_composer.public_call_requests); + + assert_eq(previous_kernel.public_call_requests.storage[1], 1, "equality"); +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_2/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6674_2/Nargo.toml new file mode 100644 index 00000000000..666765c8172 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_2/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_6674_2" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_2/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6674_2/src/main.nr new file mode 100644 index 00000000000..42ad4fa4031 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_2/src/main.nr @@ -0,0 +1,87 @@ +use std::mem::zeroed; + +pub struct BoundedVec4 { + 
storage: [Field; 4], + len: u32, +} + +impl BoundedVec4 { + pub fn new() -> Self { + BoundedVec4 { storage: [0; 4], len: 0 } + } + + pub fn push(&mut self, elem: Field) { + self.storage[self.len] = elem; + self.len += 1; + } +} + +pub struct PrivateKernelCircuitPublicInputs { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +pub struct FixtureBuilder { + pub public_call_requests: BoundedVec4, + pub counter: Field, +} + +impl FixtureBuilder { + pub fn new() -> Self { + FixtureBuilder { public_call_requests: zeroed(), counter: 0 } + } + + pub fn append_public_call_requests(&mut self) { + for _ in 0..4 { + // Note that here we push directly, not through a method call + self.public_call_requests.push(self.next_counter()); + } + } + + fn next_counter(&mut self) -> Field { + let counter = self.counter; + self.counter += 1; + counter + } +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + pub l2_to_l1_msgs: [Field; 4], + pub public_call_requests: [Field; 4], +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub unconstrained fn sort_ordered_values(&mut self) { + self.l2_to_l1_msgs = sort_by(self.l2_to_l1_msgs); + self.public_call_requests = sort_by(self.public_call_requests); + } +} + +pub unconstrained fn sort_by(array: [Field; 4]) -> [Field; 4] { + let result = array; + get_sorting_index(array); + result +} + +unconstrained fn get_sorting_index(array: [Field; 4]) { + let _ = [0; 4]; + let mut a = array; + for i in 1..4 { + for j in 0..i { + a[i] = a[j]; + } + } +} + +unconstrained fn main() { + let mut previous_kernel = FixtureBuilder::new(); + previous_kernel.append_public_call_requests(); + + let mut output_composer = PrivateKernelCircuitPublicInputsComposer { + l2_to_l1_msgs: [0; 4], + public_call_requests: previous_kernel.public_call_requests.storage, + }; + output_composer.sort_ordered_values(); + + assert_eq(previous_kernel.public_call_requests.storage[1], 1, "equality"); +} diff --git 
a/noir/noir-repo/test_programs/execution_success/regression_6674_3/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6674_3/Nargo.toml new file mode 100644 index 00000000000..7b396f63693 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_3/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_6674_3" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6674_3/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6674_3/src/main.nr new file mode 100644 index 00000000000..2c87a4c679c --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6674_3/src/main.nr @@ -0,0 +1,191 @@ +use std::mem::zeroed; + +pub struct PrivateAccumulatedData { + pub public_call_requests: [Counted; 4], +} + +pub struct PrivateAccumulatedDataBuilder { + pub l2_to_l1_msgs: BoundedVec, + pub public_call_requests: BoundedVec, 4>, + pub private_call_stack: BoundedVec, +} + +impl PrivateAccumulatedDataBuilder { + pub fn finish(self) -> PrivateAccumulatedData { + PrivateAccumulatedData { public_call_requests: self.public_call_requests.storage() } + } +} + +pub struct Counted { + pub inner: T, + pub counter: u32, +} + +impl Counted { + pub fn new(inner: T, counter: u32) -> Self { + Self { inner, counter } + } +} + +pub struct PrivateKernelCircuitPublicInputs { + pub end: PrivateAccumulatedData, +} + +pub struct PrivateKernelData { + pub public_inputs: PrivateKernelCircuitPublicInputs, +} + +pub struct FixtureBuilder2 { + pub public_teardown_call_request: Field, + pub private_call_requests: BoundedVec, + pub public_call_requests: BoundedVec, 4>, + pub counter: u32, +} + +impl FixtureBuilder2 { + pub fn new() -> Self { + let mut builder: FixtureBuilder2 = zeroed(); + builder.counter = 1; + builder + } + + pub fn to_private_accumulated_data_builder(self) -> PrivateAccumulatedDataBuilder { + PrivateAccumulatedDataBuilder { + l2_to_l1_msgs: 
zeroed(), + public_call_requests: self.public_call_requests, + private_call_stack: vec_reverse(self.private_call_requests), + } + } + + pub fn to_private_accumulated_data(self) -> PrivateAccumulatedData { + self.to_private_accumulated_data_builder().finish() + } + + pub fn to_private_kernel_circuit_public_inputs(self) -> PrivateKernelCircuitPublicInputs { + PrivateKernelCircuitPublicInputs { end: self.to_private_accumulated_data() } + } + + pub fn to_private_kernel_data(self) -> PrivateKernelData { + let public_inputs = + PrivateKernelCircuitPublicInputs { end: self.to_private_accumulated_data() }; + PrivateKernelData { public_inputs } + } + + pub fn add_public_call_request(&mut self) { + self.public_call_requests.push(Counted::new(zeroed(), self.next_counter())); + } + + pub fn append_public_call_requests(&mut self, num: u32) { + for _ in 0..num { + self.add_public_call_request(); + } + } + + pub fn set_public_teardown_call_request(&mut self) { + let mut fields = [0; 5]; + for i in 0..5 { + fields[i] = i as Field; + } + + self.public_teardown_call_request = zeroed(); + } + + fn next_counter(&mut self) -> u32 { + let counter = self.counter; + self.counter += 1; + counter + } +} + +struct PrivateKernelTailToPublicInputsBuilder { + previous_kernel: FixtureBuilder2, +} + +impl PrivateKernelTailToPublicInputsBuilder { + pub unconstrained fn execute(&mut self) { + let kernel = PrivateKernelTailToPublicCircuitPrivateInputs { + previous_kernel: self.previous_kernel.to_private_kernel_data(), + }; + let mut output_composer = PrivateKernelCircuitPublicInputsComposer::new_from_previous_kernel( + kernel.previous_kernel.public_inputs, + ); + output_composer.sort_ordered_values(); + } +} + +pub struct PrivateKernelTailToPublicCircuitPrivateInputs { + previous_kernel: PrivateKernelData, +} + +pub struct PrivateKernelCircuitPublicInputsComposer { + public_inputs: PrivateKernelCircuitPublicInputsBuilder, +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub unconstrained fn 
sort_ordered_values(&mut self) { + // Note hashes, nullifiers, and private logs are sorted in the reset circuit. + self.public_inputs.end.l2_to_l1_msgs.storage = + sort_by_counter_desc(self.public_inputs.end.l2_to_l1_msgs.storage); + self.public_inputs.end.public_call_requests.storage = + sort_by_counter_desc(self.public_inputs.end.public_call_requests.storage); + } +} + +impl PrivateKernelCircuitPublicInputsComposer { + pub fn new_from_previous_kernel( + previous_kernel_public_inputs: PrivateKernelCircuitPublicInputs, + ) -> Self { + let mut public_inputs: PrivateKernelCircuitPublicInputsBuilder = zeroed(); + let start = previous_kernel_public_inputs.end; + public_inputs.end.public_call_requests = BoundedVec { + storage: start.public_call_requests, + len: start.public_call_requests.len(), + }; + PrivateKernelCircuitPublicInputsComposer { public_inputs } + } +} + +pub struct PrivateKernelCircuitPublicInputsBuilder { + end: PrivateAccumulatedDataBuilder, +} + +fn vec_reverse(vec: BoundedVec) -> BoundedVec { + let mut reversed = BoundedVec::new(); + let len = vec.len(); + for i in 0..N { + if i < len { + reversed.push(vec.get_unchecked(len - i - 1)); + } + } + reversed +} + +pub unconstrained fn sort_by_counter_desc(array: [T; N]) -> [T; N] { + sort_by(array) +} + +pub unconstrained fn sort_by(array: [T; N]) -> [T; N] { + let mut result = array; + unsafe { get_sorting_index(array) }; + result +} + +unconstrained fn get_sorting_index(array: [T; N]) { + let _ = [0; 4]; + let mut a = array; + for i in 1..4 { + for j in 0..i { + a[i] = a[j]; + } + } +} + +unconstrained fn main() { + let mut previous_kernel = FixtureBuilder2::new(); + let mut builder = PrivateKernelTailToPublicInputsBuilder { previous_kernel }; + builder.previous_kernel.append_public_call_requests(4); + assert_eq(builder.previous_kernel.public_call_requests.storage[3].counter, 4); + builder.previous_kernel.set_public_teardown_call_request(); + builder.execute(); + 
assert_eq(builder.previous_kernel.public_call_requests.storage[3].counter, 4); +} diff --git a/noir/noir-repo/test_programs/execution_success/regression_6734/Nargo.toml b/noir/noir-repo/test_programs/execution_success/regression_6734/Nargo.toml new file mode 100644 index 00000000000..4c757bab454 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6734/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "regression_6734" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/regression_6734/src/main.nr b/noir/noir-repo/test_programs/execution_success/regression_6734/src/main.nr new file mode 100644 index 00000000000..87cac64aaa1 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/regression_6734/src/main.nr @@ -0,0 +1,24 @@ +// https://github.com/noir-lang/noir/issues/6734 + +pub fn sub_array_extended( + src: [Field; SRC_LEN], + offset: u32, +) -> [Field; DST_LEN] { + let available_elements_to_copy = SRC_LEN - offset; + let elements_to_copy = if DST_LEN > available_elements_to_copy { + available_elements_to_copy + } else { + DST_LEN + }; + + let mut dst: [Field; DST_LEN] = std::mem::zeroed(); + for i in 0..elements_to_copy { + dst[i] = src[i + offset]; + } + + dst +} + +unconstrained fn main() { + assert_eq(sub_array_extended([], 0), []); +} diff --git a/noir/noir-repo/test_programs/execution_success/return_twice/Nargo.toml b/noir/noir-repo/test_programs/execution_success/return_twice/Nargo.toml new file mode 100644 index 00000000000..defd68f37b8 --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/return_twice/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "return_twice" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/noir-repo/test_programs/execution_success/return_twice/Prover.toml b/noir/noir-repo/test_programs/execution_success/return_twice/Prover.toml new file mode 100644 index 00000000000..b343065a7fc --- /dev/null +++ 
b/noir/noir-repo/test_programs/execution_success/return_twice/Prover.toml @@ -0,0 +1,2 @@ +return = [100, 100] +in0 = 10 diff --git a/noir/noir-repo/test_programs/execution_success/return_twice/src/main.nr b/noir/noir-repo/test_programs/execution_success/return_twice/src/main.nr new file mode 100644 index 00000000000..68bd5f916ce --- /dev/null +++ b/noir/noir-repo/test_programs/execution_success/return_twice/src/main.nr @@ -0,0 +1,5 @@ +pub fn main(in0: Field) -> pub (Field, Field) { + let out0 = (in0 * in0); + let out1 = (in0 * in0); + (out0, out1) +} diff --git a/noir/noir-repo/test_programs/execution_success/schnorr/Prover.toml b/noir/noir-repo/test_programs/execution_success/schnorr/Prover.toml deleted file mode 100644 index 2faf2018e07..00000000000 --- a/noir/noir-repo/test_programs/execution_success/schnorr/Prover.toml +++ /dev/null @@ -1,70 +0,0 @@ -message = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -message_field = "0x010203040506070809" -pub_key_x = "0x04b260954662e97f00cab9adb773a259097f7a274b83b113532bce27fa3fb96a" -pub_key_y = "0x2fd51571db6c08666b0edfbfbc57d432068bccd0110a39b166ab243da0037197" -signature = [ - 1, - 13, - 119, - 112, - 212, - 39, - 233, - 41, - 84, - 235, - 255, - 93, - 245, - 172, - 186, - 83, - 157, - 253, - 76, - 77, - 33, - 128, - 178, - 15, - 214, - 67, - 105, - 107, - 177, - 234, - 77, - 48, - 27, - 237, - 155, - 84, - 39, - 84, - 247, - 27, - 22, - 8, - 176, - 230, - 24, - 115, - 145, - 220, - 254, - 122, - 135, - 179, - 171, - 4, - 214, - 202, - 64, - 199, - 19, - 84, - 239, - 138, - 124, - 12, -] diff --git a/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr b/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr deleted file mode 100644 index ab3c65372c5..00000000000 --- a/noir/noir-repo/test_programs/execution_success/schnorr/src/main.nr +++ /dev/null @@ -1,24 +0,0 @@ -use std::embedded_curve_ops; - -// Note: If main has any unsized types, then the verifier will never be able -// to figure out the circuit 
instance -fn main( - message: [u8; 10], - message_field: Field, - pub_key_x: Field, - pub_key_y: Field, - signature: [u8; 64], -) { - // Regression for issue #2421 - // We want to make sure that we can accurately verify a signature whose message is a slice vs. an array - let message_field_bytes: [u8; 10] = message_field.to_be_bytes(); - - // Check that passing an array as the message is valid - let pub_key = - embedded_curve_ops::EmbeddedCurvePoint { x: pub_key_x, y: pub_key_y, is_infinite: false }; - let valid_signature = std::schnorr::verify_signature(pub_key, signature, message_field_bytes); - assert(valid_signature); - let valid_signature = std::schnorr::verify_signature(pub_key, signature, message); - assert(valid_signature); - std::schnorr::assert_valid_signature(pub_key, signature, message); -} diff --git a/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr b/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr index b56a4fe1747..689ba9d4a04 100644 --- a/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr +++ b/noir/noir-repo/test_programs/execution_success/uhashmap/src/main.nr @@ -104,7 +104,8 @@ unconstrained fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match input length."); for entry in input { - assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry.key}."); + let entry_key = entry.key; + assert(hashmap.contains_key(entry.key), f"Not found inserted key {entry_key}."); } hashmap.clear(); diff --git a/noir/noir-repo/test_programs/gates_report_brillig_execution.sh b/noir/noir-repo/test_programs/gates_report_brillig_execution.sh index 024c7612541..b3f4a8bda98 100755 --- a/noir/noir-repo/test_programs/gates_report_brillig_execution.sh +++ b/noir/noir-repo/test_programs/gates_report_brillig_execution.sh @@ -8,8 +8,6 @@ excluded_dirs=( "double_verify_nested_proof" "overlapping_dep_and_mod" "comptime_println" - # Takes a 
very long time to execute as large loops do not get simplified. - "regression_4709" # bit sizes for bigint operation doesn't match up. "bigint" # Expected to fail as test asserts on which runtime it is in. diff --git a/noir/noir-repo/test_programs/memory_report.sh b/noir/noir-repo/test_programs/memory_report.sh index 1b8274b76cc..4d03726d374 100755 --- a/noir/noir-repo/test_programs/memory_report.sh +++ b/noir/noir-repo/test_programs/memory_report.sh @@ -5,22 +5,24 @@ sudo apt-get install heaptrack NARGO="nargo" - # Tests to be profiled for memory report tests_to_profile=("keccak256" "workspace" "regression_4709" "ram_blowup_regression") current_dir=$(pwd) -execution_success_path="$current_dir/execution_success" -test_dirs=$(ls $execution_success_path) +base_path="$current_dir/execution_success" + +# If there is an argument that means we want to generate a report for only the current directory +if [ "$#" -ne 0 ]; then + base_path="$current_dir" + tests_to_profile=(".") +fi FIRST="1" echo "{\"memory_reports\": [ " > memory_report.json - for test_name in ${tests_to_profile[@]}; do - full_path=$execution_success_path"/"$test_name - cd $full_path + cd $base_path/$test_name if [ $FIRST = "1" ] then @@ -28,6 +30,11 @@ for test_name in ${tests_to_profile[@]}; do else echo " ," >> $current_dir"/memory_report.json" fi + + if [ "$#" -ne 0 ]; then + test_name=$(basename $current_dir) + fi + heaptrack --output $current_dir/$test_name"_heap" $NARGO compile --force if test -f $current_dir/$test_name"_heap.gz"; then diff --git a/noir/noir-repo/test_programs/noir_test_success/assert_message/Nargo.toml b/noir/noir-repo/test_programs/noir_test_success/assert_message/Nargo.toml new file mode 100644 index 00000000000..667035339bd --- /dev/null +++ b/noir/noir-repo/test_programs/noir_test_success/assert_message/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "assert_message" +type = "bin" +authors = [""] +compiler_version = ">=0.23.0" + +[dependencies] diff --git 
a/noir/noir-repo/test_programs/noir_test_success/assert_message/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/assert_message/src/main.nr new file mode 100644 index 00000000000..073e42daf4a --- /dev/null +++ b/noir/noir-repo/test_programs/noir_test_success/assert_message/src/main.nr @@ -0,0 +1,15 @@ +// Have to use inputs otherwise the ACIR gets optimized away due to constants. +// With the original ACIR the optimizations can rearrange or merge opcodes, +// which might end up getting out of sync with assertion messages. +#[test(should_fail_with = "first")] +fn test_assert_message_preserved_during_optimization(a: Field, b: Field, c: Field) { + if (a + b) * (a - b) != c { + assert((a + b) * (a - b) == c, "first"); + assert((a - b) * (a + b) == c, "second"); + assert((a + b) * (a - b) == c, "third"); + assert((2 * a + b) * (a - b / 2) == c * c, "fourth"); + } else { + // The fuzzer might have generated a passing test. + assert((a + b) * (a - b) != c, "first"); + } +} diff --git a/noir/noir-repo/test_programs/noir_test_success/fuzzer_checks/src/main.nr b/noir/noir-repo/test_programs/noir_test_success/fuzzer_checks/src/main.nr index 2b928db092e..70e5e29b855 100644 --- a/noir/noir-repo/test_programs/noir_test_success/fuzzer_checks/src/main.nr +++ b/noir/noir-repo/test_programs/noir_test_success/fuzzer_checks/src/main.nr @@ -1,6 +1,9 @@ - #[test(should_fail_with = "42 is not allowed")] fn finds_magic_value(x: u32) { let x = x as u64; - assert(2 * x != 42, "42 is not allowed"); + if x == 21 { + assert(2 * x != 42, "42 is not allowed"); + } else { + assert(2 * x == 42, "42 is not allowed"); + } } diff --git a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs index bf5969718e5..d4473eb3eef 100644 --- a/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/acvm_cli/src/cli/execute_cmd.rs @@ -5,6 +5,7 @@ use acir::native_types::{WitnessMap, WitnessStack}; use 
acir::FieldElement; use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; +use nargo::PrintOutput; use crate::cli::fs::inputs::{read_bytecode_from_file, read_inputs_from_file}; use crate::errors::CliError; @@ -73,7 +74,7 @@ pub(crate) fn execute_program_from_witness( &program, inputs_map, &Bn254BlackBoxSolver, - &mut DefaultForeignCallExecutor::new(true, None, None, None), + &mut DefaultForeignCallExecutor::new(PrintOutput::Stdout, None, None, None), ) .map_err(CliError::CircuitExecutionError) } diff --git a/noir/noir-repo/tooling/debugger/src/context.rs b/noir/noir-repo/tooling/debugger/src/context.rs index bec30976552..77c186fe707 100644 --- a/noir/noir-repo/tooling/debugger/src/context.rs +++ b/noir/noir-repo/tooling/debugger/src/context.rs @@ -967,6 +967,7 @@ mod tests { BinaryFieldOp, HeapValueType, MemoryAddress, Opcode as BrilligOpcode, ValueOrArray, }, }; + use nargo::PrintOutput; #[test] fn test_resolve_foreign_calls_stepping_into_brillig() { @@ -1025,8 +1026,10 @@ mod tests { let initial_witness = BTreeMap::from([(Witness(1), fe_1)]).into(); - let foreign_call_executor = - Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); + let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact( + PrintOutput::Stdout, + debug_artifact, + )); let mut context = DebugContext::new( &StubbedBlackBoxSolver, circuits, @@ -1190,8 +1193,10 @@ mod tests { let initial_witness = BTreeMap::from([(Witness(1), fe_1), (Witness(2), fe_1)]).into(); - let foreign_call_executor = - Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); + let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact( + PrintOutput::Stdout, + debug_artifact, + )); let brillig_funcs = &vec![brillig_bytecode]; let mut context = DebugContext::new( &StubbedBlackBoxSolver, @@ -1285,7 +1290,7 @@ mod tests { &circuits, &debug_artifact, WitnessMap::new(), - 
Box::new(DefaultDebugForeignCallExecutor::new(true)), + Box::new(DefaultDebugForeignCallExecutor::new(PrintOutput::Stdout)), brillig_funcs, ); diff --git a/noir/noir-repo/tooling/debugger/src/dap.rs b/noir/noir-repo/tooling/debugger/src/dap.rs index cfe33a61cb5..0d2095954e0 100644 --- a/noir/noir-repo/tooling/debugger/src/dap.rs +++ b/noir/noir-repo/tooling/debugger/src/dap.rs @@ -5,6 +5,7 @@ use acvm::acir::circuit::brillig::BrilligBytecode; use acvm::acir::circuit::Circuit; use acvm::acir::native_types::WitnessMap; use acvm::{BlackBoxFunctionSolver, FieldElement}; +use nargo::PrintOutput; use crate::context::DebugContext; use crate::context::{DebugCommandResult, DebugLocation}; @@ -71,7 +72,10 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession< circuits, debug_artifact, initial_witness, - Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)), + Box::new(DefaultDebugForeignCallExecutor::from_artifact( + PrintOutput::Stdout, + debug_artifact, + )), unconstrained_functions, ); Self { diff --git a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs index ecf27a22f29..899ba892d8f 100644 --- a/noir/noir-repo/tooling/debugger/src/foreign_calls.rs +++ b/noir/noir-repo/tooling/debugger/src/foreign_calls.rs @@ -3,7 +3,10 @@ use acvm::{ pwg::ForeignCallWaitInfo, AcirField, FieldElement, }; -use nargo::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}; +use nargo::{ + foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}, + PrintOutput, +}; use noirc_artifacts::debug::{DebugArtifact, DebugVars, StackFrame}; use noirc_errors::debug_info::{DebugFnId, DebugVarId}; use noirc_printable_type::ForeignCallError; @@ -41,21 +44,21 @@ pub trait DebugForeignCallExecutor: ForeignCallExecutor { fn current_stack_frame(&self) -> Option>; } -pub struct DefaultDebugForeignCallExecutor { - executor: DefaultForeignCallExecutor, +pub struct 
DefaultDebugForeignCallExecutor<'a> { + executor: DefaultForeignCallExecutor<'a, FieldElement>, pub debug_vars: DebugVars, } -impl DefaultDebugForeignCallExecutor { - pub fn new(show_output: bool) -> Self { +impl<'a> DefaultDebugForeignCallExecutor<'a> { + pub fn new(output: PrintOutput<'a>) -> Self { Self { - executor: DefaultForeignCallExecutor::new(show_output, None, None, None), + executor: DefaultForeignCallExecutor::new(output, None, None, None), debug_vars: DebugVars::default(), } } - pub fn from_artifact(show_output: bool, artifact: &DebugArtifact) -> Self { - let mut ex = Self::new(show_output); + pub fn from_artifact(output: PrintOutput<'a>, artifact: &DebugArtifact) -> Self { + let mut ex = Self::new(output); ex.load_artifact(artifact); ex } @@ -70,7 +73,7 @@ impl DefaultDebugForeignCallExecutor { } } -impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor { +impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor<'_> { fn get_variables(&self) -> Vec> { self.debug_vars.get_variables() } @@ -88,7 +91,7 @@ fn debug_fn_id(value: &FieldElement) -> DebugFnId { DebugFnId(value.to_u128() as u32) } -impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { +impl ForeignCallExecutor for DefaultDebugForeignCallExecutor<'_> { fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, diff --git a/noir/noir-repo/tooling/debugger/src/repl.rs b/noir/noir-repo/tooling/debugger/src/repl.rs index 486e84060f0..eda3cbfd895 100644 --- a/noir/noir-repo/tooling/debugger/src/repl.rs +++ b/noir/noir-repo/tooling/debugger/src/repl.rs @@ -8,7 +8,7 @@ use acvm::brillig_vm::brillig::Opcode as BrilligOpcode; use acvm::brillig_vm::MemoryValue; use acvm::AcirField; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use nargo::NargoError; +use nargo::{NargoError, PrintOutput}; use noirc_driver::CompiledProgram; use crate::foreign_calls::DefaultDebugForeignCallExecutor; @@ -42,8 +42,10 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { 
initial_witness: WitnessMap, unconstrained_functions: &'a [BrilligBytecode], ) -> Self { - let foreign_call_executor = - Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); + let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact( + PrintOutput::Stdout, + debug_artifact, + )); let context = DebugContext::new( blackbox_solver, circuits, @@ -313,8 +315,10 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { fn restart_session(&mut self) { let breakpoints: Vec = self.context.iterate_breakpoints().copied().collect(); - let foreign_call_executor = - Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, self.debug_artifact)); + let foreign_call_executor = Box::new(DefaultDebugForeignCallExecutor::from_artifact( + PrintOutput::Stdout, + self.debug_artifact, + )); self.context = DebugContext::new( self.blackbox_solver, self.circuits, diff --git a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs index 937fdcc0a5e..72ae6695b82 100644 --- a/noir/noir-repo/tooling/lsp/src/requests/test_run.rs +++ b/noir/noir-repo/tooling/lsp/src/requests/test_run.rs @@ -2,7 +2,10 @@ use std::future::{self, Future}; use crate::insert_all_files_for_workspace_into_file_manager; use async_lsp::{ErrorCode, ResponseError}; -use nargo::ops::{run_test, TestStatus}; +use nargo::{ + ops::{run_test, TestStatus}, + PrintOutput, +}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::hir::FunctionNameMatch; @@ -84,7 +87,7 @@ fn on_test_run_request_inner( &state.solver, &mut context, &test_function, - true, + PrintOutput::Stdout, None, Some(workspace.root_dir.clone()), Some(package.name.to_string()), diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs index 
16ed71e11e3..65ff051bcbf 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/mod.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use acvm::{acir::brillig::ForeignCallResult, pwg::ForeignCallWaitInfo, AcirField}; use mocker::MockForeignCallExecutor; use noirc_printable_type::ForeignCallError; -use print::PrintForeignCallExecutor; +use print::{PrintForeignCallExecutor, PrintOutput}; use rand::Rng; use rpc::RPCForeignCallExecutor; use serde::{Deserialize, Serialize}; @@ -65,22 +65,22 @@ impl ForeignCall { } #[derive(Debug, Default)] -pub struct DefaultForeignCallExecutor { +pub struct DefaultForeignCallExecutor<'a, F> { /// The executor for any [`ForeignCall::Print`] calls. - printer: Option, + printer: PrintForeignCallExecutor<'a>, mocker: MockForeignCallExecutor, external: Option, } -impl DefaultForeignCallExecutor { +impl<'a, F: Default> DefaultForeignCallExecutor<'a, F> { pub fn new( - show_output: bool, + output: PrintOutput<'a>, resolver_url: Option<&str>, root_path: Option, package_name: Option, ) -> Self { let id = rand::thread_rng().gen(); - let printer = if show_output { Some(PrintForeignCallExecutor) } else { None }; + let printer = PrintForeignCallExecutor { output }; let external_resolver = resolver_url.map(|resolver_url| { RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name) }); @@ -92,8 +92,8 @@ impl DefaultForeignCallExecutor { } } -impl Deserialize<'a>> ForeignCallExecutor - for DefaultForeignCallExecutor +impl<'a, F: AcirField + Serialize + for<'b> Deserialize<'b>> ForeignCallExecutor + for DefaultForeignCallExecutor<'a, F> { fn execute( &mut self, @@ -101,13 +101,7 @@ impl Deserialize<'a>> ForeignCallExecutor ) -> Result, ForeignCallError> { let foreign_call_name = foreign_call.function.as_str(); match ForeignCall::lookup(foreign_call_name) { - Some(ForeignCall::Print) => { - if let Some(printer) = &mut self.printer { - printer.execute(foreign_call) - } else { - 
Ok(ForeignCallResult::default()) - } - } + Some(ForeignCall::Print) => self.printer.execute(foreign_call), Some( ForeignCall::CreateMock | ForeignCall::SetMockParams diff --git a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs index 92fcd65ae28..8b2b5efd8b6 100644 --- a/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs +++ b/noir/noir-repo/tooling/nargo/src/foreign_calls/print.rs @@ -4,9 +4,19 @@ use noirc_printable_type::{ForeignCallError, PrintableValueDisplay}; use super::{ForeignCall, ForeignCallExecutor}; #[derive(Debug, Default)] -pub(crate) struct PrintForeignCallExecutor; +pub enum PrintOutput<'a> { + #[default] + None, + Stdout, + String(&'a mut String), +} + +#[derive(Debug, Default)] +pub(crate) struct PrintForeignCallExecutor<'a> { + pub(crate) output: PrintOutput<'a>, +} -impl ForeignCallExecutor for PrintForeignCallExecutor { +impl ForeignCallExecutor for PrintForeignCallExecutor<'_> { fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, @@ -26,7 +36,13 @@ impl ForeignCallExecutor for PrintForeignCallExecutor { let display_string = format!("{display_values}{}", if skip_newline { "" } else { "\n" }); - print!("{display_string}"); + match &mut self.output { + PrintOutput::None => (), + PrintOutput::Stdout => print!("{display_string}"), + PrintOutput::String(string) => { + string.push_str(&display_string); + } + } Ok(ForeignCallResult::default()) } diff --git a/noir/noir-repo/tooling/nargo/src/lib.rs b/noir/noir-repo/tooling/nargo/src/lib.rs index 74b7f54d860..ee7b2e4809a 100644 --- a/noir/noir-repo/tooling/nargo/src/lib.rs +++ b/noir/noir-repo/tooling/nargo/src/lib.rs @@ -30,6 +30,7 @@ use rayon::prelude::*; use walkdir::WalkDir; pub use self::errors::NargoError; +pub use self::foreign_calls::print::PrintOutput; pub fn prepare_dependencies( context: &mut Context, diff --git a/noir/noir-repo/tooling/nargo/src/ops/test.rs b/noir/noir-repo/tooling/nargo/src/ops/test.rs index 
e258627b522..1306150518d 100644 --- a/noir/noir-repo/tooling/nargo/src/ops/test.rs +++ b/noir/noir-repo/tooling/nargo/src/ops/test.rs @@ -9,7 +9,7 @@ use acvm::{ AcirField, BlackBoxFunctionSolver, FieldElement, }; use noirc_abi::Abi; -use noirc_driver::{compile_no_check, CompileError, CompileOptions}; +use noirc_driver::{compile_no_check, CompileError, CompileOptions, DEFAULT_EXPRESSION_WIDTH}; use noirc_errors::{debug_info::DebugInfo, FileDiagnostic}; use noirc_frontend::hir::{def_map::TestFunction, Context}; use noirc_printable_type::ForeignCallError; @@ -19,14 +19,17 @@ use serde::{Deserialize, Serialize}; use crate::{ errors::try_to_diagnose_runtime_error, foreign_calls::{ - mocker::MockForeignCallExecutor, print::PrintForeignCallExecutor, - rpc::RPCForeignCallExecutor, ForeignCall, ForeignCallExecutor, + mocker::MockForeignCallExecutor, + print::{PrintForeignCallExecutor, PrintOutput}, + rpc::RPCForeignCallExecutor, + ForeignCall, ForeignCallExecutor, }, NargoError, }; use super::execute_program; +#[derive(Debug)] pub enum TestStatus { Pass, Fail { message: String, error_diagnostic: Option }, @@ -45,7 +48,7 @@ pub fn run_test>( blackbox_solver: &B, context: &mut Context, test_function: &TestFunction, - show_output: bool, + output: PrintOutput<'_>, foreign_call_resolver_url: Option<&str>, root_path: Option, package_name: Option, @@ -60,11 +63,15 @@ pub fn run_test>( match compile_no_check(context, config, test_function.get_id(), None, false) { Ok(compiled_program) => { + // Do the same optimizations as `compile_cmd`. + let target_width = config.expression_width.unwrap_or(DEFAULT_EXPRESSION_WIDTH); + let compiled_program = crate::ops::transform_program(compiled_program, target_width); + if test_function_has_no_arguments { // Run the backend to ensure the PWG evaluates functions like std::hash::pedersen, // otherwise constraints involving these expressions will not error. 
let mut foreign_call_executor = TestForeignCallExecutor::new( - show_output, + output, foreign_call_resolver_url, root_path, package_name, @@ -79,9 +86,9 @@ pub fn run_test>( let status = test_status_program_compile_pass( test_function, - compiled_program.abi, - compiled_program.debug, - circuit_execution, + &compiled_program.abi, + &compiled_program.debug, + &circuit_execution, ); let ignore_foreign_call_failures = @@ -113,26 +120,46 @@ pub fn run_test>( { use acvm::acir::circuit::Program; use noir_fuzzer::FuzzedExecutor; + use proptest::test_runner::Config; use proptest::test_runner::TestRunner; - let runner = TestRunner::default(); + + let runner = + TestRunner::new(Config { failure_persistence: None, ..Config::default() }); + + let abi = compiled_program.abi.clone(); + let debug = compiled_program.debug.clone(); let executor = |program: &Program, initial_witness: WitnessMap| -> Result, String> { - execute_program( + let circuit_execution = execute_program( program, initial_witness, blackbox_solver, &mut TestForeignCallExecutor::::new( - false, + PrintOutput::None, foreign_call_resolver_url, root_path.clone(), package_name.clone(), ), - ) - .map_err(|err| err.to_string()) + ); + + let status = test_status_program_compile_pass( + test_function, + &abi, + &debug, + &circuit_execution, + ); + + if let TestStatus::Fail { message, error_diagnostic: _ } = status { + Err(message) + } else { + // The fuzzer doesn't care about the actual result. + Ok(WitnessStack::default()) + } }; + let fuzzer = FuzzedExecutor::new(compiled_program.into(), executor, runner); let result = fuzzer.fuzz(); @@ -172,9 +199,9 @@ fn test_status_program_compile_fail(err: CompileError, test_function: &TestFunct /// passed/failed to determine the test status. 
fn test_status_program_compile_pass( test_function: &TestFunction, - abi: Abi, - debug: Vec, - circuit_execution: Result, NargoError>, + abi: &Abi, + debug: &[DebugInfo], + circuit_execution: &Result, NargoError>, ) -> TestStatus { let circuit_execution_err = match circuit_execution { // Circuit execution was successful; ie no errors or unsatisfied constraints @@ -194,7 +221,7 @@ fn test_status_program_compile_pass( // If we reach here, then the circuit execution failed. // // Check if the function should have passed - let diagnostic = try_to_diagnose_runtime_error(&circuit_execution_err, &abi, &debug); + let diagnostic = try_to_diagnose_runtime_error(circuit_execution_err, abi, debug); let test_should_have_passed = !test_function.should_fail(); if test_should_have_passed { return TestStatus::Fail { @@ -251,24 +278,24 @@ fn check_expected_failure_message( } /// A specialized foreign call executor which tracks whether it has encountered any unknown foreign calls -struct TestForeignCallExecutor { +struct TestForeignCallExecutor<'a, F> { /// The executor for any [`ForeignCall::Print`] calls. 
- printer: Option, + printer: PrintForeignCallExecutor<'a>, mocker: MockForeignCallExecutor, external: Option, encountered_unknown_foreign_call: bool, } -impl TestForeignCallExecutor { +impl<'a, F: Default> TestForeignCallExecutor<'a, F> { fn new( - show_output: bool, + output: PrintOutput<'a>, resolver_url: Option<&str>, root_path: Option, package_name: Option, ) -> Self { let id = rand::thread_rng().gen(); - let printer = if show_output { Some(PrintForeignCallExecutor) } else { None }; + let printer = PrintForeignCallExecutor { output }; let external_resolver = resolver_url.map(|resolver_url| { RPCForeignCallExecutor::new(resolver_url, id, root_path, package_name) }); @@ -281,8 +308,8 @@ impl TestForeignCallExecutor { } } -impl Deserialize<'a>> ForeignCallExecutor - for TestForeignCallExecutor +impl<'a, F: AcirField + Serialize + for<'b> Deserialize<'b>> ForeignCallExecutor + for TestForeignCallExecutor<'a, F> { fn execute( &mut self, @@ -293,13 +320,7 @@ impl Deserialize<'a>> ForeignCallExecutor let foreign_call_name = foreign_call.function.as_str(); match ForeignCall::lookup(foreign_call_name) { - Some(ForeignCall::Print) => { - if let Some(printer) = &mut self.printer { - printer.execute(foreign_call) - } else { - Ok(ForeignCallResult::default()) - } - } + Some(ForeignCall::Print) => self.printer.execute(foreign_call), Some( ForeignCall::CreateMock diff --git a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs index 51de97df139..9bc50f87d8e 100644 --- a/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs +++ b/noir/noir-repo/tooling/nargo_cli/benches/criterion.rs @@ -3,6 +3,7 @@ use acvm::{acir::native_types::WitnessMap, FieldElement}; use assert_cmd::prelude::{CommandCargoExt, OutputAssertExt}; use criterion::{criterion_group, criterion_main, Criterion}; +use nargo::PrintOutput; use noirc_abi::{ input_parser::{Format, InputValue}, Abi, InputMap, @@ -115,7 +116,7 @@ fn criterion_test_execution(c: 
&mut Criterion, test_program_dir: &Path, force_br let artifacts = RefCell::new(None); let mut foreign_call_executor = - nargo::foreign_calls::DefaultForeignCallExecutor::new(false, None, None, None); + nargo::foreign_calls::DefaultForeignCallExecutor::new(PrintOutput::None, None, None, None); c.bench_function(&benchmark_name, |b| { b.iter_batched( diff --git a/noir/noir-repo/tooling/nargo_cli/build.rs b/noir/noir-repo/tooling/nargo_cli/build.rs index 41b3c0c9cf7..8db2c1786d8 100644 --- a/noir/noir-repo/tooling/nargo_cli/build.rs +++ b/noir/noir-repo/tooling/nargo_cli/build.rs @@ -36,14 +36,13 @@ fn main() { generate_compile_success_empty_tests(&mut test_file, &test_dir); generate_compile_success_contract_tests(&mut test_file, &test_dir); generate_compile_success_no_bug_tests(&mut test_file, &test_dir); + generate_compile_success_with_bug_tests(&mut test_file, &test_dir); generate_compile_failure_tests(&mut test_file, &test_dir); } /// Some tests are explicitly ignored in brillig due to them failing. /// These should be fixed and removed from this list. -const IGNORED_BRILLIG_TESTS: [&str; 11] = [ - // Takes a very long time to execute as large loops do not get simplified. - "regression_4709", +const IGNORED_BRILLIG_TESTS: [&str; 10] = [ // bit sizes for bigint operation doesn't match up. "bigint", // ICE due to looking for function which doesn't exist. 
@@ -458,6 +457,35 @@ fn generate_compile_success_no_bug_tests(test_file: &mut File, test_data_dir: &P writeln!(test_file, "}}").unwrap(); } +/// Generate tests for checking that the contract compiles and there are "bugs" in stderr +fn generate_compile_success_with_bug_tests(test_file: &mut File, test_data_dir: &Path) { + let test_type = "compile_success_with_bug"; + let test_cases = read_test_cases(test_data_dir, test_type); + + writeln!( + test_file, + "mod {test_type} {{ + use super::*; + " + ) + .unwrap(); + for (test_name, test_dir) in test_cases { + let test_dir = test_dir.display(); + + generate_test_cases( + test_file, + &test_name, + &test_dir, + "compile", + r#" + nargo.assert().success().stderr(predicate::str::contains("bug:")); + "#, + &MatrixConfig::default(), + ); + } + writeln!(test_file, "}}").unwrap(); +} + fn generate_compile_failure_tests(test_file: &mut File, test_data_dir: &Path) { let test_type = "compile_failure"; let test_cases = read_test_cases(test_data_dir, test_type); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs index 9059f1dd8e8..c8695a8f626 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/check_cmd.rs @@ -4,33 +4,25 @@ use clap::Args; use fm::FileManager; use iter_extended::btree_map; use nargo::{ - errors::CompileError, - insert_all_files_for_workspace_into_file_manager, - ops::report_errors, - package::{CrateName, Package}, - parse_all, prepare_package, + errors::CompileError, insert_all_files_for_workspace_into_file_manager, ops::report_errors, + package::Package, parse_all, prepare_package, }; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{ check_crate, compute_function_abi, CompileOptions, CrateId, 
NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::hir::{Context, ParsedFiles}; -use super::fs::write_to_file; use super::NargoConfig; +use super::{fs::write_to_file, PackageOptions}; /// Checks the constraint system for errors #[derive(Debug, Clone, Args)] #[clap(visible_alias = "c")] pub(crate) struct CheckCommand { - /// The name of the package to check - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Check all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, + #[clap(flatten)] + pub(super) package_options: PackageOptions, /// Force overwrite of existing files #[clap(long = "overwrite")] @@ -42,9 +34,7 @@ pub(crate) struct CheckCommand { pub(crate) fn run(args: CheckCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); + let selection = args.package_options.package_selection(); let workspace = resolve_workspace_from_toml( &toml_path, selection, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs index ff6009981c7..2ecf6959a94 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -5,10 +5,12 @@ use std::time::Duration; use acvm::acir::circuit::ExpressionWidth; use fm::FileManager; use nargo::ops::{collect_errors, compile_contract, compile_program, report_errors}; -use nargo::package::{CrateName, Package}; +use nargo::package::Package; use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo_toml::{ + get_package_manifest, 
resolve_workspace_from_toml, ManifestError, PackageSelection, +}; use noirc_driver::DEFAULT_EXPRESSION_WIDTH; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract}; @@ -21,19 +23,14 @@ use notify_debouncer_full::new_debouncer; use crate::errors::CliError; use super::fs::program::{read_program_from_file, save_contract_to_file, save_program_to_file}; -use super::NargoConfig; +use super::{NargoConfig, PackageOptions}; use rayon::prelude::*; /// Compile the program and its secret execution trace into ACIR format #[derive(Debug, Clone, Args)] pub(crate) struct CompileCommand { - /// The name of the package to compile - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Compile all packages in the workspace. - #[clap(long, conflicts_with = "package")] - workspace: bool, + #[clap(flatten)] + pub(super) package_options: PackageOptions, #[clap(flatten)] compile_options: CompileOptions, @@ -44,16 +41,8 @@ pub(crate) struct CompileCommand { } pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliError> { - let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - - let workspace = resolve_workspace_from_toml( - &toml_path, - selection, - Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), - )?; + let selection = args.package_options.package_selection(); + let workspace = read_workspace(&config.program_dir, selection)?; if args.watch { watch_workspace(&workspace, &args.compile_options) @@ -65,6 +54,22 @@ pub(crate) fn run(args: CompileCommand, config: NargoConfig) -> Result<(), CliEr Ok(()) } +/// Read a given program directory into a workspace. 
+fn read_workspace( + program_dir: &Path, + selection: PackageSelection, +) -> Result { + let toml_path = get_package_manifest(program_dir)?; + + let workspace = resolve_workspace_from_toml( + &toml_path, + selection, + Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), + )?; + + Ok(workspace) +} + /// Continuously recompile the workspace on any Noir file change event. fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> notify::Result<()> { let (tx, rx) = std::sync::mpsc::channel(); @@ -109,15 +114,21 @@ fn watch_workspace(workspace: &Workspace, compile_options: &CompileOptions) -> n Ok(()) } +/// Parse all files in the workspace. +fn parse_workspace(workspace: &Workspace) -> (FileManager, ParsedFiles) { + let mut file_manager = workspace.new_file_manager(); + insert_all_files_for_workspace_into_file_manager(workspace, &mut file_manager); + let parsed_files = parse_all(&file_manager); + (file_manager, parsed_files) +} + /// Parse and compile the entire workspace, then report errors. /// This is the main entry point used by all other commands that need compilation. 
pub(super) fn compile_workspace_full( workspace: &Workspace, compile_options: &CompileOptions, ) -> Result<(), CliError> { - let mut workspace_file_manager = workspace.new_file_manager(); - insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + let (workspace_file_manager, parsed_files) = parse_workspace(workspace); let compiled_workspace = compile_workspace(&workspace_file_manager, &parsed_files, workspace, compile_options); @@ -150,7 +161,7 @@ fn compile_workspace( let program_warnings_or_errors: CompilationResult<()> = compile_programs(file_manager, parsed_files, workspace, &binary_packages, compile_options); - let contract_warnings_or_errors: CompilationResult<()> = compiled_contracts( + let contract_warnings_or_errors: CompilationResult<()> = compile_contracts( file_manager, parsed_files, &contract_packages, @@ -244,7 +255,7 @@ fn compile_programs( } /// Compile the given contracts in the workspace. -fn compiled_contracts( +fn compile_contracts( file_manager: &FileManager, parsed_files: &ParsedFiles, contract_packages: &[Package], @@ -296,3 +307,138 @@ pub(crate) fn get_target_width( compile_options_width.unwrap_or(DEFAULT_EXPRESSION_WIDTH) } } + +#[cfg(test)] +mod tests { + use std::{ + path::{Path, PathBuf}, + str::FromStr, + }; + + use clap::Parser; + use nargo::ops::compile_program; + use nargo_toml::PackageSelection; + use noirc_driver::{CompileOptions, CrateName}; + use rayon::prelude::*; + + use crate::cli::compile_cmd::{get_target_width, parse_workspace, read_workspace}; + + /// Try to find the directory that Cargo sets when it is running; + /// otherwise fallback to assuming the CWD is the root of the repository + /// and append the crate path. 
+ fn test_programs_dir() -> PathBuf { + let root_dir = match std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir).parent().unwrap().parent().unwrap().to_path_buf(), + Err(_) => std::env::current_dir().unwrap(), + }; + root_dir.join("test_programs") + } + + /// Collect the test programs under a sub-directory. + fn read_test_program_dirs( + test_programs_dir: &Path, + test_sub_dir: &str, + ) -> impl Iterator { + let test_case_dir = test_programs_dir.join(test_sub_dir); + std::fs::read_dir(test_case_dir) + .unwrap() + .flatten() + .filter(|c| c.path().is_dir()) + .map(|c| c.path()) + } + + #[derive(Parser, Debug)] + #[command(ignore_errors = true)] + struct Options { + /// Test name to filter for. + /// + /// For example: + /// ```text + /// cargo test -p nargo_cli -- test_transform_program_is_idempotent slice_loop + /// ``` + args: Vec, + } + + impl Options { + fn package_selection(&self) -> PackageSelection { + match self.args.as_slice() { + [_test_name, test_program] => { + PackageSelection::Selected(CrateName::from_str(test_program).unwrap()) + } + _ => PackageSelection::DefaultOrAll, + } + } + } + + /// Check that `nargo::ops::transform_program` is idempotent by compiling the + /// test programs and running them through the optimizer twice. + /// + /// This test is here purely because of the convenience of having access to + /// the utility functions to process workspaces. 
+ #[test] + fn test_transform_program_is_idempotent() { + let opts = Options::parse(); + + let sel = opts.package_selection(); + let verbose = matches!(sel, PackageSelection::Selected(_)); + + let test_workspaces = read_test_program_dirs(&test_programs_dir(), "execution_success") + .filter_map(|dir| read_workspace(&dir, sel.clone()).ok()) + .collect::>(); + + assert!(!test_workspaces.is_empty(), "should find some test workspaces"); + + test_workspaces.par_iter().for_each(|workspace| { + let (file_manager, parsed_files) = parse_workspace(workspace); + let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); + + for package in binary_packages { + let (program_0, _warnings) = compile_program( + &file_manager, + &parsed_files, + workspace, + package, + &CompileOptions::default(), + None, + ) + .expect("failed to compile"); + + let width = get_target_width(package.expression_width, None); + + let program_1 = nargo::ops::transform_program(program_0, width); + let program_2 = nargo::ops::transform_program(program_1.clone(), width); + + if verbose { + // Compare where the most likely difference is. + similar_asserts::assert_eq!( + format!("{}", program_1.program), + format!("{}", program_2.program), + "optimization not idempotent for test program '{}'", + package.name + ); + assert_eq!( + program_1.program, program_2.program, + "optimization not idempotent for test program '{}'", + package.name + ); + + // Compare the whole content. + similar_asserts::assert_eq!( + serde_json::to_string_pretty(&program_1).unwrap(), + serde_json::to_string_pretty(&program_2).unwrap(), + "optimization not idempotent for test program '{}'", + package.name + ); + } else { + // Just compare hashes, which would just state that the program failed. + // Then we can use the filter option to zoom in one one to see why. 
+ assert!( + fxhash::hash64(&program_1) == fxhash::hash64(&program_2), + "optimization not idempotent for test program '{}'", + package.name + ); + } + } + }); + } +} diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs index e837f297475..f4dd607a53e 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -38,7 +38,7 @@ pub(crate) struct DebugCommand { /// The name of the package to execute #[clap(long)] - package: Option, + pub(super) package: Option, #[clap(flatten)] compile_options: CompileOptions, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs index fa95d3123c6..49a23a7ea62 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -8,8 +8,9 @@ use clap::Args; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; use nargo::foreign_calls::DefaultForeignCallExecutor; -use nargo::package::{CrateName, Package}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo::package::Package; +use nargo::PrintOutput; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; use noirc_artifacts::debug::DebugArtifact; @@ -17,7 +18,7 @@ use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING use super::compile_cmd::compile_workspace_full; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; -use super::NargoConfig; +use super::{NargoConfig, PackageOptions}; use crate::cli::fs::program::read_program_from_file; use crate::errors::CliError; @@ -34,13 +35,8 @@ pub(crate) struct ExecuteCommand { #[clap(long, short, default_value = PROVER_INPUT_FILE)] prover_name: String, - 
/// The name of the package to execute - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Execute all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, + #[clap(flatten)] + pub(super) package_options: PackageOptions, #[clap(flatten)] compile_options: CompileOptions, @@ -52,9 +48,7 @@ pub(crate) struct ExecuteCommand { pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); + let selection = args.package_options.package_selection(); let workspace = resolve_workspace_from_toml( &toml_path, selection, @@ -70,8 +64,9 @@ pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliEr let program_artifact_path = workspace.package_build_path(package); let program: CompiledProgram = read_program_from_file(program_artifact_path.clone())?.into(); + let abi = program.abi.clone(); - let (return_value, witness_stack) = execute_program_and_decode( + let results = execute_program_and_decode( program, package, &args.prover_name, @@ -81,14 +76,27 @@ pub(crate) fn run(args: ExecuteCommand, config: NargoConfig) -> Result<(), CliEr )?; println!("[{}] Circuit witness successfully solved", package.name); - if let Some(return_value) = return_value { + if let Some(ref return_value) = results.actual_return { println!("[{}] Circuit output: {return_value:?}", package.name); } let package_name = package.name.clone().into(); let witness_name = args.witness_name.as_ref().unwrap_or(&package_name); - let witness_path = save_witness_to_dir(witness_stack, witness_name, target_dir)?; + let witness_path = save_witness_to_dir(results.witness_stack, witness_name, target_dir)?; println!("[{}] Witness saved to {}", package.name, 
witness_path.display()); + + // Sanity checks on the return value after the witness has been saved, so it can be inspected if necessary. + if let Some(expected) = results.expected_return { + if results.actual_return.as_ref() != Some(&expected) { + return Err(CliError::UnexpectedReturn { expected, actual: results.actual_return }); + } + } + // We can expect that if the circuit returns something, it should be non-empty after execution. + if let Some(ref expected) = abi.return_type { + if results.actual_return.is_none() { + return Err(CliError::MissingReturn { expected: expected.clone() }); + } + } } Ok(()) } @@ -100,18 +108,24 @@ fn execute_program_and_decode( foreign_call_resolver_url: Option<&str>, root_path: Option, package_name: Option, -) -> Result<(Option, WitnessStack), CliError> { +) -> Result { // Parse the initial witness values from Prover.toml - let (inputs_map, _) = + let (inputs_map, expected_return) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; let witness_stack = execute_program(&program, &inputs_map, foreign_call_resolver_url, root_path, package_name)?; // Get the entry point witness for the ABI let main_witness = &witness_stack.peek().expect("Should have at least one witness on the stack").witness; - let (_, return_value) = program.abi.decode(main_witness)?; + let (_, actual_return) = program.abi.decode(main_witness)?; + + Ok(ExecutionResults { expected_return, actual_return, witness_stack }) +} - Ok((return_value, witness_stack)) +struct ExecutionResults { + expected_return: Option, + actual_return: Option, + witness_stack: WitnessStack, } pub(crate) fn execute_program( @@ -128,7 +142,7 @@ pub(crate) fn execute_program( initial_witness, &Bn254BlackBoxSolver, &mut DefaultForeignCallExecutor::new( - true, + PrintOutput::Stdout, foreign_call_resolver_url, root_path, package_name, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs index 
c3752db7fbd..cb92b987c4e 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/export_cmd.rs @@ -10,13 +10,11 @@ use nargo::package::Package; use nargo::prepare_package; use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_driver::{ compile_no_check, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, }; -use noirc_frontend::graph::CrateName; - use clap::Args; use crate::errors::CliError; @@ -24,18 +22,13 @@ use crate::errors::CliError; use super::check_cmd::check_crate_and_report_errors; use super::fs::program::save_program_to_file; -use super::NargoConfig; +use super::{NargoConfig, PackageOptions}; /// Exports functions marked with #[export] attribute #[derive(Debug, Clone, Args)] pub(crate) struct ExportCommand { - /// The name of the package to compile - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Compile all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, + #[clap(flatten)] + pub(super) package_options: PackageOptions, #[clap(flatten)] compile_options: CompileOptions, @@ -43,10 +36,7 @@ pub(crate) struct ExportCommand { pub(crate) fn run(args: ExportCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); - + let selection = args.package_options.package_selection(); let workspace = resolve_workspace_from_toml( &toml_path, selection, diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs 
index 66496db517c..7b29a90c5c0 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/fmt_cmd.rs @@ -9,7 +9,7 @@ use noirc_frontend::{hir::def_map::parse_file, parser::ParserError}; use crate::errors::CliError; -use super::NargoConfig; +use super::{NargoConfig, PackageOptions}; /// Format the Noir files in a workspace #[derive(Debug, Clone, Args)] @@ -17,15 +17,22 @@ pub(crate) struct FormatCommand { /// Run noirfmt in check mode #[arg(long)] check: bool, + + #[clap(flatten)] + pub(super) package_options: PackageOptions, } pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliError> { let check_mode = args.check; let toml_path = get_package_manifest(&config.program_dir)?; + let selection = match args.package_options.package_selection() { + PackageSelection::DefaultOrAll => PackageSelection::All, + other => other, + }; let workspace = resolve_workspace_from_toml( &toml_path, - PackageSelection::All, + selection, Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs index 769a1f79d81..ee8ff32922e 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/info_cmd.rs @@ -3,11 +3,10 @@ use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use iter_extended::vecmap; use nargo::{ - constants::PROVER_INPUT_FILE, - foreign_calls::DefaultForeignCallExecutor, - package::{CrateName, Package}, + constants::PROVER_INPUT_FILE, foreign_calls::DefaultForeignCallExecutor, package::Package, + PrintOutput, }; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_abi::input_parser::Format; use noirc_artifacts::program::ProgramArtifact; use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; @@ -20,7 +19,7 
@@ use crate::{cli::fs::inputs::read_inputs_from_file, errors::CliError}; use super::{ compile_cmd::{compile_workspace_full, get_target_width}, fs::program::read_program_from_file, - NargoConfig, + NargoConfig, PackageOptions, }; /// Provides detailed information on each of a program's function (represented by a single circuit) @@ -31,13 +30,8 @@ use super::{ #[derive(Debug, Clone, Args)] #[clap(visible_alias = "i")] pub(crate) struct InfoCommand { - /// The name of the package to detail - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Detail all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, + #[clap(flatten)] + pub(super) package_options: PackageOptions, /// Output a JSON formatted report. Changes to this format are not currently considered breaking. #[clap(long, hide = true)] @@ -56,9 +50,7 @@ pub(crate) struct InfoCommand { pub(crate) fn run(mut args: InfoCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); + let selection = args.package_options.package_selection(); let workspace = resolve_workspace_from_toml( &toml_path, selection, @@ -263,7 +255,7 @@ fn profile_brillig_execution( &program_artifact.bytecode, initial_witness, &Bn254BlackBoxSolver, - &mut DefaultForeignCallExecutor::new(false, None, None, None), + &mut DefaultForeignCallExecutor::new(PrintOutput::None, None, None, None), )?; let expression_width = get_target_width(package.expression_width, expression_width); diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs index 284dd10cb88..cc72092daa1 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/mod.rs @@ -1,8 +1,8 
@@ use clap::{Args, Parser, Subcommand}; use const_format::formatcp; -use nargo_toml::find_package_root; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; -use std::path::PathBuf; +use nargo_toml::{ManifestError, PackageSelection}; +use noirc_driver::{CrateName, NOIR_ARTIFACT_VERSION_STRING}; +use std::path::{Path, PathBuf}; use color_eyre::eyre; @@ -52,6 +52,48 @@ pub(crate) struct NargoConfig { program_dir: PathBuf, } +/// Options for commands that work on either workspace or package scope. +#[derive(Args, Clone, Debug, Default)] +pub(crate) struct PackageOptions { + /// The name of the package to run the command on. + /// By default run on the first one found moving up along the ancestors of the current directory. + #[clap(long, conflicts_with = "workspace")] + package: Option, + + /// Run on all packages in the workspace + #[clap(long, conflicts_with = "package")] + workspace: bool, +} + +impl PackageOptions { + /// Decide which package to run the command on: + /// * `package` if non-empty + /// * all packages if `workspace` is `true` + /// * otherwise the default package + pub(crate) fn package_selection(&self) -> PackageSelection { + let default_selection = + if self.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; + + self.package.clone().map_or(default_selection, PackageSelection::Selected) + } + + /// Whether we need to look for the package manifest at the workspace level. + /// If a package is specified, it might not be the current package. + fn scope(&self) -> CommandScope { + if self.workspace || self.package.is_some() { + CommandScope::Workspace + } else { + CommandScope::CurrentPackage + } + } +} + +enum CommandScope { + Workspace, + CurrentPackage, + Any, +} + #[non_exhaustive] #[derive(Subcommand, Clone, Debug)] enum NargoCommand { @@ -83,22 +125,8 @@ pub(crate) fn start_cli() -> eyre::Result<()> { } // Search through parent directories to find package root if necessary. - match &command { - NargoCommand::Check(..) 
- | NargoCommand::Fmt(..) - | NargoCommand::Compile(..) - | NargoCommand::Execute(..) - | NargoCommand::Export(..) - | NargoCommand::Debug(..) - | NargoCommand::Test(..) - | NargoCommand::Info(..) => { - config.program_dir = find_package_root(&config.program_dir)?; - } - NargoCommand::New(..) - | NargoCommand::Init(..) - | NargoCommand::Lsp(..) - | NargoCommand::Dap(..) - | NargoCommand::GenerateCompletionScript(..) => (), + if let Some(program_dir) = command_dir(&command, &config.program_dir)? { + config.program_dir = program_dir; } match command { @@ -127,6 +155,43 @@ pub(crate) fn start_cli() -> eyre::Result<()> { Ok(()) } +/// Some commands have package options, which we use here to decide whether to +/// alter `--program-dir` to point at a manifest, depending on whether we want +/// to work on a specific package or the entire workspace. +fn command_scope(cmd: &NargoCommand) -> CommandScope { + match &cmd { + NargoCommand::Check(cmd) => cmd.package_options.scope(), + NargoCommand::Compile(cmd) => cmd.package_options.scope(), + NargoCommand::Execute(cmd) => cmd.package_options.scope(), + NargoCommand::Export(cmd) => cmd.package_options.scope(), + NargoCommand::Test(cmd) => cmd.package_options.scope(), + NargoCommand::Info(cmd) => cmd.package_options.scope(), + NargoCommand::Fmt(cmd) => cmd.package_options.scope(), + NargoCommand::Debug(cmd) => { + if cmd.package.is_some() { + CommandScope::Workspace + } else { + CommandScope::CurrentPackage + } + } + NargoCommand::New(..) + | NargoCommand::Init(..) + | NargoCommand::Lsp(..) + | NargoCommand::Dap(..) + | NargoCommand::GenerateCompletionScript(..) => CommandScope::Any, + } +} + +/// A manifest directory we need to change into, if the command needs it. 
+fn command_dir(cmd: &NargoCommand, program_dir: &Path) -> Result, ManifestError> { + let workspace = match command_scope(cmd) { + CommandScope::Workspace => true, + CommandScope::CurrentPackage => false, + CommandScope::Any => return Ok(None), + }; + Ok(Some(nargo_toml::find_root(program_dir, workspace)?)) +} + #[cfg(test)] mod tests { use clap::Parser; diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs index aa0ee1bb94b..1fd4ed2d873 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd.rs @@ -1,24 +1,31 @@ -use std::{io::Write, path::PathBuf}; +use std::{ + collections::{BTreeMap, HashMap}, + fmt::Display, + panic::{catch_unwind, UnwindSafe}, + path::PathBuf, + sync::{mpsc, Mutex}, + thread, + time::Duration, +}; use acvm::{BlackBoxFunctionSolver, FieldElement}; use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use fm::FileManager; +use formatters::{Formatter, JsonFormatter, PrettyFormatter, TerseFormatter}; use nargo::{ - insert_all_files_for_workspace_into_file_manager, - ops::TestStatus, - package::{CrateName, Package}, - parse_all, prepare_package, + insert_all_files_for_workspace_into_file_manager, ops::TestStatus, package::Package, parse_all, + prepare_package, workspace::Workspace, PrintOutput, }; -use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml}; use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::hir::{FunctionNameMatch, ParsedFiles}; -use rayon::prelude::{IntoParallelIterator, ParallelBridge, ParallelIterator}; -use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; use crate::{cli::check_cmd::check_crate_and_report_errors, errors::CliError}; -use super::NargoConfig; +use super::{NargoConfig, PackageOptions}; + +mod formatters; /// 
Run the tests for this program #[derive(Debug, Clone, Args)] @@ -35,13 +42,8 @@ pub(crate) struct TestCommand { #[clap(long)] exact: bool, - /// The name of the package to test - #[clap(long, conflicts_with = "workspace")] - package: Option, - - /// Test all packages in the workspace - #[clap(long, conflicts_with = "package")] - workspace: bool, + #[clap(flatten)] + pub(super) package_options: PackageOptions, #[clap(flatten)] compile_options: CompileOptions, @@ -49,22 +51,78 @@ pub(crate) struct TestCommand { /// JSON RPC url to solve oracle calls #[clap(long)] oracle_resolver: Option, + + /// Number of threads used for running tests in parallel + #[clap(long, default_value_t = rayon::current_num_threads())] + test_threads: usize, + + /// Configure formatting of output + #[clap(long)] + format: Option, + + /// Display one character per test instead of one line + #[clap(short = 'q', long = "quiet")] + quiet: bool, +} + +#[derive(Debug, Copy, Clone, clap::ValueEnum)] +enum Format { + /// Print verbose output + Pretty, + /// Display one character per test + Terse, + /// Output a JSON Lines document + Json, +} + +impl Format { + fn formatter(&self) -> Box { + match self { + Format::Pretty => Box::new(PrettyFormatter), + Format::Terse => Box::new(TerseFormatter), + Format::Json => Box::new(JsonFormatter), + } + } +} + +impl Display for Format { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Format::Pretty => write!(f, "pretty"), + Format::Terse => write!(f, "terse"), + Format::Json => write!(f, "json"), + } + } } +struct Test<'a> { + name: String, + package_name: String, + runner: Box (TestStatus, String) + Send + UnwindSafe + 'a>, +} + +struct TestResult { + name: String, + package_name: String, + status: TestStatus, + output: String, + time_to_run: Duration, +} + +const STACK_SIZE: usize = 4 * 1024 * 1024; + pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError> { let toml_path = 
get_package_manifest(&config.program_dir)?; - let default_selection = - if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; - let selection = args.package.map_or(default_selection, PackageSelection::Selected); + let selection = args.package_options.package_selection(); let workspace = resolve_workspace_from_toml( &toml_path, selection, Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let mut workspace_file_manager = workspace.new_file_manager(); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let parsed_files = parse_all(&workspace_file_manager); + let mut file_manager = workspace.new_file_manager(); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut file_manager); + let parsed_files = parse_all(&file_manager); let pattern = match &args.test_name { Some(name) => { @@ -77,230 +135,388 @@ pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError None => FunctionNameMatch::Anything, }; - // Configure a thread pool with a larger stack size to prevent overflowing stack in large programs. - // Default is 2MB. 
- let pool = rayon::ThreadPoolBuilder::new().stack_size(4 * 1024 * 1024).build().unwrap(); - let test_reports: Vec> = pool.install(|| { - workspace - .into_iter() - .par_bridge() - .map(|package| { - run_tests::( - &workspace_file_manager, - &parsed_files, - package, - pattern, - args.show_output, - args.oracle_resolver.as_deref(), - Some(workspace.root_dir.clone()), - Some(package.name.to_string()), - &args.compile_options, - ) - }) - .collect::>() - })?; - let test_report: Vec<(String, TestStatus)> = test_reports.into_iter().flatten().collect(); - - if test_report.is_empty() { - match &pattern { - FunctionNameMatch::Exact(pattern) => { - return Err(CliError::Generic( - format!("Found 0 tests matching input '{pattern}'.",), - )) - } - FunctionNameMatch::Contains(pattern) => { - return Err(CliError::Generic(format!("Found 0 tests containing '{pattern}'.",))) - } - // If we are running all tests in a crate, having none is not an error - FunctionNameMatch::Anything => {} - }; - } - - if test_report.iter().any(|(_, status)| status.failed()) { - Err(CliError::Generic(String::new())) + let formatter: Box = if let Some(format) = args.format { + format.formatter() + } else if args.quiet { + Box::new(TerseFormatter) } else { - Ok(()) - } -} + Box::new(PrettyFormatter) + }; -#[allow(clippy::too_many_arguments)] -fn run_tests + Default>( - file_manager: &FileManager, - parsed_files: &ParsedFiles, - package: &Package, - fn_name: FunctionNameMatch, - show_output: bool, - foreign_call_resolver_url: Option<&str>, - root_path: Option, - package_name: Option, - compile_options: &CompileOptions, -) -> Result, CliError> { - let test_functions = - get_tests_in_package(file_manager, parsed_files, package, fn_name, compile_options)?; - - let count_all = test_functions.len(); - - let plural = if count_all == 1 { "" } else { "s" }; - println!("[{}] Running {count_all} test function{plural}", package.name); - - let test_report: Vec<(String, TestStatus)> = test_functions - .into_par_iter() 
- .map(|test_name| { - let status = run_test::( - file_manager, - parsed_files, - package, - &test_name, - show_output, - foreign_call_resolver_url, - root_path.clone(), - package_name.clone(), - compile_options, - ); - - (test_name, status) - }) - .collect(); - - display_test_report(file_manager, package, compile_options, &test_report)?; - Ok(test_report) + let runner = TestRunner { + file_manager: &file_manager, + parsed_files: &parsed_files, + workspace, + args: &args, + pattern, + num_threads: args.test_threads, + formatter, + }; + runner.run() } -#[allow(clippy::too_many_arguments)] -fn run_test + Default>( - file_manager: &FileManager, - parsed_files: &ParsedFiles, - package: &Package, - fn_name: &str, - show_output: bool, - foreign_call_resolver_url: Option<&str>, - root_path: Option, - package_name: Option, - compile_options: &CompileOptions, -) -> TestStatus { - // This is really hacky but we can't share `Context` or `S` across threads. - // We then need to construct a separate copy for each test. 
- - let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - check_crate(&mut context, crate_id, compile_options) - .expect("Any errors should have occurred when collecting test functions"); - - let test_functions = context - .get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Exact(fn_name)); - let (_, test_function) = test_functions.first().expect("Test function should exist"); - - let blackbox_solver = S::default(); - - nargo::ops::run_test( - &blackbox_solver, - &mut context, - test_function, - show_output, - foreign_call_resolver_url, - root_path, - package_name, - compile_options, - ) +struct TestRunner<'a> { + file_manager: &'a FileManager, + parsed_files: &'a ParsedFiles, + workspace: Workspace, + args: &'a TestCommand, + pattern: FunctionNameMatch<'a>, + num_threads: usize, + formatter: Box, } -fn get_tests_in_package( - file_manager: &FileManager, - parsed_files: &ParsedFiles, - package: &Package, - fn_name: FunctionNameMatch, - options: &CompileOptions, -) -> Result, CliError> { - let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); - check_crate_and_report_errors(&mut context, crate_id, options)?; - - Ok(context - .get_all_test_functions_in_crate_matching(&crate_id, fn_name) - .into_iter() - .map(|(test_name, _)| test_name) - .collect()) -} +impl<'a> TestRunner<'a> { + fn run(&self) -> Result<(), CliError> { + // First compile all packages and collect their tests + let packages_tests = self.collect_packages_tests()?; + + // Now gather all tests and how many are per packages + let mut tests = Vec::new(); + let mut test_count_per_package = BTreeMap::new(); + + for (package_name, package_tests) in packages_tests { + test_count_per_package.insert(package_name, package_tests.len()); + tests.extend(package_tests); + } + + // Now run all tests in parallel, but show output for each package sequentially + let tests_count = tests.len(); + let all_passed = self.run_all_tests(tests, 
&test_count_per_package); + + if tests_count == 0 { + match &self.pattern { + FunctionNameMatch::Exact(pattern) => { + return Err(CliError::Generic(format!( + "Found 0 tests matching input '{pattern}'.", + ))) + } + FunctionNameMatch::Contains(pattern) => { + return Err(CliError::Generic( + format!("Found 0 tests containing '{pattern}'.",), + )) + } + // If we are running all tests in a crate, having none is not an error + FunctionNameMatch::Anything => {} + }; + } -fn display_test_report( - file_manager: &FileManager, - package: &Package, - compile_options: &CompileOptions, - test_report: &[(String, TestStatus)], -) -> Result<(), CliError> { - let writer = StandardStream::stderr(ColorChoice::Always); - let mut writer = writer.lock(); - - for (test_name, test_status) in test_report { - write!(writer, "[{}] Testing {test_name}... ", package.name) - .expect("Failed to write to stderr"); - writer.flush().expect("Failed to flush writer"); - - match &test_status { - TestStatus::Pass { .. } => { - writer - .set_color(ColorSpec::new().set_fg(Some(Color::Green))) - .expect("Failed to set color"); - writeln!(writer, "ok").expect("Failed to write to stderr"); + if all_passed { + Ok(()) + } else { + Err(CliError::Generic(String::new())) + } + } + + /// Runs all tests. Returns `true` if all tests passed, `false` otherwise. 
+ fn run_all_tests( + &self, + tests: Vec>, + test_count_per_package: &BTreeMap, + ) -> bool { + let mut all_passed = true; + + for (package_name, total_test_count) in test_count_per_package { + self.formatter + .package_start_async(package_name, *total_test_count) + .expect("Could not display package start"); + } + + let (sender, receiver) = mpsc::channel(); + let iter = &Mutex::new(tests.into_iter()); + thread::scope(|scope| { + // Start worker threads + for _ in 0..self.num_threads { + // Clone sender so it's dropped once the thread finishes + let thread_sender = sender.clone(); + thread::Builder::new() + // Specify a larger-than-default stack size to prevent overflowing stack in large programs. + // (the default is 2MB) + .stack_size(STACK_SIZE) + .spawn_scoped(scope, move || loop { + // Get next test to process from the iterator. + let Some(test) = iter.lock().unwrap().next() else { + break; + }; + + self.formatter + .test_start_async(&test.name, &test.package_name) + .expect("Could not display test start"); + + let time_before_test = std::time::Instant::now(); + let (status, output) = match catch_unwind(test.runner) { + Ok((status, output)) => (status, output), + Err(err) => ( + TestStatus::Fail { + message: + // It seems `panic!("...")` makes the error be `&str`, so we handle this common case + if let Some(message) = err.downcast_ref::<&str>() { + message.to_string() + } else { + "An unexpected error happened".to_string() + }, + error_diagnostic: None, + }, + String::new(), + ), + }; + let time_to_run = time_before_test.elapsed(); + + let test_result = TestResult { + name: test.name, + package_name: test.package_name, + status, + output, + time_to_run, + }; + + self.formatter + .test_end_async( + &test_result, + self.file_manager, + self.args.show_output, + self.args.compile_options.deny_warnings, + self.args.compile_options.silence_warnings, + ) + .expect("Could not display test start"); + + if thread_sender.send(test_result).is_err() { + break; + } + }) + 
.unwrap(); } - TestStatus::Fail { message, error_diagnostic } => { - writer - .set_color(ColorSpec::new().set_fg(Some(Color::Red))) - .expect("Failed to set color"); - writeln!(writer, "FAIL\n{message}\n").expect("Failed to write to stderr"); - if let Some(diag) = error_diagnostic { - noirc_errors::reporter::report_all( - file_manager.as_file_map(), - &[diag.clone()], - compile_options.deny_warnings, - compile_options.silence_warnings, - ); + + // Also drop main sender so the channel closes + drop(sender); + + // We'll go package by package, but we might get test results from packages ahead of us. + // We'll buffer those here and show them all at once when we get to those packages. + let mut buffer: HashMap> = HashMap::new(); + for (package_name, total_test_count) in test_count_per_package { + let mut test_report = Vec::new(); + + let mut current_test_count = 0; + let total_test_count = *total_test_count; + + self.formatter + .package_start_sync(package_name, total_test_count) + .expect("Could not display package start"); + + // Check if we have buffered test results for this package + if let Some(buffered_tests) = buffer.remove(package_name) { + for test_result in buffered_tests { + self.display_test_result( + &test_result, + current_test_count + 1, + total_test_count, + ) + .expect("Could not display test status"); + test_report.push(test_result); + current_test_count += 1; + } + } + + if current_test_count < total_test_count { + while let Ok(test_result) = receiver.recv() { + if test_result.status.failed() { + all_passed = false; + } + + // This is a test result from a different package: buffer it. 
+ if &test_result.package_name != package_name { + buffer + .entry(test_result.package_name.clone()) + .or_default() + .push(test_result); + continue; + } + + self.display_test_result( + &test_result, + current_test_count + 1, + total_test_count, + ) + .expect("Could not display test status"); + test_report.push(test_result); + current_test_count += 1; + if current_test_count == total_test_count { + break; + } + } } + + self.formatter + .package_end( + package_name, + &test_report, + self.file_manager, + self.args.show_output, + self.args.compile_options.deny_warnings, + self.args.compile_options.silence_warnings, + ) + .expect("Could not display test report"); } - TestStatus::Skipped { .. } => { - writer - .set_color(ColorSpec::new().set_fg(Some(Color::Yellow))) - .expect("Failed to set color"); - writeln!(writer, "skipped").expect("Failed to write to stderr"); + }); + + all_passed + } + + /// Compiles all packages in parallel and returns their tests + fn collect_packages_tests(&'a self) -> Result>>, CliError> { + let mut package_tests = BTreeMap::new(); + let mut error = None; + + let (sender, receiver) = mpsc::channel(); + let iter = &Mutex::new(self.workspace.into_iter()); + + thread::scope(|scope| { + // Start worker threads + for _ in 0..self.num_threads { + // Clone sender so it's dropped once the thread finishes + let thread_sender = sender.clone(); + thread::Builder::new() + // Specify a larger-than-default stack size to prevent overflowing stack in large programs. + // (the default is 2MB) + .stack_size(STACK_SIZE) + .spawn_scoped(scope, move || loop { + // Get next package to process from the iterator. 
+ let Some(package) = iter.lock().unwrap().next() else { + break; + }; + let tests = self.collect_package_tests::( + package, + self.args.oracle_resolver.as_deref(), + Some(self.workspace.root_dir.clone()), + package.name.to_string(), + ); + if thread_sender.send((package, tests)).is_err() { + break; + } + }) + .unwrap(); } - TestStatus::CompileError(err) => { - noirc_errors::reporter::report_all( - file_manager.as_file_map(), - &[err.clone()], - compile_options.deny_warnings, - compile_options.silence_warnings, - ); + + // Also drop main sender so the channel closes + drop(sender); + + for (package, tests) in receiver.iter() { + match tests { + Ok(tests) => { + package_tests.insert(package.name.to_string(), tests); + } + Err(err) => { + error = Some(err); + } + } } + }); + + if let Some(error) = error { + Err(error) + } else { + Ok(package_tests) } - writer.reset().expect("Failed to reset writer"); } - write!(writer, "[{}] ", package.name).expect("Failed to write to stderr"); + /// Compiles a single package and returns all of its tests + fn collect_package_tests + Default>( + &'a self, + package: &'a Package, + foreign_call_resolver_url: Option<&'a str>, + root_path: Option, + package_name: String, + ) -> Result>, CliError> { + let test_functions = self.get_tests_in_package(package)?; + + let tests: Vec = test_functions + .into_iter() + .map(|test_name| { + let test_name_copy = test_name.clone(); + let root_path = root_path.clone(); + let package_name_clone = package_name.clone(); + let package_name_clone2 = package_name.clone(); + let runner = Box::new(move || { + self.run_test::( + package, + &test_name, + foreign_call_resolver_url, + root_path, + package_name_clone.clone(), + ) + }); + Test { name: test_name_copy, package_name: package_name_clone2, runner } + }) + .collect(); - let count_all = test_report.len(); - let count_failed = test_report.iter().filter(|(_, status)| status.failed()).count(); - let plural = if count_all == 1 { "" } else { "s" }; - if 
count_failed == 0 { - writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).expect("Failed to set color"); - write!(writer, "{count_all} test{plural} passed").expect("Failed to write to stderr"); - writer.reset().expect("Failed to reset writer"); - writeln!(writer).expect("Failed to write to stderr"); - } else { - let count_passed = count_all - count_failed; - let plural_failed = if count_failed == 1 { "" } else { "s" }; - let plural_passed = if count_passed == 1 { "" } else { "s" }; - - if count_passed != 0 { - writer - .set_color(ColorSpec::new().set_fg(Some(Color::Green))) - .expect("Failed to set color"); - write!(writer, "{count_passed} test{plural_passed} passed, ",) - .expect("Failed to write to stderr"); - } + Ok(tests) + } + + /// Compiles a single package and returns all of its test names + fn get_tests_in_package(&'a self, package: &'a Package) -> Result, CliError> { + let (mut context, crate_id) = + prepare_package(self.file_manager, self.parsed_files, package); + check_crate_and_report_errors(&mut context, crate_id, &self.args.compile_options)?; - writer.set_color(ColorSpec::new().set_fg(Some(Color::Red))).expect("Failed to set color"); - writeln!(writer, "{count_failed} test{plural_failed} failed") - .expect("Failed to write to stderr"); - writer.reset().expect("Failed to reset writer"); + Ok(context + .get_all_test_functions_in_crate_matching(&crate_id, self.pattern) + .into_iter() + .map(|(test_name, _)| test_name) + .collect()) + } + + /// Runs a single test and returns its status together with whatever was printed to stdout + /// during the test. + fn run_test + Default>( + &'a self, + package: &Package, + fn_name: &str, + foreign_call_resolver_url: Option<&str>, + root_path: Option, + package_name: String, + ) -> (TestStatus, String) { + // This is really hacky but we can't share `Context` or `S` across threads. + // We then need to construct a separate copy for each test. 
+ + let (mut context, crate_id) = + prepare_package(self.file_manager, self.parsed_files, package); + check_crate(&mut context, crate_id, &self.args.compile_options) + .expect("Any errors should have occurred when collecting test functions"); + + let test_functions = context + .get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Exact(fn_name)); + let (_, test_function) = test_functions.first().expect("Test function should exist"); + + let blackbox_solver = S::default(); + let mut output_string = String::new(); + + let test_status = nargo::ops::run_test( + &blackbox_solver, + &mut context, + test_function, + PrintOutput::String(&mut output_string), + foreign_call_resolver_url, + root_path, + Some(package_name), + &self.args.compile_options, + ); + (test_status, output_string) } - Ok(()) + /// Display the status of a single test + fn display_test_result( + &'a self, + test_result: &'a TestResult, + current_test_count: usize, + total_test_count: usize, + ) -> std::io::Result<()> { + self.formatter.test_end_sync( + test_result, + current_test_count, + total_test_count, + self.file_manager, + self.args.show_output, + self.args.compile_options.deny_warnings, + self.args.compile_options.silence_warnings, + ) + } } diff --git a/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs new file mode 100644 index 00000000000..1b9b2d50378 --- /dev/null +++ b/noir/noir-repo/tooling/nargo_cli/src/cli/test_cmd/formatters.rs @@ -0,0 +1,539 @@ +use std::{io::Write, panic::RefUnwindSafe, time::Duration}; + +use fm::FileManager; +use nargo::ops::TestStatus; +use noirc_errors::{reporter::stack_trace, FileDiagnostic}; +use serde_json::{json, Map}; +use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, StandardStreamLock, WriteColor}; + +use super::TestResult; + +/// A formatter for showing test results. +/// +/// The order of events is: +/// 1. 
Compilation of all packages happens (in parallel). There's no formatter method for this. +/// 2. If compilation is successful, one `package_start_async` for each package. +/// 3. For each test, one `test_start_async` event +/// (there's no `test_start_sync` event because it would happen right before `test_end_sync`) +/// 4. For each package, sequentially: +/// a. A `package_start_sync` event +/// b. One `test_end` event for each test +/// c. A `package_end` event +/// +/// The reason we have some `sync` and `async` events is that formatters that show output +/// to humans rely on the `sync` events to show a more predictable output (package by package), +/// and formatters that output to a machine-readable format (like JSON) rely on the `async` +/// events to show things as soon as they happen, regardless of a package ordering. +pub(super) trait Formatter: Send + Sync + RefUnwindSafe { + fn package_start_async(&self, package_name: &str, test_count: usize) -> std::io::Result<()>; + + fn package_start_sync(&self, package_name: &str, test_count: usize) -> std::io::Result<()>; + + fn test_start_async(&self, name: &str, package_name: &str) -> std::io::Result<()>; + + #[allow(clippy::too_many_arguments)] + fn test_end_async( + &self, + test_result: &TestResult, + file_manager: &FileManager, + show_output: bool, + deny_warnings: bool, + silence_warnings: bool, + ) -> std::io::Result<()>; + + #[allow(clippy::too_many_arguments)] + fn test_end_sync( + &self, + test_result: &TestResult, + current_test_count: usize, + total_test_count: usize, + file_manager: &FileManager, + show_output: bool, + deny_warnings: bool, + silence_warnings: bool, + ) -> std::io::Result<()>; + + fn package_end( + &self, + package_name: &str, + test_results: &[TestResult], + file_manager: &FileManager, + show_output: bool, + deny_warnings: bool, + silence_warnings: bool, + ) -> std::io::Result<()>; +} + +pub(super) struct PrettyFormatter; + +impl Formatter for PrettyFormatter { + fn 
package_start_async(&self, _package_name: &str, _test_count: usize) -> std::io::Result<()> { + Ok(()) + } + + fn package_start_sync(&self, package_name: &str, test_count: usize) -> std::io::Result<()> { + package_start(package_name, test_count) + } + + fn test_start_async(&self, _name: &str, _package_name: &str) -> std::io::Result<()> { + Ok(()) + } + + fn test_end_async( + &self, + _test_result: &TestResult, + _file_manager: &FileManager, + _show_output: bool, + _deny_warnings: bool, + _silence_warnings: bool, + ) -> std::io::Result<()> { + Ok(()) + } + + fn test_end_sync( + &self, + test_result: &TestResult, + _current_test_count: usize, + _total_test_count: usize, + file_manager: &FileManager, + show_output: bool, + deny_warnings: bool, + silence_warnings: bool, + ) -> std::io::Result<()> { + let writer = StandardStream::stderr(ColorChoice::Always); + let mut writer = writer.lock(); + + let is_slow = test_result.time_to_run >= Duration::from_secs(30); + let show_time = |writer: &mut StandardStreamLock<'_>| { + if is_slow { + write!(writer, " <{:.3}s>", test_result.time_to_run.as_secs_f64()) + } else { + Ok(()) + } + }; + + write!(writer, "[{}] Testing {}... ", &test_result.package_name, &test_result.name)?; + writer.flush()?; + + match &test_result.status { + TestStatus::Pass { .. } => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, "ok")?; + writer.reset()?; + show_time(&mut writer)?; + writeln!(writer)?; + } + TestStatus::Fail { message, error_diagnostic } => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; + write!(writer, "FAIL\n{message}\n")?; + writer.reset()?; + show_time(&mut writer)?; + writeln!(writer)?; + if let Some(diag) = error_diagnostic { + noirc_errors::reporter::report_all( + file_manager.as_file_map(), + &[diag.clone()], + deny_warnings, + silence_warnings, + ); + } + } + TestStatus::Skipped { .. 
} => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?; + write!(writer, "skipped")?; + writer.reset()?; + show_time(&mut writer)?; + writeln!(writer)?; + } + TestStatus::CompileError(file_diagnostic) => { + noirc_errors::reporter::report_all( + file_manager.as_file_map(), + &[file_diagnostic.clone()], + deny_warnings, + silence_warnings, + ); + } + } + + if show_output && !test_result.output.is_empty() { + writeln!(writer, "--- {} stdout ---", test_result.name)?; + write!(writer, "{}", test_result.output)?; + let name_len = test_result.name.len(); + writeln!(writer, "{}", "-".repeat(name_len + "--- stdout ---".len())) + } else { + Ok(()) + } + } + + fn package_end( + &self, + package_name: &str, + test_results: &[TestResult], + _file_manager: &FileManager, + _show_output: bool, + _deny_warnings: bool, + _silence_warnings: bool, + ) -> std::io::Result<()> { + let writer = StandardStream::stderr(ColorChoice::Always); + let mut writer = writer.lock(); + + let failed_tests: Vec<_> = test_results + .iter() + .filter_map(|test_result| test_result.status.failed().then_some(&test_result.name)) + .collect(); + + if !failed_tests.is_empty() { + writeln!(writer)?; + writeln!(writer, "[{}] Failures:", package_name)?; + for failed_test in failed_tests { + writeln!(writer, " {}", failed_test)?; + } + writeln!(writer)?; + } + + write!(writer, "[{}] ", package_name)?; + + let count_all = test_results.len(); + let count_failed = + test_results.iter().filter(|test_result| test_result.status.failed()).count(); + let plural = if count_all == 1 { "" } else { "s" }; + if count_failed == 0 { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, "{count_all} test{plural} passed")?; + writer.reset()?; + writeln!(writer)?; + } else { + let count_passed = count_all - count_failed; + let plural_failed = if count_failed == 1 { "" } else { "s" }; + let plural_passed = if count_passed == 1 { "" } else { "s" }; + + if count_passed != 0 { + 
writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, "{count_passed} test{plural_passed} passed, ")?; + } + + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; + writeln!(writer, "{count_failed} test{plural_failed} failed")?; + writer.reset()?; + } + + Ok(()) + } +} + +pub(super) struct TerseFormatter; + +impl Formatter for TerseFormatter { + fn package_start_async(&self, _package_name: &str, _test_count: usize) -> std::io::Result<()> { + Ok(()) + } + + fn package_start_sync(&self, package_name: &str, test_count: usize) -> std::io::Result<()> { + package_start(package_name, test_count) + } + + fn test_start_async(&self, _name: &str, _package_name: &str) -> std::io::Result<()> { + Ok(()) + } + + fn test_end_async( + &self, + _test_result: &TestResult, + _file_manager: &FileManager, + _show_output: bool, + _deny_warnings: bool, + _silence_warnings: bool, + ) -> std::io::Result<()> { + Ok(()) + } + + fn test_end_sync( + &self, + test_result: &TestResult, + current_test_count: usize, + total_test_count: usize, + _file_manager: &FileManager, + _show_output: bool, + _deny_warnings: bool, + _silence_warnings: bool, + ) -> std::io::Result<()> { + let writer = StandardStream::stderr(ColorChoice::Always); + let mut writer = writer.lock(); + + match &test_result.status { + TestStatus::Pass => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, ".")?; + writer.reset()?; + } + TestStatus::Fail { .. } | TestStatus::CompileError(_) => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; + write!(writer, "F")?; + writer.reset()?; + } + TestStatus::Skipped => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?; + write!(writer, "s")?; + writer.reset()?; + } + } + + // How many tests ('.', 'F', etc.) to print per line. 
+ // We use 88 which is a bit more than the traditional 80 columns (screens are larger these days) + // but we also want the output to be readable in case the terminal isn't maximized. + const MAX_TESTS_PER_LINE: usize = 88; + + if current_test_count % MAX_TESTS_PER_LINE == 0 && current_test_count < total_test_count { + writeln!(writer, " {}/{}", current_test_count, total_test_count)?; + } + + Ok(()) + } + + fn package_end( + &self, + package_name: &str, + test_results: &[TestResult], + file_manager: &FileManager, + show_output: bool, + deny_warnings: bool, + silence_warnings: bool, + ) -> std::io::Result<()> { + let writer = StandardStream::stderr(ColorChoice::Always); + let mut writer = writer.lock(); + + if !test_results.is_empty() { + writeln!(writer)?; + } + + for test_result in test_results { + if (show_output && !test_result.output.is_empty()) || test_result.status.failed() { + writeln!(writer, "--- {} stdout ---", test_result.name)?; + if !test_result.output.is_empty() { + write!(writer, "{}", test_result.output)?; + } + + match &test_result.status { + TestStatus::Pass | TestStatus::Skipped => (), + TestStatus::Fail { message, error_diagnostic } => { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; + writeln!(writer, "{message}")?; + writer.reset()?; + if let Some(diag) = error_diagnostic { + noirc_errors::reporter::report_all( + file_manager.as_file_map(), + &[diag.clone()], + deny_warnings, + silence_warnings, + ); + } + } + TestStatus::CompileError(file_diagnostic) => { + noirc_errors::reporter::report_all( + file_manager.as_file_map(), + &[file_diagnostic.clone()], + deny_warnings, + silence_warnings, + ); + } + } + + let name_len = test_result.name.len(); + writeln!(writer, "{}", "-".repeat(name_len + "--- stdout ---".len()))?; + } + } + + let failed_tests: Vec<_> = test_results + .iter() + .filter_map(|test_result| test_result.status.failed().then_some(&test_result.name)) + .collect(); + + if !failed_tests.is_empty() { + 
writeln!(writer)?; + writeln!(writer, "[{}] Failures:", package_name)?; + for failed_test in failed_tests { + writeln!(writer, " {}", failed_test)?; + } + writeln!(writer)?; + } + + write!(writer, "[{}] ", package_name)?; + + let count_all = test_results.len(); + let count_failed = + test_results.iter().filter(|test_result| test_result.status.failed()).count(); + let plural = if count_all == 1 { "" } else { "s" }; + if count_failed == 0 { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, "{count_all} test{plural} passed")?; + writer.reset()?; + writeln!(writer)?; + } else { + let count_passed = count_all - count_failed; + let plural_failed = if count_failed == 1 { "" } else { "s" }; + let plural_passed = if count_passed == 1 { "" } else { "s" }; + + if count_passed != 0 { + writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?; + write!(writer, "{count_passed} test{plural_passed} passed, ")?; + } + + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?; + writeln!(writer, "{count_failed} test{plural_failed} failed")?; + writer.reset()?; + } + + Ok(()) + } +} + +pub(super) struct JsonFormatter; + +impl Formatter for JsonFormatter { + fn package_start_async(&self, package_name: &str, test_count: usize) -> std::io::Result<()> { + let json = json!({"type": "suite", "event": "started", "name": package_name, "test_count": test_count}); + println!("{json}"); + Ok(()) + } + + fn package_start_sync(&self, _package_name: &str, _test_count: usize) -> std::io::Result<()> { + Ok(()) + } + + fn test_start_async(&self, name: &str, package_name: &str) -> std::io::Result<()> { + let json = json!({"type": "test", "event": "started", "name": name, "suite": package_name}); + println!("{json}"); + Ok(()) + } + + fn test_end_async( + &self, + test_result: &TestResult, + file_manager: &FileManager, + show_output: bool, + _deny_warnings: bool, + silence_warnings: bool, + ) -> std::io::Result<()> { + let mut json = Map::new(); + 
json.insert("type".to_string(), json!("test")); + json.insert("name".to_string(), json!(&test_result.name)); + json.insert("exec_time".to_string(), json!(test_result.time_to_run.as_secs_f64())); + + let mut stdout = String::new(); + if show_output && !test_result.output.is_empty() { + stdout.push_str(test_result.output.trim()); + } + + match &test_result.status { + TestStatus::Pass => { + json.insert("event".to_string(), json!("ok")); + } + TestStatus::Fail { message, error_diagnostic } => { + json.insert("event".to_string(), json!("failed")); + + if !stdout.is_empty() { + stdout.push('\n'); + } + stdout.push_str(message.trim()); + + if let Some(diagnostic) = error_diagnostic { + if !(diagnostic.diagnostic.is_warning() && silence_warnings) { + stdout.push('\n'); + stdout.push_str(&diagnostic_to_string(diagnostic, file_manager)); + } + } + } + TestStatus::Skipped => { + json.insert("event".to_string(), json!("ignored")); + } + TestStatus::CompileError(diagnostic) => { + json.insert("event".to_string(), json!("failed")); + + if !(diagnostic.diagnostic.is_warning() && silence_warnings) { + if !stdout.is_empty() { + stdout.push('\n'); + } + stdout.push_str(&diagnostic_to_string(diagnostic, file_manager)); + } + } + } + + if !stdout.is_empty() { + json.insert("stdout".to_string(), json!(stdout)); + } + + let json = json!(json); + println!("{json}"); + + Ok(()) + } + + fn test_end_sync( + &self, + _test_result: &TestResult, + _current_test_count: usize, + _total_test_count: usize, + _file_manager: &FileManager, + _show_output: bool, + _deny_warnings: bool, + _silence_warnings: bool, + ) -> std::io::Result<()> { + Ok(()) + } + + fn package_end( + &self, + _package_name: &str, + test_results: &[TestResult], + _file_manager: &FileManager, + _show_output: bool, + _deny_warnings: bool, + _silence_warnings: bool, + ) -> std::io::Result<()> { + let mut passed = 0; + let mut failed = 0; + let mut ignored = 0; + for test_result in test_results { + match &test_result.status { + 
TestStatus::Pass => passed += 1, + TestStatus::Fail { .. } | TestStatus::CompileError(..) => failed += 1, + TestStatus::Skipped => ignored += 1, + } + } + let event = if failed == 0 { "ok" } else { "failed" }; + let json = json!({"type": "suite", "event": event, "passed": passed, "failed": failed, "ignored": ignored}); + println!("{json}"); + Ok(()) + } +} + +fn package_start(package_name: &str, test_count: usize) -> std::io::Result<()> { + let plural = if test_count == 1 { "" } else { "s" }; + println!("[{package_name}] Running {test_count} test function{plural}"); + Ok(()) +} + +fn diagnostic_to_string(file_diagnostic: &FileDiagnostic, file_manager: &FileManager) -> String { + let file_map = file_manager.as_file_map(); + + let custom_diagnostic = &file_diagnostic.diagnostic; + let mut message = String::new(); + message.push_str(custom_diagnostic.message.trim()); + + for note in &custom_diagnostic.notes { + message.push('\n'); + message.push_str(note.trim()); + } + + if let Ok(name) = file_map.get_name(file_diagnostic.file_id) { + message.push('\n'); + message.push_str(&format!("at {name}")); + } + + if !custom_diagnostic.call_stack.is_empty() { + message.push('\n'); + message.push_str(&stack_trace(file_map, &custom_diagnostic.call_stack)); + } + + message +} diff --git a/noir/noir-repo/tooling/nargo_cli/src/errors.rs b/noir/noir-repo/tooling/nargo_cli/src/errors.rs index b28012ae7aa..9255d6fc6a6 100644 --- a/noir/noir-repo/tooling/nargo_cli/src/errors.rs +++ b/noir/noir-repo/tooling/nargo_cli/src/errors.rs @@ -2,7 +2,11 @@ use acvm::{acir::native_types::WitnessStackError, FieldElement}; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; use noir_debugger::errors::DapError; -use noirc_abi::errors::{AbiError, InputParserError}; +use noirc_abi::{ + errors::{AbiError, InputParserError}, + input_parser::InputValue, + AbiReturnType, +}; use std::path::PathBuf; use thiserror::Error; @@ -32,6 +36,7 @@ pub(crate) enum FilesystemError { 
pub(crate) enum CliError { #[error("{0}")] Generic(String), + #[error("Error: destination {} already exists", .0.display())] DestinationAlreadyExists(PathBuf), @@ -63,4 +68,10 @@ pub(crate) enum CliError { /// Error from the compilation pipeline #[error(transparent)] CompileError(#[from] CompileError), + + #[error("Unexpected return value: expected {expected:?}; got {actual:?}")] + UnexpectedReturn { expected: InputValue, actual: Option }, + + #[error("Missing return witnesses; expected {expected:?}")] + MissingReturn { expected: AbiReturnType }, } diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs index 86c225831b9..a19408bd5fd 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs +++ b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-props.rs @@ -2,7 +2,9 @@ use std::{cell::RefCell, collections::BTreeMap, path::Path}; use acvm::{acir::native_types::WitnessStack, AcirField, FieldElement}; use iter_extended::vecmap; -use nargo::{foreign_calls::DefaultForeignCallExecutor, ops::execute_program, parse_all}; +use nargo::{ + foreign_calls::DefaultForeignCallExecutor, ops::execute_program, parse_all, PrintOutput, +}; use noirc_abi::input_parser::InputValue; use noirc_driver::{ compile_main, file_manager_with_stdlib, prepare_crate, CompilationResult, CompileOptions, @@ -80,7 +82,7 @@ fn run_snippet_proptest( let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver; let foreign_call_executor = - RefCell::new(DefaultForeignCallExecutor::new(false, None, None, None)); + RefCell::new(DefaultForeignCallExecutor::new(PrintOutput::None, None, None, None)); // Generate multiple input/output proptest!(ProptestConfig::with_cases(100), |(io in strategy)| { diff --git a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs index 99f0c9a2e7f..29b871814b8 100644 --- a/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs +++ 
b/noir/noir-repo/tooling/nargo_cli/tests/stdlib-tests.rs @@ -2,6 +2,7 @@ #![allow(clippy::items_after_test_module)] use clap::Parser; use fm::FileManager; +use nargo::PrintOutput; use noirc_driver::{check_crate, file_manager_with_stdlib, CompileOptions}; use noirc_frontend::hir::FunctionNameMatch; use std::io::Write; @@ -86,7 +87,7 @@ fn run_stdlib_tests(force_brillig: bool, inliner_aggressiveness: i64) { &bn254_blackbox_solver::Bn254BlackBoxSolver, &mut context, &test_function, - true, + PrintOutput::Stdout, None, Some(dummy_package.root_dir.clone()), Some(dummy_package.name.to_string()), diff --git a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs index 0730d06ad72..ecc9fab18ce 100644 --- a/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs +++ b/noir/noir-repo/tooling/nargo_fmt/src/formatter/expression.rs @@ -104,11 +104,12 @@ impl<'a, 'b> ChunkFormatter<'a, 'b> { formatter.write_left_paren(); formatter.write_right_paren(); })), - Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_) | Literal::RawStr(..) => group - .text(self.chunk(|formatter| { + Literal::Bool(_) | Literal::Str(_) | Literal::FmtStr(_, _) | Literal::RawStr(..) => { + group.text(self.chunk(|formatter| { formatter.write_current_token_as_in_source(); formatter.bump(); - })), + })); + } Literal::Integer(..) 
=> group.text(self.chunk(|formatter| { if formatter.is_at(Token::Minus) { formatter.write_token(Token::Minus); diff --git a/noir/noir-repo/tooling/nargo_toml/Cargo.toml b/noir/noir-repo/tooling/nargo_toml/Cargo.toml index e4766e44859..f5f7d7cd595 100644 --- a/noir/noir-repo/tooling/nargo_toml/Cargo.toml +++ b/noir/noir-repo/tooling/nargo_toml/Cargo.toml @@ -25,3 +25,5 @@ noirc_driver.workspace = true semver = "1.0.20" [dev-dependencies] +tempfile.workspace = true +test-case.workspace = true diff --git a/noir/noir-repo/tooling/nargo_toml/src/git.rs b/noir/noir-repo/tooling/nargo_toml/src/git.rs index 80e57247ae6..efaed4fabb9 100644 --- a/noir/noir-repo/tooling/nargo_toml/src/git.rs +++ b/noir/noir-repo/tooling/nargo_toml/src/git.rs @@ -3,16 +3,20 @@ use std::path::PathBuf; /// Creates a unique folder name for a GitHub repo /// by using its URL and tag fn resolve_folder_name(base: &url::Url, tag: &str) -> String { - let mut folder_name = base.domain().unwrap().to_owned(); - folder_name.push_str(base.path()); - folder_name.push_str(tag); - folder_name + let mut folder = PathBuf::from(""); + for part in [base.domain().unwrap(), base.path(), tag] { + folder.push(part.trim_start_matches('/')); + } + folder.to_string_lossy().into_owned() } +/// Path to the `nargo` directory under `$HOME`. fn nargo_crates() -> PathBuf { dirs::home_dir().unwrap().join("nargo") } +/// Target directory to download dependencies into, e.g. 
+/// `$HOME/nargo/github.com/noir-lang/noir-bignum/v0.1.2` fn git_dep_location(base: &url::Url, tag: &str) -> PathBuf { let folder_name = resolve_folder_name(base, tag); @@ -53,3 +57,19 @@ pub(crate) fn clone_git_repo(url: &str, tag: &str) -> Result { Ok(loc) } + +#[cfg(test)] +mod tests { + use test_case::test_case; + use url::Url; + + use super::resolve_folder_name; + + #[test_case("https://github.com/noir-lang/noir-bignum/"; "with slash")] + #[test_case("https://github.com/noir-lang/noir-bignum"; "without slash")] + fn test_resolve_folder_name(url: &str) { + let tag = "v0.4.2"; + let dir = resolve_folder_name(&Url::parse(url).unwrap(), tag); + assert_eq!(dir, "github.com/noir-lang/noir-bignum/v0.4.2"); + } +} diff --git a/noir/noir-repo/tooling/nargo_toml/src/lib.rs b/noir/noir-repo/tooling/nargo_toml/src/lib.rs index c0d8c7997fd..b5c45977618 100644 --- a/noir/noir-repo/tooling/nargo_toml/src/lib.rs +++ b/noir/noir-repo/tooling/nargo_toml/src/lib.rs @@ -47,6 +47,35 @@ pub fn find_file_manifest(current_path: &Path) -> Option { } /// Returns the [PathBuf] of the directory containing the `Nargo.toml` by searching from `current_path` to the root of its [Path]. +/// When `workspace` is `true` it returns the topmost directory, when `false` the innermost one. +/// +/// Returns a [ManifestError] if no parent directories of `current_path` contain a manifest file. +pub fn find_root(current_path: &Path, workspace: bool) -> Result { + if workspace { + find_package_root(current_path) + } else { + find_file_root(current_path) + } +} + +/// Returns the [PathBuf] of the directory containing the `Nargo.toml` by searching from `current_path` to the root of its [Path], +/// returning at the innermost directory found, i.e. the one corresponding to the package that contains the `current_path`. +/// +/// Returns a [ManifestError] if no parent directories of `current_path` contain a manifest file. 
+pub fn find_file_root(current_path: &Path) -> Result { + match find_file_manifest(current_path) { + Some(manifest_path) => { + let package_root = manifest_path + .parent() + .expect("infallible: manifest file path can't be root directory"); + Ok(package_root.to_path_buf()) + } + None => Err(ManifestError::MissingFile(current_path.to_path_buf())), + } +} + +/// Returns the [PathBuf] of the directory containing the `Nargo.toml` by searching from `current_path` to the root of its [Path], +/// returning at the topmost directory found, i.e. the one corresponding to the entire workspace. /// /// Returns a [ManifestError] if no parent directories of `current_path` contain a manifest file. pub fn find_package_root(current_path: &Path) -> Result { @@ -60,6 +89,11 @@ pub fn find_package_root(current_path: &Path) -> Result } // TODO(#2323): We are probably going to need a "filepath utils" crate soon +/// Get the root of path, for example: +/// * `C:\foo\bar` -> `C:\foo` +/// * `//shared/foo/bar` -> `//shared/foo` +/// * `/foo` -> `/foo` +/// otherwise empty path. fn path_root(path: &Path) -> PathBuf { let mut components = path.components(); @@ -101,6 +135,7 @@ pub fn find_package_manifest( }) } } + /// Returns the [PathBuf] of the `Nargo.toml` file in the `current_path` directory. /// /// Returns a [ManifestError] if `current_path` does not contain a manifest file. 
@@ -469,7 +504,7 @@ fn resolve_package_from_toml( result } -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Clone)] pub enum PackageSelection { Selected(CrateName), DefaultOrAll, @@ -490,9 +525,20 @@ pub fn resolve_workspace_from_toml( Ok(workspace) } -#[test] -fn parse_standard_toml() { - let src = r#" +#[cfg(test)] +mod tests { + use std::{ + path::{Path, PathBuf}, + str::FromStr, + }; + + use test_case::test_matrix; + + use crate::{find_root, Config, ManifestError}; + + #[test] + fn parse_standard_toml() { + let src = r#" [package] name = "test" @@ -505,49 +551,49 @@ fn parse_standard_toml() { hello = {path = "./noir_driver"} "#; - assert!(Config::try_from(String::from(src)).is_ok()); - assert!(Config::try_from(src).is_ok()); -} + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); + } -#[test] -fn parse_package_toml_no_deps() { - let src = r#" + #[test] + fn parse_package_toml_no_deps() { + let src = r#" [package] name = "test" authors = ["kev", "foo"] compiler_version = "*" "#; - assert!(Config::try_from(String::from(src)).is_ok()); - assert!(Config::try_from(src).is_ok()); -} + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); + } -#[test] -fn parse_workspace_toml() { - let src = r#" + #[test] + fn parse_workspace_toml() { + let src = r#" [workspace] members = ["a", "b"] "#; - assert!(Config::try_from(String::from(src)).is_ok()); - assert!(Config::try_from(src).is_ok()); -} + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); + } -#[test] -fn parse_workspace_default_member_toml() { - let src = r#" + #[test] + fn parse_workspace_default_member_toml() { + let src = r#" [workspace] members = ["a", "b"] default-member = "a" "#; - assert!(Config::try_from(String::from(src)).is_ok()); - assert!(Config::try_from(src).is_ok()); -} + assert!(Config::try_from(String::from(src)).is_ok()); + 
assert!(Config::try_from(src).is_ok()); + } -#[test] -fn parse_package_expression_width_toml() { - let src = r#" + #[test] + fn parse_package_expression_width_toml() { + let src = r#" [package] name = "test" version = "0.1.0" @@ -556,6 +602,124 @@ fn parse_package_expression_width_toml() { expression_width = "3" "#; - assert!(Config::try_from(String::from(src)).is_ok()); - assert!(Config::try_from(src).is_ok()); + assert!(Config::try_from(String::from(src)).is_ok()); + assert!(Config::try_from(src).is_ok()); + } + + /// Test that `find_root` handles all kinds of prefixes. + /// (It dispatches based on `workspace` to methods which handle paths differently). + #[test_matrix( + [true, false], + ["C:\\foo\\bar", "//shared/foo/bar", "/foo/bar", "bar/baz", ""] + )] + fn test_find_root_does_not_panic(workspace: bool, path: &str) { + let path = PathBuf::from_str(path).unwrap(); + let error = find_root(&path, workspace).expect_err("non-existing paths"); + assert!(matches!(error, ManifestError::MissingFile(_))); + } + + /// Test to demonstrate how `find_root` works. 
+ #[test] + fn test_find_root_example() { + const INDENT_SIZE: usize = 4; + /// Create directories and files according to a YAML-like layout below + fn setup(layout: &str, root: &Path) { + fn is_dir(item: &str) -> bool { + !item.contains('.') + } + let mut current_dir = root.to_path_buf(); + let mut current_indent = 0; + let mut last_item: Option = None; + + for line in layout.lines() { + if let Some((prefix, item)) = line.split_once('-') { + let item = item.replace(std::path::MAIN_SEPARATOR, "_").trim().to_string(); + + let indent = prefix.len() / INDENT_SIZE; + + if last_item.is_none() { + current_indent = indent; + } + + assert!( + indent <= current_indent + 1, + "cannot increase indent by more than {INDENT_SIZE}; item = {item}, current_dir={}", current_dir.display() + ); + + // Go into the last created directory + if indent > current_indent && last_item.is_some() { + let last_item = last_item.unwrap(); + assert!(is_dir(&last_item), "last item was not a dir: {last_item}"); + current_dir.push(last_item); + current_indent += 1; + } + // Go back into an ancestor directory + while indent < current_indent { + current_dir.pop(); + current_indent -= 1; + } + // Create a file or a directory + let item_path = current_dir.join(&item); + if is_dir(&item) { + std::fs::create_dir(&item_path).unwrap_or_else(|e| { + panic!("failed to create dir {}: {e}", item_path.display()) + }); + } else { + std::fs::write(&item_path, "").expect("failed to create file"); + } + + last_item = Some(item); + } + } + } + + // Temporary directory to hold the project. 
+ let tmp = tempfile::tempdir().unwrap(); + // Join a string path to the tmp dir + let path = |p: &str| tmp.path().join(p); + // Check that an expected root is found + let assert_ok = |current_dir: &str, ws: bool, exp: &str| { + let root = find_root(&path(current_dir), ws).expect("should find a root"); + assert_eq!(root, path(exp)); + }; + // Check that a root is not found + let assert_err = |current_dir: &str| { + find_root(&path(current_dir), true).expect_err("shouldn't find a root"); + }; + + let layout = r" + - project + - docs + - workspace + - packages + - foo + - Nargo.toml + - Prover.toml + - src + - main.nr + - bar + - Nargo.toml + - src + - lib.nr + - Nargo.toml + - examples + - baz + - Nargo.toml + - src + - main.nr + "; + + // Set up the file system. + setup(layout, tmp.path()); + + assert_err("dummy"); + assert_err("project/docs"); + assert_err("project/examples"); + assert_ok("project/workspace", true, "project/workspace"); + assert_ok("project/workspace", false, "project/workspace"); + assert_ok("project/workspace/packages/foo", true, "project/workspace"); + assert_ok("project/workspace/packages/bar", false, "project/workspace/packages/bar"); + assert_ok("project/examples/baz/src", true, "project/examples/baz"); + assert_ok("project/examples/baz/src", false, "project/examples/baz"); + } } diff --git a/noir/noir-repo/tooling/noirc_abi_wasm/build.sh b/noir/noir-repo/tooling/noirc_abi_wasm/build.sh index c07d2d8a4c1..16fb26e55db 100755 --- a/noir/noir-repo/tooling/noirc_abi_wasm/build.sh +++ b/noir/noir-repo/tooling/noirc_abi_wasm/build.sh @@ -25,7 +25,7 @@ function run_if_available { require_command jq require_command cargo require_command wasm-bindgen -#require_command wasm-opt +require_command wasm-opt self_path=$(dirname "$(readlink -f "$0")") pname=$(cargo read-manifest | jq -r '.name') diff --git a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs index 
6d6da89f660..76b23ebf739 100644 --- a/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs +++ b/noir/noir-repo/tooling/profiler/src/cli/execution_flamegraph_cmd.rs @@ -3,6 +3,7 @@ use std::path::{Path, PathBuf}; use acir::circuit::OpcodeLocation; use clap::Args; use color_eyre::eyre::{self, Context}; +use nargo::PrintOutput; use crate::flamegraph::{BrilligExecutionSample, FlamegraphGenerator, InfernoFlamegraphGenerator}; use crate::fs::{read_inputs_from_file, read_program_from_file}; @@ -54,7 +55,7 @@ fn run_with_generator( &program.bytecode, initial_witness, &Bn254BlackBoxSolver, - &mut DefaultForeignCallExecutor::new(true, None, None, None), + &mut DefaultForeignCallExecutor::new(PrintOutput::Stdout, None, None, None), )?; println!("Executed"); diff --git a/noir/noir-repo/yarn.lock b/noir/noir-repo/yarn.lock index 3c8df2b1772..77962512b08 100644 --- a/noir/noir-repo/yarn.lock +++ b/noir/noir-repo/yarn.lock @@ -221,9 +221,9 @@ __metadata: languageName: node linkType: hard -"@aztec/bb.js@portal:../../../../barretenberg/ts::locator=integration-tests%40workspace%3Acompiler%2Fintegration-tests": - version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../../../../barretenberg/ts::locator=integration-tests%40workspace%3Acompiler%2Fintegration-tests" +"@aztec/bb.js@npm:0.66.0": + version: 0.66.0 + resolution: "@aztec/bb.js@npm:0.66.0" dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -232,9 +232,10 @@ __metadata: pako: ^2.1.0 tslib: ^2.4.0 bin: - bb.js: ./dest/node/main.js + bb.js: dest/node/main.js + checksum: 7295bf6543afe1c2db795a95c7ed40806de63c77e44bb4dacb2ec6a9171d1d34749150844ab47bc2499d06676e623acb408857b6aa9da702d3c576efadb8c906 languageName: node - linkType: soft + linkType: hard "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 @@ -14123,7 +14124,7 @@ 
__metadata: version: 0.0.0-use.local resolution: "integration-tests@workspace:compiler/integration-tests" dependencies: - "@aztec/bb.js": "portal:../../../../barretenberg/ts" + "@aztec/bb.js": 0.66.0 "@noir-lang/noir_js": "workspace:*" "@noir-lang/noir_wasm": "workspace:*" "@nomicfoundation/hardhat-chai-matchers": ^2.0.0