feat: run solidity tests for all acir artifacts (#3161)
fixes: #3048

This PR:
- Adds a secp256k1 circuit and Solidity verifier to the tests
- Runs a Solidity verifier test on ALL acir artifacts:
   - Generate a proof
   - Generate a vk
   - Generate the Solidity verification key contract
   - Start anvil on a random port
   - Compile the contracts, using solcjs to orchestrate compilation
   - Deploy the contract
   - Test the contract with the created proof

This PR builds on #3215, which fixes the verifier issues in a blanket manner rather than directly addressing the dummy constraints issue that appears to be causing them.
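
For reference, the end-to-end flow can be run locally roughly the way the new Dockerfile.bb.sol does, assuming a native bb build at the default BIN path and foundry's anvil on the PATH:

  cd barretenberg/acir_tests
  (cd sol-test && yarn)                    # install the js test runner's dependencies
  PARALLEL=1 FLOW=sol ./run_acir_tests.sh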
Maddiaa0 authored Nov 7, 2023
1 parent f3d8aae commit d09f667
Showing 32 changed files with 789 additions and 122 deletions.
2 changes: 1 addition & 1 deletion .gitmodules
@@ -12,4 +12,4 @@
url = https://github.com/Arachnid/solidity-stringutils
[submodule "barretenberg/sol/lib/openzeppelin-contracts"]
path = barretenberg/sol/lib/openzeppelin-contracts
url = https://github.com/OpenZeppelin/openzeppelin-contracts
url = https://github.com/OpenZeppelin/openzeppelin-contracts
2 changes: 1 addition & 1 deletion barretenberg/acir_tests/Dockerfile.bb
@@ -8,4 +8,4 @@ COPY . .
# Run every acir test through native bb build "prove_and_verify".
RUN FLOW=all_cmds ./run_acir_tests.sh
# Run 1_mul through native bb build, all_cmds flow, to test all cli args.
RUN VERBOSE=1 FLOW=all_cmds ./run_acir_tests.sh 1_mul
RUN VERBOSE=1 FLOW=all_cmds ./run_acir_tests.sh 1_mul
13 changes: 13 additions & 0 deletions barretenberg/acir_tests/Dockerfile.bb.sol
@@ -0,0 +1,13 @@
FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-assert
FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-sol

FROM node:18-alpine
RUN apk update && apk add git bash curl jq
COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build
COPY --from=1 /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol
COPY --from=ghcr.io/foundry-rs/foundry:latest /usr/local/bin/anvil /usr/local/bin/anvil
WORKDIR /usr/src/barretenberg/acir_tests
COPY . .
# Run every acir test through a solidity verifier.
RUN (cd sol-test && yarn)
RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh
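
For local experimentation, the image can be built from the acir_tests directory, assuming the two barretenberg base images referenced in the FROM lines are already available locally (a sketch, not the exact CI invocation):

  cd barretenberg/acir_tests
  docker build -f Dockerfile.bb.sol .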
28 changes: 28 additions & 0 deletions barretenberg/acir_tests/bash_helpers/catch.sh
@@ -0,0 +1,28 @@
#!/bin/bash

# Handler for SIGCHLD: clean up if a child exits with an error
handle_sigchild() {
for pid in "${pids[@]}"; do
# If process is no longer running
if ! kill -0 "$pid" 2>/dev/null; then
# Wait for the process and get exit status
wait "$pid"
status=$?

# If exit status is error
if [ $status -ne 0 ]; then
# Create error file
touch "$error_file"
fi
fi
done
}

check_error_file() {
# If error file exists, exit with error
if [ -f "$error_file" ]; then
rm "$error_file"
echo "Error occurred in one or more child processes. Exiting..."
exit 1
fi
}
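
A minimal sketch of how these helpers are meant to be wired up (it mirrors the run_acir_tests.sh changes further down; the explicit pids+=($!) bookkeeping and the placeholder some_test command are assumptions for illustration):

  #!/bin/bash
  error_file="/tmp/error.$$"
  pids=()
  source ./bash_helpers/catch.sh
  trap handle_sigchild SIGCHLD

  some_test &       # hypothetical long-running child
  pids+=($!)        # record the pid so handle_sigchild can inspect it

  wait              # wait for all children to finish
  check_error_file  # exit 1 if any child flagged a failure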
23 changes: 23 additions & 0 deletions barretenberg/acir_tests/flows/sol.sh
@@ -0,0 +1,23 @@
#!/bin/sh
set -eu

export PROOF="$(pwd)/proof"
export PROOF_AS_FIELDS="$(pwd)/proof_fields.json"

# Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs
$BIN prove -o proof
$BIN write_vk -o vk
$BIN proof_as_fields -k vk -c $CRS_PATH -p $PROOF
$BIN contract -k vk -c $CRS_PATH -b ./target/acir.gz -o Key.sol

# Export the paths to the environment variables for the js test runner
export KEY_PATH="$(pwd)/Key.sol"
export VERIFIER_PATH=$(realpath "../../sol-test/Verifier.sol")
export TEST_PATH=$(realpath "../../sol-test/Test.sol")
export BASE_PATH=$(realpath "../../../sol/src/ultra/BaseUltraVerifier.sol")

# Use solcjs to compile the generated key contract with the template verifier and test contract.
# index.js will start an anvil instance on a random port,
# deploy the verifier, then send a test transaction.
export TEST_NAME=$(basename $(pwd))
node ../../sol-test/src/index.js
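
To exercise this flow against a single artifact, the existing runner arguments can be reused (a sketch; the 1_mul test name and VERBOSE flag are taken from Dockerfile.bb above, and the sol-test dependencies must already be installed):

  cd barretenberg/acir_tests
  VERBOSE=1 FLOW=sol ./run_acir_tests.sh 1_mul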
19 changes: 18 additions & 1 deletion barretenberg/acir_tests/run_acir_tests.sh
@@ -4,6 +4,12 @@
# VERBOSE: to enable logging for each test.
set -eu

# Catch errors from child processes when running in parallel
error_file="/tmp/error.$$"
pids=()
source ./bash_helpers/catch.sh
trap handle_sigchild SIGCHLD

BIN=${BIN:-../cpp/build/bin/bb}
FLOW=${FLOW:-prove_and_verify}
CRS_PATH=~/.bb-crs
@@ -43,6 +49,7 @@ function test() {
echo -e "\033[32mPASSED\033[0m ($duration ms)"
else
echo -e "\033[31mFAILED\033[0m"
touch "$error_file"
exit 1
fi

@@ -68,6 +75,16 @@ else
continue
fi

test $TEST_NAME
# If parallel flag is set, run in parallel
if [ -n "${PARALLEL:-}" ]; then
test $TEST_NAME &
else
test $TEST_NAME
fi
done
fi

wait

# Check for parallel errors
check_error_file
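
The parallel machinery is independent of the flow being run; for example (flow names as already used by the existing scripts):

  ./run_acir_tests.sh                                    # serial, default prove_and_verify flow
  PARALLEL=1 FLOW=prove_and_verify ./run_acir_tests.sh   # run each test in a background child, then wait and check_error_file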
19 changes: 19 additions & 0 deletions barretenberg/acir_tests/sol-test/Test.sol
@@ -0,0 +1,19 @@
// THIS FILE WILL NOT COMPILE BY ITSELF
// Compilation is handled in `src/index.js` where solcjs gathers the dependencies

pragma solidity >=0.8.4;

import {Verifier} from "./Verifier.sol";

contract Test {
Verifier verifier;

constructor() {
verifier = new Verifier();
}

function test(bytes calldata proof, bytes32[] calldata publicInputs) view public returns(bool) {
return verifier.verify(proof, publicInputs);
}
}

19 changes: 19 additions & 0 deletions barretenberg/acir_tests/sol-test/Verifier.sol
@@ -0,0 +1,19 @@
// THIS FILE WILL NOT COMPILE BY ITSELF
// Compilation is handled in `src/index.js` where solcjs gathers the dependencies

// SPDX-License-Identifier: Apache-2.0
// Copyright 2022 Aztec
pragma solidity >=0.8.4;

import {UltraVerificationKey} from "./Key.sol";
import {BaseUltraVerifier} from "./BaseUltraVerifier.sol";

contract Verifier is BaseUltraVerifier {
function getVerificationKeyHash() public pure override(BaseUltraVerifier) returns (bytes32) {
return UltraVerificationKey.verificationKeyHash();
}

function loadVerificationKey(uint256 vk, uint256 _omegaInverseLoc) internal pure virtual override(BaseUltraVerifier) {
UltraVerificationKey.loadVerificationKey(vk, _omegaInverseLoc);
}
}
14 changes: 14 additions & 0 deletions barretenberg/acir_tests/sol-test/package.json
@@ -0,0 +1,14 @@
{
"name": "headless-test",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"type": "module",
"scripts": {
"start": "node ./src/index.js"
},
"dependencies": {
"ethers": "^6.8.1",
"solc": "^0.8.22"
}
}
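
The runner is normally invoked by flows/sol.sh, but the start script gives an equivalent entry point as long as the same environment variables (TEST_NAME, PROOF, PROOF_AS_FIELDS, KEY_PATH, VERIFIER_PATH, TEST_PATH, BASE_PATH) are exported first — a sketch:

  cd barretenberg/acir_tests/sol-test
  yarn install
  yarn start        # equivalent to `node ./src/index.js`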
191 changes: 191 additions & 0 deletions barretenberg/acir_tests/sol-test/src/index.js
@@ -0,0 +1,191 @@
import fs from "fs";
const {readFileSync, promises: fsPromises} = fs;
import {spawn} from "child_process";
import {ethers} from "ethers";
import solc from "solc";

const NUMBER_OF_FIELDS_IN_PROOF = 93;

// We use the solcjs compiler in this test to run it end to end; although it is slower than
// foundry, it simplifies parallelising the test suite

// What does this file do?
//
// 1. Launch an instance of anvil { on a random port, for parallelism }
// 2. Compile the solidity files using solcjs
// 3. Deploy the contract
// 4. Read the previously created proof, and append public inputs
// 5. Run the test against the deployed contract
// 6. Kill the anvil instance

const getEnvVar = (envvar) => {
const varVal = process.env[envvar];
if (!varVal) {
throw new Error(`Missing environment variable ${envvar}`);
}
return varVal;
}

// Test name is passed into environment from `flows/sol.sh`
const testName = getEnvVar("TEST_NAME");

// Get solidity files, passed into environment from `flows/sol.sh`
const keyPath = getEnvVar("KEY_PATH");
const verifierPath = getEnvVar("VERIFIER_PATH");
const testPath = getEnvVar("TEST_PATH");
const basePath = getEnvVar("BASE_PATH");
const encoding = {encoding: "utf8"};
const [key, test, verifier, base] = await Promise.all(
[
fsPromises.readFile(keyPath, encoding),
fsPromises.readFile(testPath, encoding),
fsPromises.readFile(verifierPath, encoding),
fsPromises.readFile(basePath, encoding)
]);

var input = {
language: 'Solidity',
sources: {
'Key.sol': {
content: key
},
'Test.sol': {
content: test
},
'Verifier.sol': {
content: verifier
},
'BaseUltraVerifier.sol': {
content: base
}
},
settings: { // we require the optimiser
optimizer: {
enabled: true,
runs: 200
},
outputSelection: {
'*': {
'*': ['evm.bytecode.object', 'abi']
}
}
}
};

var output = JSON.parse(solc.compile(JSON.stringify(input)));
const contract = output.contracts['Test.sol']['Test'];
const bytecode = contract.evm.bytecode.object;
const abi = contract.abi;

/**
* Launch anvil on the given port,
* Resolves when ready, rejects when port is already allocated
* @param {Number} port
*/
const launchAnvil = async (port) => {
const handle = spawn("anvil", ["-p", port]);

// wait until the anvil instance is ready on port
await new Promise((resolve, reject) => {
// If we get an error reject, which will cause the caller to retry on a new port
handle.stderr.on("data", (data) => {
const str = data.toString();
if (str.includes("error binding")) {
reject("we go again baby")
}
});

// If we get a success resolve, anvil is ready
handle.stdout.on("data", (data) => {
const str = data.toString();
if (str.includes("Listening on")) {
resolve(undefined);
}
});
});

return handle;
}

/**
* Deploys the contract
* @param {ethers.Signer} signer
*/
const deploy = async (signer) => {
const factory = new ethers.ContractFactory(abi, bytecode, signer);
const deployment = await factory.deploy();
const deployed = await deployment.waitForDeployment();
return await deployed.getAddress();
}

/**
* Takes in a proof as fields, and returns the public inputs, as well as the number of public inputs
* @param {Array<String>} proofAsFields
* @return {Array} [number, Array<String>]
*/
const readPublicInputs = (proofAsFields) => {
const publicInputs = [];
// A proof with no public inputs is 93 fields long
const numPublicInputs = proofAsFields.length - NUMBER_OF_FIELDS_IN_PROOF;
for (let i = 0; i < numPublicInputs; i++) {
publicInputs.push(proofAsFields[i]);
}
return [numPublicInputs, publicInputs];
}

/**
* Get Anvil
*
* Creates an anvil instance on a random port, and returns the instance and the port
 * If the port is already allocated, it will try again
* @returns {[ChildProcess, Number]} [anvil, port]
*/
const getAnvil = async () => {
const port = Math.floor(Math.random() * 10000) + 10000;
try {
return [await launchAnvil(port), port];
} catch (e) {
// Recursive call should try again on a new port in the rare case the port is already taken
// yes this looks dangerous, but with ~10000 possible ports a collision is unlikely
return getAnvil();
}
}

const [anvil, randomPort] = await getAnvil();
const killAnvil = () => {
anvil.kill();
console.log(testName, " complete")
}

try {
const proofAsFieldsPath = getEnvVar("PROOF_AS_FIELDS");
const proofAsFields = readFileSync(proofAsFieldsPath);
const [numPublicInputs, publicInputs] = readPublicInputs(JSON.parse(proofAsFields.toString()));

const proofPath = getEnvVar("PROOF");
const proof = readFileSync(proofPath);

  // Cut the public inputs off the front of the proof string
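  // Each field element is 32 bytes (64 hex characters) and the public inputs sit at the
  // front of the proof file, so skipping 64 * numPublicInputs hex characters leaves only
  // the proof body that the verifier contract expects in its `proof` argument.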
const proofStr = `0x${proof.toString("hex").substring(64*numPublicInputs)}`;

const key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
const provider = new ethers.JsonRpcProvider(`http://localhost:${randomPort}`);
const signer = new ethers.Wallet(key, provider);

// deploy
const address = await deploy(signer);
const contract = new ethers.Contract(address, abi, signer);

const result = await contract.test(proofStr, publicInputs);
if (!result) throw new Error("Test failed");
}
catch (e) {
console.error(testName, " failed")
console.log(e)
throw e;
}
finally {
// Kill anvil at the end of running
killAnvil();
}

