Skip to content

Commit

Permalink
feat: give up and make blob oracle for simulation
Browse files Browse the repository at this point in the history
  • Loading branch information
MirandaWood committed Nov 27, 2024
1 parent de14c32 commit b75a6f7
Show file tree
Hide file tree
Showing 15 changed files with 190 additions and 84 deletions.
1 change: 1 addition & 0 deletions noir-projects/noir-protocol-circuits/Nargo.template.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ members = [
"crates/rollup-base-public-simulated",
"crates/rollup-block-merge",
"crates/rollup-block-root",
"crates/rollup-block-root-simulated",
"crates/rollup-block-root-empty",
"crates/rollup-root",
]
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
mod blob_public_inputs;
mod blob;
mod mock_blob_oracle;
mod unconstrained_config;
// TODO(#9982): Replace unconstrained_config with config and import ROOTS - calculating ROOTS in unconstrained is insecure.
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
use crate::blob_public_inputs::{BlobCommitment, BlockBlobPublicInputs};
use types::{
abis::sponge_blob::SpongeBlob,
constants::{BLOB_PUBLIC_INPUTS, BLOBS_PER_BLOCK, FIELDS_PER_BLOB},
};
// TODO(BLOB_ORACLE): this was added to save simulation time (~1min in ACVM, ~3mins in wasm -> 500ms).
// The use of bignum adds a lot of unconstrained code which overloads limits when simulating.
// If/when simulation times of unconstrained are improved, remove this.
/// Simulation-only stand-in for the real blob evaluation: forwards all inputs
/// to the `evaluateBlobs` foreign call and deserializes the fields it returns
/// into `BlockBlobPublicInputs`.
/// NOTE(review): assumes the TS-side handler returns exactly
/// BLOB_PUBLIC_INPUTS * BLOBS_PER_BLOCK fields in `deserialize` order — confirm
/// against the simulator's `evaluateBlobs` implementation.
pub unconstrained fn evaluate_blobs(
    blobs_as_fields: [Field; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
    kzg_commitments: [BlobCommitment; BLOBS_PER_BLOCK],
    mut sponge_blob: SpongeBlob,
) -> BlockBlobPublicInputs {
    BlockBlobPublicInputs::deserialize(evaluate_blobs_oracle(
        blobs_as_fields,
        kzg_commitments,
        sponge_blob,
    ))
}

// Foreign-call hook resolved by the simulator's `evaluateBlobs` handler.
// The empty body is intentional: oracle functions have no in-circuit
// implementation — the runtime substitutes the handler's return value.
// NOTE(review): presumably the handler encodes one BlockBlobPublicInputs as
// BLOB_PUBLIC_INPUTS * BLOBS_PER_BLOCK fields — verify against the TS side.
#[oracle(evaluateBlobs)]
unconstrained fn evaluate_blobs_oracle(
blobs_as_fields: [Field; FIELDS_PER_BLOB * BLOBS_PER_BLOCK],
kzg_commitments: [BlobCommitment; BLOBS_PER_BLOCK],
mut sponge_blob: SpongeBlob,
) -> [Field; BLOB_PUBLIC_INPUTS * BLOBS_PER_BLOCK] {}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
[package]
name = "rollup_block_root_simulated"
type = "bin"
authors = [""]
compiler_version = ">=0.18.0"

[dependencies]
rollup_lib = { path = "../rollup-lib" }
types = { path = "../types" }
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
use dep::rollup_lib::block_root::{BlockRootOrBlockMergePublicInputs, BlockRootRollupInputs};

// Simulation-only entry point for the block root rollup: runs the whole
// circuit as unconstrained code so that the blob-evaluation foreign call
// (see mock_blob_oracle) can be resolved by the simulator instead of being
// proven in-circuit.
unconstrained fn main(inputs: BlockRootRollupInputs) -> pub BlockRootOrBlockMergePublicInputs {
inputs.block_root_rollup_circuit()
}
Original file line number Diff line number Diff line change
Expand Up @@ -131,11 +131,22 @@ impl BlockRootRollupInputs {
FeeRecipient { recipient: left.constants.global_variables.coinbase, value: total_fees };

let mut blob_public_inputs = [BlockBlobPublicInputs::empty(); AZTEC_MAX_EPOCH_DURATION];
blob_public_inputs[0] = evaluate_blobs(
self.blobs_fields,
self.blob_commitments,
right.end_sponge_blob,
);
if !dep::std::runtime::is_unconstrained() {
blob_public_inputs[0] = evaluate_blobs(
self.blobs_fields,
self.blob_commitments,
right.end_sponge_blob,
);
} else {
// TODO(BLOB_ORACLE): this was added to save simulation time, if/when simulation times of unconstrained are improved, remove this.
blob_public_inputs[0] = unsafe {
blob::mock_blob_oracle::evaluate_blobs(
self.blobs_fields,
self.blob_commitments,
right.end_sponge_blob,
)
};
}

BlockRootOrBlockMergePublicInputs {
previous_archive: left.constants.last_archive, // archive before this block was added
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,9 @@ mod tests {
}

#[test]
unconstrained fn check_blob() {
fn check_blob() {
// Note: this test will not run in unconstrained, because it reaches the foreign call
// TODO(BLOB_ORACLE): remove the oracle and switch this test to unconstrained
let inputs = default_block_root_rollup_inputs();
let outputs = inputs.block_root_rollup_circuit();

Expand Down
34 changes: 9 additions & 25 deletions yarn-project/bb-prover/src/test/test_circuit_prover.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ import {
import { createDebugLogger } from '@aztec/foundation/log';
import { sleep } from '@aztec/foundation/sleep';
import { Timer } from '@aztec/foundation/timer';
import { fileURLToPath } from '@aztec/foundation/url';
import {
ProtocolCircuitVks,
type ServerProtocolArtifact,
Expand All @@ -47,8 +46,6 @@ import {
convertBaseParityOutputsFromWitnessMap,
convertBlockMergeRollupInputsToWitnessMap,
convertBlockMergeRollupOutputsFromWitnessMap,
convertBlockRootRollupInputsToWitnessMap,
convertBlockRootRollupOutputsFromWitnessMap,
convertEmptyBlockRootRollupInputsToWitnessMap,
convertEmptyBlockRootRollupOutputsFromWitnessMap,
convertMergeRollupInputsToWitnessMap,
Expand All @@ -58,22 +55,18 @@ import {
convertRootParityOutputsFromWitnessMap,
convertRootRollupInputsToWitnessMap,
convertRootRollupOutputsFromWitnessMap,
convertSimulatedBlockRootRollupInputsToWitnessMap,
convertSimulatedBlockRootRollupOutputsFromWitnessMap,
convertSimulatedPrivateBaseRollupInputsToWitnessMap,
convertSimulatedPrivateBaseRollupOutputsFromWitnessMap,
convertSimulatedPrivateKernelEmptyOutputsFromWitnessMap,
convertSimulatedPublicBaseRollupInputsToWitnessMap,
convertSimulatedPublicBaseRollupOutputsFromWitnessMap,
} from '@aztec/noir-protocol-circuits-types';
import {
NativeACVMSimulator,
type SimulationProvider,
WASMSimulator,
emitCircuitSimulationStats,
} from '@aztec/simulator';
import { type SimulationProvider, WASMSimulator, emitCircuitSimulationStats } from '@aztec/simulator';
import { type TelemetryClient, trackSpan } from '@aztec/telemetry-client';

import { type WitnessMap } from '@noir-lang/types';
import path from 'path';

import { ProverInstrumentation } from '../instrumentation.js';
import { mapProtocolArtifactNameToCircuitName } from '../stats.js';
Expand Down Expand Up @@ -222,8 +215,8 @@ export class TestCircuitProver implements ServerCircuitProver {
input,
'BlockRootRollupArtifact',
NESTED_RECURSIVE_PROOF_LENGTH,
convertBlockRootRollupInputsToWitnessMap,
convertBlockRootRollupOutputsFromWitnessMap,
convertSimulatedBlockRootRollupInputsToWitnessMap,
convertSimulatedBlockRootRollupOutputsFromWitnessMap,
);
}

Expand Down Expand Up @@ -321,19 +314,10 @@ export class TestCircuitProver implements ServerCircuitProver {
const circuitName = mapProtocolArtifactNameToCircuitName(artifactName);

let simulationProvider = this.simulationProvider ?? this.wasmSimulator;
// With the blob circuit, we require a long array of constants and lots of unconstrained.
// Unfortunately, this overflows wasm limits, so cannot be simulated via wasm.
// The below forces use of the native simulator just for this circuit:
if (artifactName == 'BlockRootRollupArtifact' && !(simulationProvider instanceof NativeACVMSimulator)) {
simulationProvider = new NativeACVMSimulator(
process.env.TEMP_DIR || `/tmp`,
process.env.ACVM_BINARY_PATH ||
`${path.resolve(
path.dirname(fileURLToPath(import.meta.url)),
'../../../../noir/',
process.env.NOIR_RELEASE_DIRECTORY || 'noir-repo/target/release',
)}/acvm`,
);
if (artifactName == 'BlockRootRollupArtifact') {
// TODO(BLOB_ORACLE): temporarily force the block root circuit to use wasm while we simulate
// the blob operations with an oracle. There appears to be no way to provide the native ACVM simulator with a foreign call handler.
simulationProvider = this.wasmSimulator;
}
const witness = await simulationProvider.simulateCircuit(witnessMap, SimulatedServerCircuitArtifacts[artifactName]);

Expand Down
11 changes: 5 additions & 6 deletions yarn-project/circuits.js/src/structs/sponge_blob.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,11 @@ describe('SpongeBlob', () => {
expect(res).toEqual(spongeBlob);
});

// TODO(Miranda): reinstate if we need to/from fields
// it('serializes to field array and deserializes it back', () => {
// const fieldArray = spongeBlob.toFields();
// const res = SpongeBlob.fromFields(fieldArray);
// expect(res).toEqual(spongeBlob);
// });
// Round-trip check: field-array encoding must be lossless.
it('serializes to field array and deserializes it back', () => {
  const asFields = spongeBlob.toFields();
  const roundTripped = SpongeBlob.fromFields(asFields);
  expect(roundTripped).toEqual(spongeBlob);
});

it('number of fields matches constant', () => {
const fields = spongeBlob.sponge.cache.concat([
Expand Down
45 changes: 43 additions & 2 deletions yarn-project/circuits.js/src/structs/sponge_blob.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,13 @@
import { makeTuple } from '@aztec/foundation/array';
import { type FieldsOf, makeTuple } from '@aztec/foundation/array';
import { poseidon2Permutation } from '@aztec/foundation/crypto';
import { Fr } from '@aztec/foundation/fields';
import { BufferReader, type Tuple, serializeToBuffer } from '@aztec/foundation/serialize';
import {
BufferReader,
FieldReader,
type Tuple,
serializeToBuffer,
serializeToFields,
} from '@aztec/foundation/serialize';

/**
* A Poseidon2 sponge used to accumulate data that will be added to a blob.
Expand All @@ -26,6 +32,23 @@ export class SpongeBlob {
return serializeToBuffer(this.sponge, this.fields, this.expectedFields);
}

// Canonical field-encoding order for SpongeBlob. toFields and fromFields both
// rely on this exact order — keep all three in sync when adding members.
static getFields(fields: FieldsOf<SpongeBlob>) {
return [fields.sponge, fields.fields, fields.expectedFields];
}

/** Encodes this sponge blob as a flat field array (inverse of fromFields). */
toFields(): Fr[] {
  const parts = SpongeBlob.getFields(this);
  return serializeToFields(...parts);
}

/**
 * Decodes a SpongeBlob from a field array produced by toFields.
 * Reads members in the order declared by getFields: sponge, then the two
 * numeric counters (each carried as a single field).
 */
static fromFields(fields: Fr[] | FieldReader): SpongeBlob {
  const reader = FieldReader.asReader(fields);
  const sponge = reader.readObject(Poseidon2Sponge);
  const fieldCount = reader.readField().toNumber();
  const expectedFieldCount = reader.readField().toNumber();
  return new SpongeBlob(sponge, fieldCount, expectedFieldCount);
}

clone() {
return SpongeBlob.fromBuffer(this.toBuffer());
}
Expand Down Expand Up @@ -81,6 +104,24 @@ export class Poseidon2Sponge {
return serializeToBuffer(this.cache, this.state, this.cacheSize, this.squeezeMode);
}

// Canonical field-encoding order for Poseidon2Sponge. toFields and fromFields
// both rely on this exact order — keep all three in sync when adding members.
static getFields(fields: FieldsOf<Poseidon2Sponge>) {
return [fields.cache, fields.state, fields.cacheSize, fields.squeezeMode];
}

/** Encodes this sponge as a flat field array (inverse of fromFields). */
toFields(): Fr[] {
  const parts = Poseidon2Sponge.getFields(this);
  return serializeToFields(...parts);
}

/**
 * Decodes a Poseidon2Sponge from a field array produced by toFields.
 * Layout (per getFields): 3-field cache, 4-field state, cacheSize as a
 * single field, squeezeMode as a boolean-encoded field.
 */
static fromFields(fields: Fr[] | FieldReader): Poseidon2Sponge {
  const reader = FieldReader.asReader(fields);
  const cache = reader.readFieldArray(3);
  const state = reader.readFieldArray(4);
  const cacheSize = reader.readField().toNumber();
  const squeezeMode = reader.readBoolean();
  return new Poseidon2Sponge(cache, state, cacheSize, squeezeMode);
}

static empty(): Poseidon2Sponge {
return new Poseidon2Sponge(
makeTuple(3, () => Fr.ZERO),
Expand Down
8 changes: 3 additions & 5 deletions yarn-project/end-to-end/src/e2e_epochs.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ describe('e2e_epochs', () => {
let handle: NodeJS.Timeout;

const EPOCH_DURATION = 4;
const L1_BLOCK_TIME = 25;
const L1_BLOCK_TIME = 5;
const L2_SLOT_DURATION_IN_L1_BLOCKS = 2;

beforeAll(async () => {
Expand Down Expand Up @@ -112,7 +112,7 @@ describe('e2e_epochs', () => {

/** Waits until the given L2 block number is mined. */
const waitUntilL2BlockNumber = async (target: number) => {
await retryUntil(() => Promise.resolve(target === l2BlockNumber), `Wait until L2 block ${target}`, 200, 0.1);
await retryUntil(() => Promise.resolve(target === l2BlockNumber), `Wait until L2 block ${target}`, 60, 0.1);
};

it('does not allow submitting proof after epoch end', async () => {
Expand All @@ -130,8 +130,6 @@ describe('e2e_epochs', () => {
sequencerDelayer.pauseNextTxUntilTimestamp(epoch2Start + BigInt(L1_BLOCK_TIME));

// Next sequencer to publish a block should trigger a rollback to block 1
// The below is a bit of a hack - to avoid the waitUntilL1Timestamp timing out, I wait until the reorg back to block 1 is complete
await waitUntilL2BlockNumber(1);
await waitUntilL1Timestamp(l1Client, epoch2Start + BigInt(L1_BLOCK_TIME));
expect(await rollup.getBlockNumber()).toEqual(1n);
expect(await rollup.getSlotNumber()).toEqual(8n);
Expand All @@ -145,5 +143,5 @@ describe('e2e_epochs', () => {
const lastL2BlockTxReceipt = await l1Client.getTransactionReceipt({ hash: lastL2BlockTxHash! });
expect(lastL2BlockTxReceipt.status).toEqual('success');
expect(lastL2BlockTxReceipt.blockNumber).toBeGreaterThan(lastProverTxReceipt!.blockNumber);
}, 400_000);
});
});
3 changes: 2 additions & 1 deletion yarn-project/noir-protocol-circuits-types/src/artifacts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import PublicBaseRollupSimulatedJson from '../artifacts/rollup_base_public_simul
import BlockMergeRollupJson from '../artifacts/rollup_block_merge.json' assert { type: 'json' };
import BlockRootRollupJson from '../artifacts/rollup_block_root.json' assert { type: 'json' };
import EmptyBlockRootRollupJson from '../artifacts/rollup_block_root_empty.json' assert { type: 'json' };
import BlockRootRollupSimulatedJson from '../artifacts/rollup_block_root_simulated.json' assert { type: 'json' };
import MergeRollupJson from '../artifacts/rollup_merge.json' assert { type: 'json' };
import RootRollupJson from '../artifacts/rollup_root.json' assert { type: 'json' };
import {
Expand Down Expand Up @@ -74,7 +75,7 @@ export const SimulatedServerCircuitArtifacts: Record<ServerProtocolArtifact, Noi
PrivateBaseRollupArtifact: PrivateBaseRollupSimulatedJson as NoirCompiledCircuit,
PublicBaseRollupArtifact: PublicBaseRollupSimulatedJson as NoirCompiledCircuit,
MergeRollupArtifact: MergeRollupJson as NoirCompiledCircuit,
BlockRootRollupArtifact: BlockRootRollupJson as NoirCompiledCircuit,
BlockRootRollupArtifact: BlockRootRollupSimulatedJson as NoirCompiledCircuit,
EmptyBlockRootRollupArtifact: EmptyBlockRootRollupJson as NoirCompiledCircuit,
BlockMergeRollupArtifact: BlockMergeRollupJson as NoirCompiledCircuit,
RootRollupArtifact: RootRollupJson as NoirCompiledCircuit,
Expand Down
Loading

0 comments on commit b75a6f7

Please sign in to comment.