diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts
index 523565aea4e4..37b0f6da3b69 100644
--- a/yarn-project/archiver/src/index.ts
+++ b/yarn-project/archiver/src/index.ts
@@ -13,8 +13,9 @@
 export * from './archiver/index.js';
 export * from './rpc/index.js';
 export * from './factory.js';
-// We are not storing the info from these events in the archiver for now (and we don't really need to), so we expose this query directly
-export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
+export { retrieveL2ProofVerifiedEvents, retrieveBlockMetadataFromRollup } from './archiver/data_retrieval.js';
+
+export { getL2BlockProposedLogs } from './archiver/eth_log_handlers.js';

 const log = createDebugLogger('aztec:archiver');

diff --git a/yarn-project/cli/package.json b/yarn-project/cli/package.json
index 7c9959d2b1e9..c0f4af797327 100644
--- a/yarn-project/cli/package.json
+++ b/yarn-project/cli/package.json
@@ -72,6 +72,7 @@
     "@iarna/toml": "^2.2.5",
     "@libp2p/peer-id-factory": "^3.0.4",
     "commander": "^12.1.0",
+    "lodash.chunk": "^4.2.0",
     "lodash.groupby": "^4.6.0",
     "semver": "^7.5.4",
     "solc": "^0.8.26",
@@ -86,6 +87,7 @@
     "@aztec/protocol-contracts": "workspace:^",
     "@jest/globals": "^29.5.0",
     "@types/jest": "^29.5.0",
+    "@types/lodash.chunk": "^4.2.9",
     "@types/lodash.groupby": "^4.6.9",
     "@types/lodash.startcase": "^4.4.7",
     "@types/node": "^18.7.23",
diff --git a/yarn-project/cli/src/cmds/l1/index.ts b/yarn-project/cli/src/cmds/l1/index.ts
index f3ca73f36831..80a490fa1b04 100644
--- a/yarn-project/cli/src/cmds/l1/index.ts
+++ b/yarn-project/cli/src/cmds/l1/index.ts
@@ -198,8 +198,10 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL
       ETHEREUM_HOST,
     )
     .addOption(l1ChainIdOption)
-    .option('--start-block <number>', 'The block number to start from', parseBigint, 1n)
+    .option('--start-block <number>', 'The L1 block number to start from', parseBigint, 1n)
+    .option('--end-block <number>', 'The last L1 block number to query', parseBigint)
     .option('--batch-size <number>', 'The number of blocks to query in each batch', parseBigint, 100n)
+    .option('--proving-timeout <number>', 'Cutoff for proving time to consider a block', parseBigint)
     .option('--l1-rollup-address <string>', 'Address of the rollup contract (required if node URL is not set)')
     .option(
       '--node-url <string>',
@@ -207,8 +209,18 @@ export function injectCommands(program: Command, log: LogFn, debugLogger: DebugL
     )
     .action(async options => {
       const { proverStats } = await import('./prover_stats.js');
-      const { l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl } = options;
-      await proverStats({ l1RpcUrl, chainId, l1RollupAddress, startBlock, batchSize, nodeUrl, log });
+      const { l1RpcUrl, chainId, l1RollupAddress, startBlock, endBlock, batchSize, nodeUrl, provingTimeout } = options;
+      await proverStats({
+        l1RpcUrl,
+        chainId,
+        l1RollupAddress,
+        startBlock,
+        endBlock,
+        batchSize,
+        nodeUrl,
+        provingTimeout,
+        log,
+      });
     });

   return program;
diff --git a/yarn-project/cli/src/cmds/l1/prover_stats.ts b/yarn-project/cli/src/cmds/l1/prover_stats.ts
index 4cda8a13a455..0ca48cc1d01d 100644
--- a/yarn-project/cli/src/cmds/l1/prover_stats.ts
+++ b/yarn-project/cli/src/cmds/l1/prover_stats.ts
@@ -1,11 +1,13 @@
-import { retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
+import { getL2BlockProposedLogs, retrieveL2ProofVerifiedEvents } from '@aztec/archiver';
 import { createAztecNodeClient } from '@aztec/circuit-types';
 import { EthAddress } from '@aztec/circuits.js';
 import { createEthereumChain } from '@aztec/ethereum';
-import { type LogFn, createDebugLogger } from '@aztec/foundation/log';
+import { unique } from '@aztec/foundation/collection';
+import { type LogFn, type Logger, createDebugLogger } from '@aztec/foundation/log';

+import chunk from 'lodash.chunk';
 import groupBy from 'lodash.groupby';
-import { createPublicClient, http } from 'viem';
+import { type PublicClient, createPublicClient, http } from 'viem';

 export async function proverStats(opts: {
   l1RpcUrl: string;
@@ -14,13 +16,16 @@ export async function proverStats(opts: {
   nodeUrl: string | undefined;
   log: LogFn;
   startBlock: bigint;
+  endBlock: bigint | undefined;
   batchSize: bigint;
+  provingTimeout: bigint | undefined;
 }) {
   const debugLog = createDebugLogger('aztec:cli:prover_stats');
-  const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, log } = opts;
+  const { startBlock, chainId, l1RpcUrl, l1RollupAddress, batchSize, nodeUrl, provingTimeout, endBlock, log } = opts;
   if (!l1RollupAddress && !nodeUrl) {
     throw new Error('Either L1 rollup address or node URL must be set');
   }
+
   const rollup = l1RollupAddress
     ? EthAddress.fromString(l1RollupAddress)
     : await createAztecNodeClient(nodeUrl!)
@@ -29,22 +34,106 @@ export async function proverStats(opts: {
   const chain = createEthereumChain(l1RpcUrl, chainId).chainInfo;
   const publicClient = createPublicClient({ chain, transport: http(l1RpcUrl) });

-  const lastBlockNum = await publicClient.getBlockNumber();
+  const lastBlockNum = endBlock ?? (await publicClient.getBlockNumber());
   debugLog.verbose(`Querying events on rollup at ${rollup.toString()} from ${startBlock} up to ${lastBlockNum}`);

+  // Get all events for L2 proof submissions
+  const events = await getL2ProofVerifiedEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);
+
+  // If we don't have a proving timeout, we can just count the number of unique blocks per prover
+  if (!provingTimeout) {
+    const stats = groupBy(events, 'proverId');
+    log(`prover_id, total_blocks_proven`);
+    for (const proverId in stats) {
+      const uniqueBlocks = new Set(stats[proverId].map(e => e.l2BlockNumber));
+      log(`${proverId}, ${uniqueBlocks.size}`);
+    }
+    return;
+  }
+
+  // But if we do, fetch the events for each block submitted, so we can look up their timestamp
+  const blockEvents = await getL2BlockEvents(startBlock, lastBlockNum, batchSize, debugLog, publicClient, rollup);
+
+  // Get the timestamps for every block on every log, both for proof and block submissions
+  const l1BlockNumbers = unique([...events.map(e => e.l1BlockNumber), ...blockEvents.map(e => e.blockNumber)]);
+  const l1BlockTimestamps: Record<string, bigint> = {};
+  for (const l1Batch of chunk(l1BlockNumbers, Number(batchSize))) {
+    const blocks = await Promise.all(
+      l1Batch.map(blockNumber => publicClient.getBlock({ includeTransactions: false, blockNumber })),
+    );
+    debugLog.verbose(`Queried ${blocks.length} L1 blocks between ${l1Batch[0]} and ${l1Batch[l1Batch.length - 1]}`);
+    for (const block of blocks) {
+      l1BlockTimestamps[block.number.toString()] = block.timestamp;
+    }
+  }
+
+  // Map from l2 block number to the l1 block in which it was submitted
+  const l2BlockSubmissions: Record<string, bigint> = {};
+  for (const blockEvent of blockEvents) {
+    l2BlockSubmissions[blockEvent.args.blockNumber.toString()] = blockEvent.blockNumber;
+  }
+
+  // Now calculate stats
+  const stats = groupBy(events, 'proverId');
+  log(`prover_id, blocks_proven_within_timeout, total_blocks_proven, avg_proving_time`);
+  for (const proverId in stats) {
+    const blocks = stats[proverId].map(e => {
+      const provenTimestamp = l1BlockTimestamps[e.l1BlockNumber.toString()];
+      const uploadedBlockNumber = l2BlockSubmissions[e.l2BlockNumber.toString()];
+      const uploadedTimestamp = l1BlockTimestamps[uploadedBlockNumber.toString()];
+      const provingTime = provenTimestamp - uploadedTimestamp;
+      debugLog.debug(
+        `prover=${e.proverId} blockNumber=${e.l2BlockNumber} uploaded=${uploadedTimestamp} proven=${provenTimestamp} time=${provingTime}`,
+      );
+      return { provenTimestamp, uploadedTimestamp, provingTime, ...e };
+    });
+
+    const withinTimeout = blocks.filter(b => b.provingTime <= provingTimeout);
+    const uniqueBlocksWithinTimeout = new Set(withinTimeout.map(e => e.l2BlockNumber));
+    const uniqueBlocks = new Set(stats[proverId].map(e => e.l2BlockNumber));
+    const avgProvingTime = Math.ceil(Number(blocks.reduce((acc, b) => acc + b.provingTime, 0n)) / blocks.length);
+
+    log(`${proverId}, ${uniqueBlocksWithinTimeout.size}, ${uniqueBlocks.size}, ${avgProvingTime}`);
+  }
+  return;
+}
+
+async function getL2ProofVerifiedEvents(
+  startBlock: bigint,
+  lastBlockNum: bigint,
+  batchSize: bigint,
+  debugLog: Logger,
+  publicClient: PublicClient,
+  rollup: EthAddress,
+) {
   let blockNum = startBlock;
   const events = [];
   while (blockNum <= lastBlockNum) {
     const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
-    debugLog.verbose(`Querying events from block ${blockNum} to ${end}`);
     const newEvents = await retrieveL2ProofVerifiedEvents(publicClient, rollup, blockNum, end);
     events.push(...newEvents);
-    debugLog.verbose(`Got ${newEvents.length} events`);
+    debugLog.verbose(`Got ${newEvents.length} events querying l2 proof verified from block ${blockNum} to ${end}`);
     blockNum += batchSize;
   }
+  return events;
+}

-  const stats = groupBy(events, 'proverId');
-  for (const proverId in stats) {
-    log(`${proverId}, ${stats[proverId].length}`);
+async function getL2BlockEvents(
+  startBlock: bigint,
+  lastBlockNum: bigint,
+  batchSize: bigint,
+  debugLog: Logger,
+  publicClient: PublicClient,
+  rollup: EthAddress,
+) {
+  let blockNum = startBlock;
+  const events = [];
+  while (blockNum <= lastBlockNum) {
+    const end = blockNum + batchSize > lastBlockNum + 1n ? lastBlockNum + 1n : blockNum + batchSize;
+    const newEvents = await getL2BlockProposedLogs(publicClient, rollup, blockNum, end);
+    events.push(...newEvents);
+    debugLog.verbose(`Got ${newEvents.length} events querying l2 block submitted from block ${blockNum} to ${end}`);
+    blockNum += batchSize;
   }
+  return events;
 }
diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock
index 56cea10eb581..3c960d96f536 100644
--- a/yarn-project/yarn.lock
+++ b/yarn-project/yarn.lock
@@ -463,6 +463,7 @@ __metadata:
     "@jest/globals": ^29.5.0
     "@libp2p/peer-id-factory": ^3.0.4
     "@types/jest": ^29.5.0
+    "@types/lodash.chunk": ^4.2.9
     "@types/lodash.groupby": ^4.6.9
     "@types/lodash.startcase": ^4.4.7
     "@types/node": ^18.7.23
@@ -471,6 +472,7 @@ __metadata:
     commander: ^12.1.0
     jest: ^29.5.0
     jest-mock-extended: ^3.0.5
+    lodash.chunk: ^4.2.0
     lodash.groupby: ^4.6.0
     semver: ^7.5.4
     solc: ^0.8.26
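For reference, a minimal sketch of driving the reworked entrypoint directly from TypeScript, mirroring what the new CLI flags wire up. The option names and types come from the `proverStats` signature in this diff; the import path, RPC URL, chain id, and rollup address are illustrative assumptions, not values from the patch:

```typescript
// Hypothetical driver script for prover stats (all values are placeholders).
import { proverStats } from './cmds/l1/prover_stats.js';

await proverStats({
  l1RpcUrl: 'http://localhost:8545', // L1 JSON-RPC endpoint to query logs from
  chainId: 31337, // L1 chain id matching that endpoint
  l1RollupAddress: '0x0000000000000000000000000000000000000000', // rollup contract, or pass nodeUrl instead
  nodeUrl: undefined, // set to an Aztec node URL to fetch the rollup address automatically
  startBlock: 1n, // first L1 block to scan for events
  endBlock: undefined, // new option: defaults to the current L1 block number when omitted
  batchSize: 100n, // L1 blocks per log query; also reused to chunk the getBlock calls
  provingTimeout: 3600n, // new option: enables the blocks_proven_within_timeout column
  log: console.log, // LogFn that receives the CSV-style output lines
});
```

When `provingTimeout` is unset, the command only prints unique proven blocks per prover; when set, it additionally joins each proof-verified event against the L1 timestamp of the corresponding block-proposal log to compute per-block proving times.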