Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Get logs by tags #9353

Merged
merged 18 commits into from
Oct 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions yarn-project/archiver/src/archiver/archiver.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import {
type EncryptedL2NoteLog,
type FromLogType,
type GetUnencryptedLogsResponse,
type InboxLeaf,
Expand Down Expand Up @@ -627,6 +628,16 @@ export class Archiver implements ArchiveSource {
return this.store.getLogs(from, limit, logType);
}

/**
* Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
* @param tags - The tags to filter the logs by.
* @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
* that tag.
*/
getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]> {
return this.store.getLogsByTags(tags);
}

/**
* Gets unencrypted logs based on the provided filter.
* @param filter - The filter to apply to the logs.
Expand Down Expand Up @@ -924,6 +935,9 @@ class ArchiverStoreHelper
): Promise<L2BlockL2Logs<FromLogType<TLogType>>[]> {
return this.store.getLogs(from, limit, logType);
}
getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]> {
return this.store.getLogsByTags(tags);
}
getUnencryptedLogs(filter: LogFilter): Promise<GetUnencryptedLogsResponse> {
return this.store.getUnencryptedLogs(filter);
}
Expand Down
9 changes: 9 additions & 0 deletions yarn-project/archiver/src/archiver/archiver_store.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import {
type EncryptedL2NoteLog,
type FromLogType,
type GetUnencryptedLogsResponse,
type InboxLeaf,
Expand Down Expand Up @@ -136,6 +137,14 @@ export interface ArchiverDataStore {
logType: TLogType,
): Promise<L2BlockL2Logs<FromLogType<TLogType>>[]>;

/**
 * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
 * NOTE(review): the KV implementation matches on the first 32 bytes of the log data — logs shorter
 * than 32 bytes are never indexed and so never returned here.
 * @param tags - The tags to filter the logs by.
 * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
 * that tag.
 */
getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]>;

/**
* Gets unencrypted logs based on the provided filter.
* @param filter - The filter to apply to the logs.
Expand Down
118 changes: 118 additions & 0 deletions yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import {
makeExecutablePrivateFunctionWithMembershipProof,
makeUnconstrainedFunctionWithMembershipProof,
} from '@aztec/circuits.js/testing';
import { toBufferBE } from '@aztec/foundation/bigint-buffer';
import { times } from '@aztec/foundation/collection';
import { randomBytes, randomInt } from '@aztec/foundation/crypto';

Expand Down Expand Up @@ -354,6 +355,123 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
});
});

describe('getLogsByTags', () => {
  const txsPerBlock = 4;
  const numPrivateFunctionCalls = 3;
  const numNoteEncryptedLogs = 2;
  const numBlocks = 10;
  let blocks: L1Published<L2Block>[];
  // tags[blockIndex][txIndex] holds the first 32 bytes (the tag) of every note log in that tx.
  let tags: { [i: number]: { [j: number]: Buffer[] } } = {};

  beforeEach(async () => {
    blocks = times(numBlocks, (index: number) => ({
      data: L2Block.random(index + 1, txsPerBlock, numPrivateFunctionCalls, 2, numNoteEncryptedLogs, 2),
      l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
    }));
    // Last block has the note encrypted log tags of the first tx copied from the previous block,
    // so those tags resolve to two distinct logs (exercises duplicate-tag lookups).
    blocks[numBlocks - 1].data.body.noteEncryptedLogs.txLogs[0].functionLogs.forEach((fnLogs, fnIndex) => {
      fnLogs.logs.forEach((log, logIndex) => {
        const previousLogData =
          blocks[numBlocks - 2].data.body.noteEncryptedLogs.txLogs[0].functionLogs[fnIndex].logs[logIndex].data;
        previousLogData.copy(log.data, 0, 0, 32);
      });
    });
    // Last block has invalid tags in the second tx: the first 32 bytes encode Fr.MODULUS,
    // which is not a valid field element, so the store must skip indexing these logs.
    const tooBig = toBufferBE(Fr.MODULUS, 32);
    blocks[numBlocks - 1].data.body.noteEncryptedLogs.txLogs[1].functionLogs.forEach(fnLogs => {
      fnLogs.logs.forEach(log => {
        tooBig.copy(log.data, 0, 0, 32);
      });
    });

    await store.addBlocks(blocks);
    await store.addLogs(blocks.map(b => b.data));

    // Rebuild the tag lookup table from the blocks AFTER the mutations above.
    tags = {};
    blocks.forEach((b, blockIndex) => {
      if (!tags[blockIndex]) {
        tags[blockIndex] = {};
      }
      b.data.body.noteEncryptedLogs.txLogs.forEach((txLogs, txIndex) => {
        if (!tags[blockIndex][txIndex]) {
          tags[blockIndex][txIndex] = [];
        }
        tags[blockIndex][txIndex].push(...txLogs.unrollLogs().map(log => log.data.subarray(0, 32)));
      });
    });
  });

  it('is possible to batch request all logs of a tx via tags', async () => {
    // get random tx from any block that's not the last one (its tags are special-cased above)
    const targetBlockIndex = randomInt(numBlocks - 2);
    const targetTxIndex = randomInt(txsPerBlock);

    const logsByTags = await store.getLogsByTags(
      tags[targetBlockIndex][targetTxIndex].map(buffer => new Fr(buffer)),
    );

    const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs;
    expect(logsByTags.length).toEqual(expectedResponseSize);

    // Each tag is unique to one log, so every result array has exactly one entry.
    logsByTags.forEach((logsByTag, logIndex) => {
      expect(logsByTag).toHaveLength(1);
      const [log] = logsByTag;
      expect(log).toEqual(
        blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex],
      );
    });
  });

  it('is possible to batch request all logs of different blocks via tags', async () => {
    // get first tx of first block and second tx of second block
    const logsByTags = await store.getLogsByTags([...tags[0][0], ...tags[1][1]].map(buffer => new Fr(buffer)));

    const expectedResponseSize = 2 * numPrivateFunctionCalls * numNoteEncryptedLogs;
    expect(logsByTags.length).toEqual(expectedResponseSize);

    logsByTags.forEach(logsByTag => expect(logsByTag).toHaveLength(1));
  });

  it('is possible to batch request logs that have the same tag but different content', async () => {
    // get first tx of last block — its tags were copied from the previous block, so each tag matches two logs
    const logsByTags = await store.getLogsByTags(tags[numBlocks - 1][0].map(buffer => new Fr(buffer)));

    const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs;
    expect(logsByTags.length).toEqual(expectedResponseSize);

    logsByTags.forEach(logsByTag => {
      expect(logsByTag).toHaveLength(2);
      // Both matches share the same tag even though the log contents differ.
      const [tag0, tag1] = logsByTag.map(log => new Fr(log.data.subarray(0, 32)));
      expect(tag0).toEqual(tag1);
    });
  });

  it('is possible to request logs for non-existing tags and determine their position', async () => {
    // get random tx from any block that's not the last one
    const targetBlockIndex = randomInt(numBlocks - 2);
    const targetTxIndex = randomInt(txsPerBlock);

    // Replace the first tag with a random (unmatched) one; its slot must come back empty.
    const logsByTags = await store.getLogsByTags([
      Fr.random(),
      ...tags[targetBlockIndex][targetTxIndex].slice(1).map(buffer => new Fr(buffer)),
    ]);

    const expectedResponseSize = numPrivateFunctionCalls * numNoteEncryptedLogs;
    expect(logsByTags.length).toEqual(expectedResponseSize);

    const [emptyLogsByTag, ...populatedLogsByTags] = logsByTags;
    expect(emptyLogsByTag).toHaveLength(0);

    populatedLogsByTags.forEach((logsByTag, logIndex) => {
      expect(logsByTag).toHaveLength(1);
      const [log] = logsByTag;
      // Offset by 1: the first original tag was replaced by the random one above.
      expect(log).toEqual(
        blocks[targetBlockIndex].data.body.noteEncryptedLogs.txLogs[targetTxIndex].unrollLogs()[logIndex + 1],
      );
    });
  });
});

describe('getUnencryptedLogs', () => {
const txsPerBlock = 4;
const numPublicFunctionCalls = 3;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import {
type EncryptedL2NoteLog,
type FromLogType,
type GetUnencryptedLogsResponse,
type InboxLeaf,
Expand Down Expand Up @@ -239,6 +240,20 @@ export class KVArchiverDataStore implements ArchiverDataStore {
}
}

/**
* Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
* @param tags - The tags to filter the logs by.
* @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
* that tag.
*/
getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]> {
try {
return this.#logStore.getLogsByTags(tags);
} catch (err) {
return Promise.reject(err);
}
}

/**
* Gets unencrypted logs based on the provided filter.
* @param filter - The filter to apply to the logs.
Expand Down
109 changes: 88 additions & 21 deletions yarn-project/archiver/src/archiver/kv_archiver_store/log_store.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import {
EncryptedL2BlockL2Logs,
EncryptedL2NoteLog,
EncryptedNoteL2BlockL2Logs,
ExtendedUnencryptedL2Log,
type FromLogType,
Expand All @@ -12,26 +13,33 @@ import {
UnencryptedL2BlockL2Logs,
type UnencryptedL2Log,
} from '@aztec/circuit-types';
import { Fr } from '@aztec/circuits.js';
import { INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js/constants';
import { createDebugLogger } from '@aztec/foundation/log';
import { type AztecKVStore, type AztecMap } from '@aztec/kv-store';
import { type AztecKVStore, type AztecMap, type AztecMultiMap } from '@aztec/kv-store';

import { type BlockStore } from './block_store.js';

/**
 * A store for logs
 */
export class LogStore {
  // Serialized note-encrypted logs of a whole block, keyed by L2 block number.
  #noteEncryptedLogsByBlock: AztecMap<number, Buffer>;
  // Individual serialized note logs, keyed by the hex-encoded hash of the log.
  #noteEncryptedLogsByHash: AztecMap<string, Buffer>;
  // Log hashes grouped by tag (the log's first field), enabling getLogsByTags lookups.
  #noteEncryptedLogHashesByTag: AztecMultiMap<string, string>;
  // Tags seen in each block, used to clean up the tag indices when blocks are deleted.
  #noteEncryptedLogTagsByBlock: AztecMultiMap<number, string>;
  // Serialized encrypted logs of a whole block, keyed by L2 block number.
  #encryptedLogsByBlock: AztecMap<number, Buffer>;
  // Serialized unencrypted logs of a whole block, keyed by L2 block number.
  #unencryptedLogsByBlock: AztecMap<number, Buffer>;
  // Upper bound on the number of logs returned by a single paginated query.
  #logsMaxPageSize: number;
  #log = createDebugLogger('aztec:archiver:log_store');

constructor(private db: AztecKVStore, private blockStore: BlockStore, logsMaxPageSize: number = 1000) {
  // Open (or create) the named KV maps backing each index. The map names are part of the
  // on-disk schema — do not rename them without a migration.
  this.#noteEncryptedLogsByBlock = db.openMap('archiver_note_encrypted_logs_by_block');
  this.#noteEncryptedLogsByHash = db.openMap('archiver_note_encrypted_logs_by_hash');
  this.#noteEncryptedLogHashesByTag = db.openMultiMap('archiver_tagged_note_encrypted_log_hashes_by_tag');
  this.#noteEncryptedLogTagsByBlock = db.openMultiMap('archiver_note_encrypted_log_tags_by_block');
  this.#encryptedLogsByBlock = db.openMap('archiver_encrypted_logs_by_block');
  this.#unencryptedLogsByBlock = db.openMap('archiver_unencrypted_logs_by_block');

  this.#logsMaxPageSize = logsMaxPageSize;
}
Expand All @@ -44,21 +52,58 @@ export class LogStore {
/**
 * Appends the logs of the given blocks to the store, additionally indexing each
 * note-encrypted log by its tag (first 32 bytes) and by its hash.
 * @param blocks - The blocks whose logs should be stored.
 * @returns True once the enclosing transaction commits.
 */
addLogs(blocks: L2Block[]): Promise<boolean> {
  return this.db.transaction(() => {
    blocks.forEach(block => {
      void this.#noteEncryptedLogsByBlock.set(block.number, block.body.noteEncryptedLogs.toBuffer());
      block.body.noteEncryptedLogs.txLogs.forEach(txLogs => {
        const noteLogs = txLogs.unrollLogs();
        noteLogs.forEach(noteLog => {
          // A tag occupies the first 32 bytes of the log; shorter logs cannot carry one.
          if (noteLog.data.length < 32) {
            this.#log.warn(`Skipping note log with invalid data length: ${noteLog.data.length}`);
            return;
          }
          try {
            // May throw if the first 32 bytes do not encode a valid field element.
            const tag = new Fr(noteLog.data.subarray(0, 32));
            const hexHash = noteLog.hash().toString('hex');
            // Ideally we'd store all of the logs for a matching tag in an AztecMultiMap, but this type doesn't
            // handle storing buffers well. The 'ordered-binary' encoding returns an error trying to decode buffers
            // ('the number <> cannot be converted to a BigInt because it is not an integer'). We therefore store
            // instead the hashes of the logs.
            void this.#noteEncryptedLogHashesByTag.set(tag.toString(), hexHash);
            void this.#noteEncryptedLogsByHash.set(hexHash, noteLog.toBuffer());
            void this.#noteEncryptedLogTagsByBlock.set(block.number, tag.toString());
          } catch (err) {
            // Invalid tags are skipped (warn only) so one bad log doesn't abort the whole batch.
            this.#log.warn(`Failed to add tagged note log to store: ${err}`);
          }
        });
      });
      void this.#encryptedLogsByBlock.set(block.number, block.body.encryptedLogs.toBuffer());
      void this.#unencryptedLogsByBlock.set(block.number, block.body.unencryptedLogs.toBuffer());
    });

    return true;
  });
}

deleteLogs(blocks: L2Block[]): Promise<boolean> {
async deleteLogs(blocks: L2Block[]): Promise<boolean> {
const noteTagsToDelete = await this.db.transaction(() => {
return blocks.flatMap(block => Array.from(this.#noteEncryptedLogTagsByBlock.getValues(block.number)));
});
const noteLogHashesToDelete = await this.db.transaction(() => {
return noteTagsToDelete.flatMap(tag => Array.from(this.#noteEncryptedLogHashesByTag.getValues(tag)));
});
return this.db.transaction(() => {
blocks.forEach(block => {
void this.#noteEncryptedLogs.delete(block.number);
void this.#encryptedLogs.delete(block.number);
void this.#unencryptedLogs.delete(block.number);
void this.#noteEncryptedLogsByBlock.delete(block.number);
void this.#encryptedLogsByBlock.delete(block.number);
void this.#unencryptedLogsByBlock.delete(block.number);
void this.#noteEncryptedLogTagsByBlock.delete(block.number);
});

noteTagsToDelete.forEach(tag => {
void this.#noteEncryptedLogHashesByTag.delete(tag.toString());
});

noteLogHashesToDelete.forEach(hash => {
void this.#noteEncryptedLogsByHash.delete(hash);
});

return true;
Expand All @@ -80,12 +125,12 @@ export class LogStore {
const logMap = (() => {
switch (logType) {
case LogType.ENCRYPTED:
return this.#encryptedLogs;
return this.#encryptedLogsByBlock;
case LogType.NOTEENCRYPTED:
return this.#noteEncryptedLogs;
return this.#noteEncryptedLogsByBlock;
case LogType.UNENCRYPTED:
default:
return this.#unencryptedLogs;
return this.#unencryptedLogsByBlock;
}
})();
const logTypeMap = (() => {
Expand All @@ -105,6 +150,28 @@ export class LogStore {
}
}

/**
 * Gets all logs that match any of the received tags (i.e. logs with their first field equal to a tag).
 * @param tags - The tags to filter the logs by.
 * @returns For each received tag, an array of matching logs is returned. An empty array implies no logs match
 * that tag.
 */
getLogsByTags(tags: Fr[]): Promise<EncryptedL2NoteLog[][]> {
  return this.db.transaction(() =>
    tags.map(tag => {
      const matchingLogs: EncryptedL2NoteLog[] = [];
      // Resolve tag -> log hashes -> serialized logs.
      for (const logHash of this.#noteEncryptedLogHashesByTag.getValues(tag.toString())) {
        const noteLogBuffer = this.#noteEncryptedLogsByHash.get(logHash);
        // addLogs should ensure that we never have undefined logs, but we skip them regardless to
        // protect ourselves from database corruption.
        if (noteLogBuffer != undefined) {
          matchingLogs.push(EncryptedL2NoteLog.fromBuffer(noteLogBuffer));
        }
      }
      return matchingLogs;
    }),
  );
}

/**
* Gets unencrypted logs based on the provided filter.
* @param filter - The filter to apply to the logs.
Expand Down Expand Up @@ -154,7 +221,7 @@ export class LogStore {
const logs: ExtendedUnencryptedL2Log[] = [];

let maxLogsHit = false;
loopOverBlocks: for (const [blockNumber, logBuffer] of this.#unencryptedLogs.entries({ start, end })) {
loopOverBlocks: for (const [blockNumber, logBuffer] of this.#unencryptedLogsByBlock.entries({ start, end })) {
const unencryptedLogsInBlock = UnencryptedL2BlockL2Logs.fromBuffer(logBuffer);
for (let txIndex = filter.afterLog?.txIndex ?? 0; txIndex < unencryptedLogsInBlock.txLogs.length; txIndex++) {
const txLogs = unencryptedLogsInBlock.txLogs[txIndex].unrollLogs();
Expand Down Expand Up @@ -199,12 +266,12 @@ export class LogStore {
const logMap = (() => {
switch (logType) {
case LogType.ENCRYPTED:
return this.#encryptedLogs;
return this.#encryptedLogsByBlock;
case LogType.NOTEENCRYPTED:
return this.#noteEncryptedLogs;
return this.#noteEncryptedLogsByBlock;
case LogType.UNENCRYPTED:
default:
return this.#unencryptedLogs;
return this.#unencryptedLogsByBlock;
}
})();
const logTypeMap = (() => {
Expand Down
Loading
Loading