From 339398efba364e89ed7fc6cc317f4d046e1341fe Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 23 Jul 2024 14:42:25 +0700 Subject: [PATCH 1/5] feat: implement HashComputationLevel using LinkedList --- .../src/hashComputation.ts | 213 ++++++++++++++++++ .../src/hasher/as-sha256.ts | 9 +- .../src/hasher/hashtree.ts | 25 +- .../src/hasher/index.ts | 4 +- .../src/hasher/types.ts | 4 +- packages/persistent-merkle-tree/src/index.ts | 1 + packages/persistent-merkle-tree/src/node.ts | 53 +---- .../persistent-merkle-tree/src/subtree.ts | 45 ++-- packages/persistent-merkle-tree/src/tree.ts | 57 ++--- .../test/perf/hashComputation.test.ts | 41 ++++ .../test/perf/hasher.test.ts | 5 +- .../test/unit/batchHash.test.ts | 48 ---- .../test/unit/hashComputation.test.ts | 83 +++++++ .../test/unit/hasher.test.ts | 5 +- .../test/unit/node.test.ts | 7 +- .../test/unit/subtree.test.ts | 73 ++++-- .../test/unit/tree.test.ts | 36 ++- 17 files changed, 490 insertions(+), 219 deletions(-) create mode 100644 packages/persistent-merkle-tree/src/hashComputation.ts create mode 100644 packages/persistent-merkle-tree/test/perf/hashComputation.test.ts delete mode 100644 packages/persistent-merkle-tree/test/unit/batchHash.test.ts create mode 100644 packages/persistent-merkle-tree/test/unit/hashComputation.test.ts diff --git a/packages/persistent-merkle-tree/src/hashComputation.ts b/packages/persistent-merkle-tree/src/hashComputation.ts new file mode 100644 index 00000000..d7f48a25 --- /dev/null +++ b/packages/persistent-merkle-tree/src/hashComputation.ts @@ -0,0 +1,213 @@ +import type {Node} from "./node"; + +/** + * HashComputation to be later used to compute hash of nodes from bottom up. + * This is also an item of a linked list. + * ╔═════════════════════╗ ╔══════════════════════╗ + * ║ dest ║ ║ next_dest ║ + * ║ / \ ║ ========> ║ / \ ║ + * ║ src0 src1 ║ ║ next_src0 next_src1║ + * ╚═════════════════════╝ ╚══════════════════════╝ + */ +export type HashComputation = { + src0: Node; + src1: Node; + dest: Node; + next: HashComputation | null; +}; + +/** + * Model HashComputation[] at the same level that support reusing the same memory. + * Before every run, reset() should be called. + * After every run, clean() should be called. + */ +export class HashComputationLevel { + private _length: number; + private _totalLength: number; + // use LinkedList to avoid memory allocation when the list grows + // always have a fixed head although length is 0 + private head: HashComputation; + private tail: HashComputation | null; + private pointer: HashComputation | null; + + constructor() { + this._length = 0; + this._totalLength = 0; + this.head = { + src0: null as unknown as Node, + src1: null as unknown as Node, + dest: null as unknown as Node, + next: null, + }; + this.tail = null; + this.pointer = null; + } + + get length(): number { + return this._length; + } + + get totalLength(): number { + return this._totalLength; + } + + /** + * run before every run + */ + reset(): void { + // keep this.head + this.tail = null; + this._length = 0; + // totalLength is not reset + this.pointer = null; + } + + /** + * Append a new HashComputation to tail. + * This will overwrite the existing HashComputation if it is not null, or grow the list if needed. 
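+   *
+   * Illustrative per-run sketch (the `level` variable is a placeholder for an instance of this class),
+   * following the reset()/clean() contract documented on the class:
+   *   level.reset();
+   *   level.push(src0, src1, dest); // once per parent node to hash
+   *   // ... execute the hashes for this level ...
+   *   level.clean();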
+ */ + push(src0: Node, src1: Node, dest: Node): void { + if (this.tail !== null) { + let newTail = this.tail.next; + if (newTail !== null) { + newTail.src0 = src0; + newTail.src1 = src1; + newTail.dest = dest; + } else { + // grow the list + newTail = {src0, src1, dest, next: null}; + this.tail.next = newTail; + this._totalLength++; + } + this.tail = newTail; + this._length++; + return; + } + + // first item + this.head.src0 = src0; + this.head.src1 = src1; + this.head.dest = dest; + this.tail = this.head; + this._length = 1; + if (this._totalLength === 0) { + this._totalLength = 1; + } + // else _totalLength > 0, do not set + } + + /** + * run after every run + * hashComps may still refer to the old Nodes, we should release them to avoid memory leak. + */ + clean(): void { + let hc = this.tail?.next ?? null; + while (hc !== null) { + if (hc.src0 === null) { + // we may have already cleaned it in the previous run, return early + break; + } + hc.src0 = null as unknown as Node; + hc.src1 = null as unknown as Node; + hc.dest = null as unknown as Node; + hc = hc.next; + } + } + + /** + * Implement Iterator for this class + */ + next(): IteratorResult { + if (!this.pointer || this.tail === null) { + return {done: true, value: undefined}; + } + + // never yield value beyond the tail + const value = this.pointer; + const isNull = value.src0 === null; + this.pointer = this.pointer.next; + + return isNull ? {done: true, value: undefined} : {done: false, value}; + } + + /** + * This is convenient method to consume HashComputationLevel with for-of loop + * See "next" method above for the actual implementation + */ + [Symbol.iterator](): IterableIterator { + this.pointer = this.head; + return this; + } + + /** + * Not great due to memory allocation. + * Mainly used for testing. + */ + toArray(): HashComputation[] { + const hashComps: HashComputation[] = []; + for (const hc of this) { + hashComps.push(hc); + } + return hashComps; + } + + /** + * For testing. + */ + dump(): HashComputation[] { + const hashComps: HashComputation[] = []; + let hc: HashComputation | null = null; + for (hc = this.head; hc !== null; hc = hc.next) { + hashComps.push(hc); + } + return hashComps; + } +} + +/** + * Model HashComputationLevel[] at different levels. + */ +export class HashComputationGroup { + readonly byLevel: HashComputationLevel[]; + constructor() { + this.byLevel = []; + } + + reset(): void { + for (const level of this.byLevel) { + level.reset(); + } + } + + clean(): void { + for (const level of this.byLevel) { + level.clean(); + } + } +} + +/** + * Get HashComputations from a root node all the way to the leaf nodes. + */ +export function getHashComputations(node: Node, offset: number, hcByLevel: HashComputationLevel[]): void { + if (node.h0 === null) { + const hashComputations = levelAtIndex(hcByLevel, offset); + const {left, right} = node; + hashComputations.push(left, right, node); + // leaf nodes should have h0 to stop the recursion + getHashComputations(left, offset + 1, hcByLevel); + getHashComputations(right, offset + 1, hcByLevel); + } + + // else stop the recursion, node is hashed +} + +/** + * Utility to get HashComputationLevel at a specific index. 
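+ * The level is created lazily if it does not exist yet, so callers can chain directly,
+ * e.g. (as subtree.ts does in this change; `offset` and the nodes are placeholders):
+ *   levelAtIndex(hcByLevel, offset).push(left, right, node);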
+ */ +export function levelAtIndex(hcByLevel: HashComputationLevel[], index: number): HashComputationLevel { + if (hcByLevel[index] === undefined) { + hcByLevel[index] = new HashComputationLevel(); + } + return hcByLevel[index]; +} diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index d8228251..943f9b49 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -6,7 +6,8 @@ import { hashInto, } from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {HashComputation, Node} from "../node"; +import {Node} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; import {doDigestNLevel, doMerkleizeInto} from "./util"; export const hasher: Hasher = { @@ -19,7 +20,7 @@ export const hasher: Hasher = { digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); }, - executeHashComputations: (hashComputations: HashComputation[][]) => { + executeHashComputations: (hashComputations: HashComputationLevel[]) => { for (let level = hashComputations.length - 1; level >= 0; level--) { const hcArr = hashComputations[level]; if (!hcArr) { @@ -46,7 +47,8 @@ export const hasher: Hasher = { let src1_3: Node | null = null; let dest3: Node | null = null; - for (const [i, hc] of hcArr.entries()) { + let i = 0; + for (const hc of hcArr) { const indexInBatch = i % 4; switch (indexInBatch) { @@ -121,6 +123,7 @@ export const hasher: Hasher = { default: throw Error(`Unexpected indexInBatch ${indexInBatch}`); } + i++; } // remaining diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 26de5609..f578d50c 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,6 +1,7 @@ import {hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; -import {HashComputation, Node} from "../node"; +import {Node} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; import {doDigestNLevel, doMerkleizeInto} from "./util"; @@ -20,6 +21,8 @@ const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32); // convenient reusable Uint8Array for hash64 const hash64Input = uint8Input.subarray(0, 64); const hash64Output = uint8Output.subarray(0, 32); +// size input array to 2 HashObject per computation * 32 bytes per object +const destNodes: Node[] = new Array(PARALLEL_FACTOR); export const hasher: Hasher = { name: "hashtree", @@ -43,7 +46,7 @@ export const hasher: Hasher = { digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); }, - executeHashComputations(hashComputations: HashComputation[][]): void { + executeHashComputations(hashComputations: HashComputationLevel[]): void { for (let level = hashComputations.length - 1; level >= 0; level--) { const hcArr = hashComputations[level]; if (!hcArr) { @@ -56,25 +59,25 @@ export const hasher: Hasher = { continue; } - // size input array to 2 HashObject per computation * 32 bytes per object - // const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32)); - let destNodes: Node[] = []; - // hash every 16 inputs at once to avoid memory allocation - for (const [i, {src0, src1, dest}] of hcArr.entries()) { + let i = 0; + for 
(const {src0, src1, dest} of hcArr) { + if (!src0 || !src1 || !dest) { + throw new Error(`Invalid HashComputation at index ${i}`); + } const indexInBatch = i % PARALLEL_FACTOR; const offset = indexInBatch * 16; hashObjectToUint32Array(src0, uint32Input, offset); hashObjectToUint32Array(src1, uint32Input, offset + 8); - destNodes.push(dest); + destNodes[indexInBatch] = dest; if (indexInBatch === PARALLEL_FACTOR - 1) { hashInto(uint8Input, uint8Output); for (const [j, destNode] of destNodes.entries()) { byteArrayIntoHashObject(uint8Output, j * 32, destNode); } - destNodes = []; } + i++; } const remaining = hcArr.length % PARALLEL_FACTOR; @@ -84,8 +87,8 @@ export const hasher: Hasher = { const remainingOutput = uint8Output.subarray(0, remaining * 32); hashInto(remainingInput, remainingOutput); // destNodes was prepared above - for (const [i, destNode] of destNodes.entries()) { - byteArrayIntoHashObject(remainingOutput, i * 32, destNode); + for (let j = 0; j < remaining; j++) { + byteArrayIntoHashObject(remainingOutput, j * 32, destNodes[j]); } } } diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index dfa1d7e6..75442232 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -1,6 +1,6 @@ import {Hasher} from "./types"; import {hasher as nobleHasher} from "./noble"; -import type {HashComputation} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; export * from "./types"; export * from "./util"; @@ -31,6 +31,6 @@ export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Arr hasher.merkleizeInto(data, padFor, output, offset); } -export function executeHashComputations(hashComputations: HashComputation[][]): void { +export function executeHashComputations(hashComputations: HashComputationLevel[]): void { hasher.executeHashComputations(hashComputations); } diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index ad72cfcb..9f5813f0 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -1,5 +1,5 @@ import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import type {HashComputation} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; export type {HashObject}; @@ -29,5 +29,5 @@ export type Hasher = { /** * Execute a batch of HashComputations */ - executeHashComputations(hashComputations: HashComputation[][]): void; + executeHashComputations(hashComputations: HashComputationLevel[]): void; }; diff --git a/packages/persistent-merkle-tree/src/index.ts b/packages/persistent-merkle-tree/src/index.ts index 5ff9c0b8..5311ca5a 100644 --- a/packages/persistent-merkle-tree/src/index.ts +++ b/packages/persistent-merkle-tree/src/index.ts @@ -1,6 +1,7 @@ export * from "./gindex"; export * from "./hasher"; export * from "./node"; +export * from "./hashComputation"; export * from "./packedNode"; export * from "./proof"; export * from "./subtree"; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 5d2b5bbe..5f80c667 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -1,21 +1,8 @@ import {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {executeHashComputations, hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from 
"./hasher"; +import {hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from "./hasher"; const TWO_POWER_32 = 2 ** 32; -export type HashComputation = { - src0: Node; - src1: Node; - dest: Node; -}; - -export type HashComputationGroup = { - // global array - byLevel: HashComputation[][]; - // offset from top - offset: number; -}; - /** * An immutable binary merkle tree node */ @@ -83,15 +70,6 @@ export class BranchNode extends Node { } } - batchHash(): Uint8Array { - executeHashComputations(this.hashComputations); - - if (this.h0 === null) { - throw Error("Root is not computed by batch"); - } - return this.root; - } - get rootHashObject(): HashObject { if (this.h0 === null) { hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject, this); @@ -114,12 +92,6 @@ export class BranchNode extends Node { get right(): Node { return this._right; } - - get hashComputations(): HashComputation[][] { - const hashComputations: HashComputation[][] = []; - getHashComputations(this, 0, hashComputations); - return hashComputations; - } } /** @@ -398,26 +370,3 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void else if (hIndex === 7) node.h7 |= value; else throw Error("hIndex > 7"); } - -/** - * Get HashComputations from a root node all the way to the leaf nodes. - */ -export function getHashComputations(node: Node, offset: number, hashCompsByLevel: HashComputation[][]): void { - if (node.h0 === null) { - const hashComputations = arrayAtIndex(hashCompsByLevel, offset); - const {left, right} = node; - hashComputations.push({src0: left, src1: right, dest: node}); - // leaf nodes should have h0 to stop the recursion - getHashComputations(left, offset + 1, hashCompsByLevel); - getHashComputations(right, offset + 1, hashCompsByLevel); - } - - // else stop the recursion, node is hashed -} - -export function arrayAtIndex(twoDArray: T[][], index: number): T[] { - if (twoDArray[index] === undefined) { - twoDArray[index] = []; - } - return twoDArray[index]; -} diff --git a/packages/persistent-merkle-tree/src/subtree.ts b/packages/persistent-merkle-tree/src/subtree.ts index eea37f9f..44dc7987 100644 --- a/packages/persistent-merkle-tree/src/subtree.ts +++ b/packages/persistent-merkle-tree/src/subtree.ts @@ -1,4 +1,5 @@ -import {BranchNode, HashComputationGroup, Node, arrayAtIndex, getHashComputations} from "./node"; +import {BranchNode, Node} from "./node"; +import {getHashComputations, levelAtIndex, HashComputationLevel} from "./hashComputation"; import {zeroNode} from "./zeroNode"; export function subtreeFillToDepth(bottom: Node, depth: number): Node { @@ -38,12 +39,13 @@ export function subtreeFillToLength(bottom: Node, depth: number, length: number) /** * WARNING: Mutates the provided nodes array. * TODO: Don't mutate the nodes array. - * hashComps is an output parameter that will be filled with the hash computations if exists. + * hcByLevel is an output parameter that will be filled with the hash computations if exists. 
*/ export function subtreeFillToContents( nodes: Node[], depth: number, - hashComps: HashComputationGroup | null = null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { const maxLength = 2 ** depth; if (nodes.length > maxLength) { @@ -56,8 +58,8 @@ export function subtreeFillToContents( if (depth === 0) { const node = nodes[0]; - if (hashComps !== null) { - getHashComputations(node, hashComps.offset, hashComps.byLevel); + if (hcByLevel !== null) { + getHashComputations(node, hcOffset, hcByLevel); } return node; } @@ -69,15 +71,10 @@ export function subtreeFillToContents( const rightNode = nodes.length > 1 ? nodes[1] : zeroNode(0); const rootNode = new BranchNode(leftNode, rightNode); - if (hashComps !== null) { - const offset = hashComps.offset; - getHashComputations(leftNode, offset + 1, hashComps.byLevel); - getHashComputations(rightNode, offset + 1, hashComps.byLevel); - arrayAtIndex(hashComps.byLevel, offset).push({ - src0: leftNode, - src1: rightNode, - dest: rootNode, - }); + if (hcByLevel !== null) { + getHashComputations(leftNode, hcOffset + 1, hcByLevel); + getHashComputations(rightNode, hcOffset + 1, hcByLevel); + levelAtIndex(hcByLevel, hcOffset).push(leftNode, rightNode, rootNode); } return rootNode; @@ -88,7 +85,7 @@ export function subtreeFillToContents( for (let d = depth; d > 0; d--) { const countRemainder = count % 2; const countEven = count - countRemainder; - const offset = hashComps ? hashComps.offset + d - 1 : null; + const offset = hcByLevel ? hcOffset + d - 1 : null; // For each depth level compute the new BranchNodes and overwrite the nodes array for (let i = 0; i < countEven; i += 2) { @@ -96,16 +93,12 @@ export function subtreeFillToContents( const right = nodes[i + 1]; const node = new BranchNode(left, right); nodes[i / 2] = node; - if (offset !== null && hashComps !== null) { - arrayAtIndex(hashComps.byLevel, offset).push({ - src0: left, - src1: right, - dest: node, - }); + if (offset !== null && hcByLevel !== null) { + levelAtIndex(hcByLevel, offset).push(left, right, node); if (d === depth) { // bottom up strategy so we don't need to go down the tree except for the last level - getHashComputations(left, offset + 1, hashComps.byLevel); - getHashComputations(right, offset + 1, hashComps.byLevel); + getHashComputations(left, offset + 1, hcByLevel); + getHashComputations(right, offset + 1, hcByLevel); } } } @@ -115,13 +108,13 @@ export function subtreeFillToContents( const right = zeroNode(depth - d); const node = new BranchNode(left, right); nodes[countEven / 2] = node; - if (offset !== null && hashComps !== null) { + if (offset !== null && hcByLevel !== null) { if (d === depth) { // only go down on the last level - getHashComputations(left, offset + 1, hashComps.byLevel); + getHashComputations(left, offset + 1, hcByLevel); } // no need to getHashComputations for zero node - arrayAtIndex(hashComps.byLevel, offset).push({src0: left, src1: right, dest: node}); + levelAtIndex(hcByLevel, offset).push(left, right, node); } } diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts index 1ade6129..33fbe57a 100644 --- a/packages/persistent-merkle-tree/src/tree.ts +++ b/packages/persistent-merkle-tree/src/tree.ts @@ -1,6 +1,7 @@ import {zeroNode} from "./zeroNode"; import {Gindex, GindexBitstring, convertGindexToBitstring} from "./gindex"; -import {Node, LeafNode, BranchNode, HashComputationGroup, arrayAtIndex} from "./node"; +import {Node, LeafNode, BranchNode} from "./node"; +import 
{HashComputationLevel, levelAtIndex} from "./hashComputation"; import {createNodeFromProof, createProof, Proof, ProofInput} from "./proof"; import {createSingleProof} from "./proof/single"; @@ -73,13 +74,6 @@ export class Tree { return this.rootNode.root; } - batchHash(): Uint8Array { - if (!this.rootNode.isLeaf()) { - return (this.rootNode as BranchNode).batchHash(); - } - return this.root; - } - /** * Return a copy of the tree */ @@ -341,14 +335,15 @@ export function setNodeAtDepth(rootNode: Node, nodesDepth: number, index: number * gindex and navigate upwards creating or caching nodes as necessary. Loop and repeat. * * Supports index up to `Number.MAX_SAFE_INTEGER`. - * @param hashComps a map of HashComputation[] by level (could be from 0 to `nodesDepth - 1`) + * @param hcByLevel an array of HashComputation[] by level (could be from 0 to `nodesDepth - 1`) */ export function setNodesAtDepth( rootNode: Node, nodesDepth: number, indexes: number[], nodes: Node[], - hashComps: HashComputationGroup | null = null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { // depth depthi gindexes indexes // 0 1 1 0 @@ -367,8 +362,6 @@ export function setNodesAtDepth( if (nodesDepth === 0) { return nodes.length > 0 ? nodes[0] : rootNode; } - const hashCompsByLevel = hashComps?.byLevel ?? null; - const offset = hashComps?.offset ?? 0; /** * Contiguous filled stack of parent nodes. It get filled in the first descent @@ -435,33 +428,25 @@ export function setNodesAtDepth( // Next node is the very next to the right of current node if (index + 1 === indexes[i + 1]) { node = new BranchNode(nodes[i], nodes[i + 1]); - if (hashCompsByLevel != null) { + if (hcByLevel != null) { // go with level of dest node (level 0 goes with root node) // in this case dest node is nodesDept - 2, same for below - arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({ - src0: nodes[i], - src1: nodes[i + 1], - dest: node, - }); + levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], nodes[i + 1], node); } // Move pointer one extra forward since node has consumed two nodes i++; } else { const oldNode = node; node = new BranchNode(nodes[i], oldNode.right); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({ - src0: nodes[i], - src1: oldNode.right, - dest: node, - }); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(nodes[i], oldNode.right, node); } } } else { const oldNode = node; node = new BranchNode(oldNode.left, nodes[i]); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({src0: oldNode.left, src1: nodes[i], dest: node}); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, nodesDepth - 1 + hcOffset).push(oldNode.left, nodes[i], node); } } @@ -503,12 +488,8 @@ export function setNodesAtDepth( // Also, if still has to move upwards, rebind since the node won't be visited anymore const oldNode = node; node = new BranchNode(oldNode, parentNodeStack[d].right); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, depth + offset).push({ - src0: oldNode, - src1: parentNodeStack[d].right, - dest: node, - }); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, depth + hcOffset).push(oldNode, parentNodeStack[d].right, node); } } else { // Only store the left node if it's at d = diffDepth @@ -521,19 +502,15 @@ export function setNodesAtDepth( if (leftNode !== undefined) { const oldNode = node; node = new BranchNode(leftNode, oldNode); - if 
(hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, depth + offset).push({src0: leftNode, src1: oldNode, dest: node}); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, depth + hcOffset).push(leftNode, oldNode, node); } leftParentNodeStack[d] = undefined; } else { const oldNode = node; node = new BranchNode(parentNodeStack[d].left, oldNode); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, depth + offset).push({ - src0: parentNodeStack[d].left, - src1: oldNode, - dest: node, - }); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, depth + hcOffset).push(parentNodeStack[d].left, oldNode, node); } } } diff --git a/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts b/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts new file mode 100644 index 00000000..38df8b62 --- /dev/null +++ b/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts @@ -0,0 +1,41 @@ +import {itBench} from "@dapplion/benchmark"; +import { HashComputation, HashComputationLevel, LeafNode, zeroHash } from "../../src"; + +describe("HashComputationLevel", function () { + const src = LeafNode.fromRoot(zeroHash(0)); + const dest = LeafNode.fromRoot(zeroHash(1)); + const hashComp: HashComputation = {src0: src, src1: src, dest, next: null}; + + const length = 1_000_000; + + itBench({ + id: "HashComputationLevel.push then loop", + before: () => new HashComputationLevel(), + beforeEach: (level) => { + level.reset(); + return level; + }, + fn: (level: HashComputationLevel) => { + for (let i = 0; i < length; i++) { + level.push(src, src, dest); + } + level.clean(); + for (const hc of level) { + const {src0, src1, dest} = hc; + } + } + }); + + itBench({ + id: "HashComputation[] push then loop", + fn: () => { + const level: HashComputation[] = []; + for (let i = 0; i < length; i++) { + level.push(hashComp); + } + for (const hc of level) { + const {src0, src1, dest} = hc; + } + } + }) +}); diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index d71a0948..bb8c3ecf 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -4,6 +4,7 @@ import {hasher as asShaHasher} from "../../src/hasher/as-sha256"; import {hasher as nobleHasher} from "../../src/hasher/noble"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {buildComparisonTrees} from "../utils/tree"; +import { HashComputationLevel, getHashComputations } from "../../src"; describe("hasher", function () { this.timeout(0); @@ -55,7 +56,9 @@ describe("hasher", function () { return tree; }, fn: (tree) => { - hasher.executeHashComputations(tree.hashComputations); + const hcByLevel: HashComputationLevel[] = []; + getHashComputations(tree, 0, hcByLevel); + hasher.executeHashComputations(hcByLevel); }, }); }); diff --git a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts b/packages/persistent-merkle-tree/test/unit/batchHash.test.ts deleted file mode 100644 index cb863dc6..00000000 --- a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts +++ /dev/null @@ -1,48 +0,0 @@ -import {expect} from "chai"; -import {countToDepth} from "../../src/gindex"; -import {BranchNode, LeafNode, Node} from "../../src/node"; -import {subtreeFillToContents} from "../../src/subtree"; -import {zeroNode} from "../../src/zeroNode"; - -describe("batchHash", function () { - const numNodes = [200, 201, 202, 203]; - // const numNodes = [32, 33, 64]; - for 
(const numNode of numNodes) { - it(`${numNode} nodes`, () => { - const rootNode = createList(numNode); - const root1 = rootNode.batchHash(); - const rootNode2 = createList(numNode); - const root2 = rootNode2.root; - expect(root2).to.be.deep.equal(root1); - - const depth = countToDepth(BigInt(numNode)) + 1; - resetNodes(rootNode, depth); - resetNodes(rootNode2, depth); - expect(rootNode.batchHash()).to.be.deep.equal(rootNode2.batchHash()); - }); - } -}); - -function resetNodes(node: Node, depth: number): void { - if (node.isLeaf()) return; - // do not reset zeroNode - if (node === zeroNode(depth)) return; - // this is to ask Node to calculate node again - node.h0 = null as unknown as number; - // in the old version, we should do - // node._root = null; - resetNodes(node.left, depth - 1); - resetNodes(node.right, depth - 1); -} - -function newLeafNodeFilled(i: number): LeafNode { - return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 256))); -} - -function createList(numNode: number): BranchNode { - const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); - // add 1 to countToDepth for mix_in_length spec - const depth = countToDepth(BigInt(numNode)) + 1; - const node = subtreeFillToContents(nodes, depth); - return node as BranchNode; -} diff --git a/packages/persistent-merkle-tree/test/unit/hashComputation.test.ts b/packages/persistent-merkle-tree/test/unit/hashComputation.test.ts new file mode 100644 index 00000000..da2a5150 --- /dev/null +++ b/packages/persistent-merkle-tree/test/unit/hashComputation.test.ts @@ -0,0 +1,83 @@ +import { expect } from "chai"; +import { zeroNode, Node } from "../../src"; +import {HashComputationLevel} from "../../src/hashComputation"; + +describe("HashComputationLevel", () => { + let hashComputationLevel: HashComputationLevel; + + beforeEach(() => { + hashComputationLevel = new HashComputationLevel(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + }); + + it("should reset", () => { + hashComputationLevel.reset(); + expect(hashComputationLevel.length).to.be.equal(0); + expect(hashComputationLevel.totalLength).to.be.equal(1); + expect(toArray(hashComputationLevel)).to.be.deep.equal([]); + }); + + it("should push", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + expect(hashComputationLevel.length).to.be.equal(2); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr.length).to.be.equal(2); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)} + ]); + }); + + it("reset then push full", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.reset(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.clean(); + expect(hashComputationLevel.length).to.be.equal(2); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)} + ]); + }); + + it("reset then push partial", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + // totalLength = 2 now + hashComputationLevel.reset(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + 
hashComputationLevel.clean(); + expect(hashComputationLevel.length).to.be.equal(1); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + ]); + }); + + it("clean", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.reset(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.clean(); + expect(hashComputationLevel.length).to.be.equal(1); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + ]); + const all = hashComputationLevel.dump(); + const last = all[all.length - 1]; + expect(last.src0).to.be.null; + expect(last.src1).to.be.null; + expect(last.dest).to.be.null; + }); + +}); + +function toArray(hc: HashComputationLevel): {src0: Node; src1: Node; dest: Node}[] { + return hc.toArray().map(({src0, src1, dest}) => ({src0, src1, dest})); +} diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 46f454b0..ee129fd0 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -5,7 +5,7 @@ import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {linspace} from "../utils/misc"; import {buildComparisonTrees} from "../utils/tree"; -import {HashObject, LeafNode, subtreeFillToContents} from "../../src"; +import {HashComputationLevel, HashObject, LeafNode, getHashComputations, subtreeFillToContents} from "../../src"; import { expect } from "chai"; import { zeroHash } from "../../src/zeroHash"; @@ -61,7 +61,8 @@ describe("hashers", function () { for (const hasher of hashers) { it(hasher.name, () => { const [tree1, tree2] = buildComparisonTrees(8); - const hashComputations = tree2.hashComputations; + const hashComputations: HashComputationLevel[] = []; + getHashComputations(tree1, 0, hashComputations); hasher.executeHashComputations(hashComputations); expectEqualHex(tree1.root, tree2.root); }); diff --git a/packages/persistent-merkle-tree/test/unit/node.test.ts b/packages/persistent-merkle-tree/test/unit/node.test.ts index bbe8d7be..1fbcb54e 100644 --- a/packages/persistent-merkle-tree/test/unit/node.test.ts +++ b/packages/persistent-merkle-tree/test/unit/node.test.ts @@ -1,6 +1,7 @@ import {HashObject} from "@chainsafe/as-sha256"; import {expect} from "chai"; -import {BranchNode, HashComputation, LeafNode, countToDepth, getHashComputations, subtreeFillToContents} from "../../src"; +import {BranchNode, LeafNode, countToDepth, subtreeFillToContents} from "../../src"; +import {HashComputation, HashComputationLevel, getHashComputations} from "../../src/hashComputation"; describe("LeafNode uint", () => { const testCasesNode: { @@ -222,7 +223,7 @@ describe("getHashComputations", () => { const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); const depth = countToDepth(BigInt(numNode)); const rootNode = subtreeFillToContents(nodes, depth); - const hashComputations: HashComputation[][] = []; + const hashComputations: HashComputationLevel[] = []; getHashComputations(rootNode, 0, hashComputations); expect(hashComputations.length).to.equal(expectedLengths.length); for (let i = 0; i 
< hashComputations.length; i++) { @@ -252,7 +253,7 @@ describe("getHashComputations", () => { for (const {numNode, expectedLengths} of testCases2) { it(`list with ${numNode} nodes`, () => { const rootNode = createList(numNode); - const hashComputations: HashComputation[][] = []; + const hashComputations: HashComputationLevel[] = []; getHashComputations(rootNode, 0, hashComputations); expect(hashComputations.length).to.equal(expectedLengths.length); for (let i = 0; i < hashComputations.length; i++) { diff --git a/packages/persistent-merkle-tree/test/unit/subtree.test.ts b/packages/persistent-merkle-tree/test/unit/subtree.test.ts index 880838d7..d46f7ad6 100644 --- a/packages/persistent-merkle-tree/test/unit/subtree.test.ts +++ b/packages/persistent-merkle-tree/test/unit/subtree.test.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import {subtreeFillToContents, LeafNode, getNodesAtDepth, executeHashComputations, BranchNode, Node} from "../../src"; +import {subtreeFillToContents, LeafNode, getNodesAtDepth, executeHashComputations, BranchNode, Node, HashComputationLevel, zeroNode} from "../../src"; describe("subtreeFillToContents", function () { // the hash computation takes time @@ -39,12 +39,8 @@ describe("subtreeFillToContents", function () { expectedNodes[i] = node; } - const hashComps = { - offset: 0, - byLevel: [], - }; - - const node = subtreeFillToContents(nodes, depth, hashComps); + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodes, depth, 0, hcByLevel); const retrievedNodes = getNodesAtDepth(node, depth, 0, count); // Assert correct @@ -53,7 +49,7 @@ describe("subtreeFillToContents", function () { throw Error(`Wrong node at index ${i}`); } } - executeHashComputations(hashComps.byLevel); + executeHashComputations(hcByLevel); if (node.h0 === null) { throw Error("Root node h0 is null"); } @@ -62,7 +58,46 @@ describe("subtreeFillToContents", function () { } }); -describe("subtreeFillToContents - validator nodes", function () { +describe("subtreeFillToContents with hcByLevel", function () { + + it("depth = 0", () => { + // return zeroNode, no hash computations + const nodes = [LeafNode.fromZero()]; + const hcByLevel: HashComputationLevel[] = []; + subtreeFillToContents(nodes, 0, 0, hcByLevel); + expect(hcByLevel.length).to.equal(0); + }); + + it("depth = 1, bottom nodes are leaf nodes", () => { + // return BranchNode, hash computations + const nodes = [LeafNode.fromZero(), LeafNode.fromZero()]; + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodes, 1, 0, hcByLevel); + expect(hcByLevel.length).to.equal(1); + expect(hcByLevel[0].length).to.equal(1); + executeHashComputations(hcByLevel); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } + expect(node.root).to.deep.equal(zeroNode(1).root); + }); + + it("depth = 1, bottom nodes are branch nodes", () => { + const node0 = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); + const node1 = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); + const nodes = [node0, node1]; + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodes, 1, 0, hcByLevel); + expect(hcByLevel.length).to.equal(2); + expect(hcByLevel[0].length).to.equal(1); + expect(hcByLevel[1].length).to.equal(2); + executeHashComputations(hcByLevel); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } + expect(node.root).to.deep.equal(zeroNode(2).root); + }); + /** * 0 root * / \ @@ -90,20 +125,18 @@ describe("subtreeFillToContents 
- validator nodes", function () { // maxChunksToDepth in ssz returns 3 for 8 nodes const depth = 3; const root0 = subtreeFillToContents(nodesArr[0], depth); - const hashComps = { - offset: 0, - byLevel: new Array<[]>(), - }; - const node = subtreeFillToContents(nodesArr[1], depth, hashComps); - expect(hashComps.byLevel.length).to.equal(4); - expect(hashComps.byLevel[0].length).to.equal(1); - expect(hashComps.byLevel[1].length).to.equal(2); - expect(hashComps.byLevel[2].length).to.equal(4); - expect(hashComps.byLevel[3].length).to.equal(1); - executeHashComputations(hashComps.byLevel); + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodesArr[1], depth, 0, hcByLevel); + expect(hcByLevel.length).to.equal(4); + expect(hcByLevel[0].length).to.equal(1); + expect(hcByLevel[1].length).to.equal(2); + expect(hcByLevel[2].length).to.equal(4); + expect(hcByLevel[3].length).to.equal(1); + executeHashComputations(hcByLevel); if (node.h0 === null) { throw Error("Root node h0 is null"); } + // node.root is computed in batch, root0.root is computed in a single call expect(node.root).to.deep.equal(root0.root); }); }); diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts index a885466d..092ee122 100644 --- a/packages/persistent-merkle-tree/test/unit/tree.test.ts +++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts @@ -13,6 +13,8 @@ import { BranchNode, HashComputation, getHashComputations, + HashComputationLevel, + executeHashComputations, } from "../../src"; describe("fixed-depth tree iteration", () => { @@ -59,7 +61,8 @@ describe("batchHash() vs root getter", () => { const depth = Math.ceil(Math.log2(length)); const tree = new Tree(subtreeFillToContents([...leaves], depth)); const tree2 = new Tree(subtreeFillToContents([...leaves], depth)); - expect(tree.batchHash()).to.be.deep.equal(tree2.root); + batchHash(tree.rootNode); + expect(tree.root).to.be.deep.equal(tree2.root); }); } }); @@ -124,7 +127,8 @@ describe("Tree.setNode vs Tree.setHashObjectFn", () => { tree2.setNodeWithFn(BigInt(18), getNewNodeFn); tree2.setNodeWithFn(BigInt(46), getNewNodeFn); tree2.setNodeWithFn(BigInt(60), getNewNodeFn); - expect(toHex((tree2.rootNode as BranchNode).batchHash())).to.equal("02607e58782c912e2f96f4ff9daf494d0d115e7c37e8c2b7ddce17213591151b"); + batchHash(tree2.rootNode); + expect(toHex(tree2.root)).to.equal("02607e58782c912e2f96f4ff9daf494d0d115e7c37e8c2b7ddce17213591151b"); }); it("Should throw for gindex 0", () => { @@ -164,7 +168,7 @@ describe("Tree batch setNodes", () => { const treeOk = new Tree(zeroNode(depth)); // cache all roots treeOk.root; - const hashComputationsOk: Array = Array.from({length: depth}, () => []); + const hashComputationsOk: Array = Array.from({length: depth}, () => new HashComputationLevel()); const tree = new Tree(zeroNode(depth)); tree.root; const gindexesBigint = gindexes.map((gindex) => BigInt(gindex)); @@ -183,14 +187,15 @@ describe("Tree batch setNodes", () => { it(`${id} - setNodesAtDepth()`, () => { const chunksNode = tree.rootNode; - const hashComputations: Array = Array.from({length: depth}, () => []); + const hcByLevel: HashComputationLevel[] = []; const newChunksNode = setNodesAtDepth( chunksNode, depth, indexes, gindexes.map((nodeValue) => LeafNode.fromRoot(Buffer.alloc(32, nodeValue))), // TODO: more test cases with positive offset? 
- {byLevel: hashComputations, offset: 0} + 0, + hcByLevel ); tree.rootNode = newChunksNode; const roots = getTreeRoots(tree, maxGindex); @@ -201,10 +206,12 @@ describe("Tree batch setNodes", () => { // TODO: need sort? // TODO: confirm all nodes in HashComputation are populated with HashObjects, h0 !== null for (let i = depth - 1; i >= 0; i--) { - expect(hashComputations[i].length).to.be.equal(hashComputationsOk[i].length, `incorrect length at depth ${i}`); - for (let j = 0; j < hashComputations[i].length; j++) { - const hcOk = hashComputationsOk[i][j]; - const hc = hashComputations[i][j]; + const hcArr = hcByLevel[i].toArray(); + const hcOkArr = hashComputationsOk[i].toArray(); + expect(hcArr.length).to.be.equal(hcOkArr.length, `incorrect length at depth ${i}`); + for (let j = 0; j < hcArr.length; j++) { + const hcOk = hcOkArr[j]; + const hc = hcArr[j]; expect(hc.src0.root).to.be.deep.equal(hcOk.src0.root); expect(hc.src1.root).to.be.deep.equal(hcOk.src1.root); expect(hc.dest.root).to.be.deep.equal(hcOk.dest.root); @@ -287,3 +294,14 @@ function toHex(bytes: Buffer | Uint8Array): string { return Buffer.from(bytes).toString("hex"); } +/** + * This is only a test utility function, don't want to use it in production because it allocates memory every time. + */ +function batchHash(node: Node): void { + const hashComputations: HashComputationLevel[] = []; + getHashComputations(node, 0, hashComputations); + executeHashComputations(hashComputations); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } +} From 40e1a4dacda934985ade6e46a7a98215866247ae Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 23 Jul 2024 15:22:24 +0700 Subject: [PATCH 2/5] feat: set default hasher to hashtree for benchmark --- .../persistent-merkle-tree/test/perf/node.test.ts | 10 +++++----- .../test/perf/validators.test.ts | 9 ++++----- .../persistent-merkle-tree/test/unit/tree.test.ts | 15 +-------------- .../test/utils/batchHash.ts | 15 +++++++++++++++ setHasher.mjs | 6 +++--- 5 files changed, 28 insertions(+), 27 deletions(-) create mode 100644 packages/persistent-merkle-tree/test/utils/batchHash.ts diff --git a/packages/persistent-merkle-tree/test/perf/node.test.ts b/packages/persistent-merkle-tree/test/perf/node.test.ts index c8066d40..5de5373b 100644 --- a/packages/persistent-merkle-tree/test/perf/node.test.ts +++ b/packages/persistent-merkle-tree/test/perf/node.test.ts @@ -1,6 +1,7 @@ import {itBench} from "@dapplion/benchmark"; -import {BranchNode, getHashComputations, getNodeH, HashComputation, LeafNode} from "../../src/node"; -import {countToDepth, subtreeFillToContents} from "../../src"; +import {BranchNode, getNodeH, LeafNode} from "../../src/node"; +import {countToDepth, getHashComputations, HashComputation, subtreeFillToContents} from "../../src"; +import {batchHash} from "../utils/batchHash"; describe("HashObject LeafNode", () => { // Number of new nodes created in processAttestations() on average @@ -50,8 +51,7 @@ describe("Node batchHash", () => { id: `getHashComputations ${numNode} nodes`, beforeEach: () => createList(numNode), fn: (rootNode: BranchNode) => { - const hashComputations: HashComputation[][] = []; - getHashComputations(rootNode, 0, hashComputations); + getHashComputations(rootNode, 0, []); }, }); @@ -59,7 +59,7 @@ describe("Node batchHash", () => { id: `batchHash ${numNode} nodes`, beforeEach: () => createList(numNode), fn: (rootNode: BranchNode) => { - rootNode.batchHash(); + batchHash(rootNode); }, }); diff --git 
a/packages/persistent-merkle-tree/test/perf/validators.test.ts b/packages/persistent-merkle-tree/test/perf/validators.test.ts index 452dd560..e27238a3 100644 --- a/packages/persistent-merkle-tree/test/perf/validators.test.ts +++ b/packages/persistent-merkle-tree/test/perf/validators.test.ts @@ -1,6 +1,5 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; import { - BranchNode, LeafNode, subtreeFillToContents, Node, @@ -9,6 +8,7 @@ import { getHashComputations, } from "../../src"; import {MemoryTracker} from "../utils/memTracker"; +import {batchHash} from "../utils/batchHash"; /** * Below is measured on Mac M1. @@ -36,8 +36,7 @@ describe("Track the performance of validators", () => { const tracker = new MemoryTracker(); tracker.logDiff("Start"); - // const vc = 250_000; - const vc = 1_600_000; + const vc = 250_000; // see createValidatorList const depth = countToDepth(BigInt(vc)) + 1; // cache roots of zero nodes @@ -65,7 +64,7 @@ describe("Track the performance of validators", () => { return node; }, fn: (node) => { - (node as BranchNode).batchHash(); + batchHash(node); }, }); @@ -76,7 +75,7 @@ describe("Track the performance of validators", () => { return node; }, fn: (node) => { - (node as BranchNode).hashComputations; + getHashComputations(node, 0, []); }, }); }); diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts index 092ee122..707c692a 100644 --- a/packages/persistent-merkle-tree/test/unit/tree.test.ts +++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts @@ -10,12 +10,10 @@ import { uint8ArrayToHashObject, setNodesAtDepth, findDiffDepthi, - BranchNode, - HashComputation, getHashComputations, HashComputationLevel, - executeHashComputations, } from "../../src"; +import {batchHash} from "../utils/batchHash"; describe("fixed-depth tree iteration", () => { it("should properly navigate the zero tree", () => { @@ -294,14 +292,3 @@ function toHex(bytes: Buffer | Uint8Array): string { return Buffer.from(bytes).toString("hex"); } -/** - * This is only a test utility function, don't want to use it in production because it allocates memory every time. - */ -function batchHash(node: Node): void { - const hashComputations: HashComputationLevel[] = []; - getHashComputations(node, 0, hashComputations); - executeHashComputations(hashComputations); - if (node.h0 === null) { - throw Error("Root node h0 is null"); - } -} diff --git a/packages/persistent-merkle-tree/test/utils/batchHash.ts b/packages/persistent-merkle-tree/test/utils/batchHash.ts new file mode 100644 index 00000000..b6dc87a6 --- /dev/null +++ b/packages/persistent-merkle-tree/test/utils/batchHash.ts @@ -0,0 +1,15 @@ +import { HashComputationLevel, getHashComputations } from "../../src/hashComputation"; +import { executeHashComputations } from "../../src/hasher"; +import { Node } from "../../src/node"; + +/** + * This is only a test utility function, don't want to use it in production because it allocates memory every time. 
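+ *
+ * Example (mirrors its use in tree.test.ts in this change):
+ *   batchHash(tree.rootNode);
+ *   expect(tree.root).to.be.deep.equal(tree2.root);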
+ */ +export function batchHash(node: Node): void { + const hashComputations: HashComputationLevel[] = []; + getHashComputations(node, 0, hashComputations); + executeHashComputations(hashComputations); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } +} \ No newline at end of file diff --git a/setHasher.mjs b/setHasher.mjs index 4643f73e..c210cdba 100644 --- a/setHasher.mjs +++ b/setHasher.mjs @@ -1,5 +1,5 @@ -// Set the hasher to as-sha256 -// Used to run benchmarks with with visibility into as-sha256 performance, useful for Lodestar +// Set the hasher to hashtree +// Used to run benchmarks with with visibility into hashtree performance, useful for Lodestar import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js"; -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js"; +import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js"; setHasher(hasher); From be68a65b38eedb944bee73a9dc36fdf5710366a3 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 24 Jul 2024 15:03:21 +0700 Subject: [PATCH 3/5] fix: HashComputationLevel.reset() --- packages/persistent-merkle-tree/src/hashComputation.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/persistent-merkle-tree/src/hashComputation.ts b/packages/persistent-merkle-tree/src/hashComputation.ts index d7f48a25..d6a8c7ab 100644 --- a/packages/persistent-merkle-tree/src/hashComputation.ts +++ b/packages/persistent-merkle-tree/src/hashComputation.ts @@ -55,7 +55,10 @@ export class HashComputationLevel { * run before every run */ reset(): void { - // keep this.head + // keep this.head object, only release the data + this.head.src0 = null as unknown as Node; + this.head.src1 = null as unknown as Node; + this.head.dest = null as unknown as Node; this.tail = null; this._length = 0; // totalLength is not reset From ed54907e32ccd72e84ead3e68997dcc7c850b0e8 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 3 Aug 2024 06:12:00 +0700 Subject: [PATCH 4/5] chore: add benchmark result --- .../test/perf/hashComputation.test.ts | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts b/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts index 38df8b62..f93c0e57 100644 --- a/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts @@ -1,12 +1,19 @@ import {itBench} from "@dapplion/benchmark"; import { HashComputation, HashComputationLevel, LeafNode, zeroHash } from "../../src"; +/** + * HashComputationLevel push then loop is faster than HashComputation[] push then loop + * This is on Mac M1: + * HashComputationLevel + ✓ HashComputationLevel.push then loop 58.75361 ops/s 17.02023 ms/op - 19 runs 0.835 s + ✓ HashComputation[] push then loop 36.51973 ops/s 27.38246 ms/op - 150 runs 4.63 s + */ describe("HashComputationLevel", function () { const src = LeafNode.fromRoot(zeroHash(0)); const dest = LeafNode.fromRoot(zeroHash(1)); const hashComp: HashComputation = {src0: src, src1: src, dest, next: null}; - const length = 1_000_000; + const length = 2_000_000; itBench({ id: "HashComputationLevel.push then loop", From 1e10df635c48ea62ad8bfcc9f43b9d314120b521 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 3 Aug 2024 06:39:24 +0700 Subject: [PATCH 5/5] fix: implement IterableIterator and add more comments --- .../src/hashComputation.ts | 32 ++++++++++++++----- 1 file 
changed, 24 insertions(+), 8 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hashComputation.ts b/packages/persistent-merkle-tree/src/hashComputation.ts index d6a8c7ab..8d3d8e33 100644 --- a/packages/persistent-merkle-tree/src/hashComputation.ts +++ b/packages/persistent-merkle-tree/src/hashComputation.ts @@ -21,7 +21,7 @@ export type HashComputation = { * Before every run, reset() should be called. * After every run, clean() should be called. */ -export class HashComputationLevel { +export class HashComputationLevel implements IterableIterator { private _length: number; private _totalLength: number; // use LinkedList to avoid memory allocation when the list grows @@ -143,8 +143,8 @@ export class HashComputationLevel { } /** - * Not great due to memory allocation. - * Mainly used for testing. + * Not great due to memory allocation, for testing only. + * This converts all HashComputation with data to an array. */ toArray(): HashComputation[] { const hashComps: HashComputation[] = []; @@ -155,7 +155,8 @@ export class HashComputationLevel { } /** - * For testing. + * For testing only. + * This dumps all backed HashComputation objects, note that some HashComputation may not have data. */ dump(): HashComputation[] { const hashComps: HashComputation[] = []; @@ -191,15 +192,30 @@ export class HashComputationGroup { /** * Get HashComputations from a root node all the way to the leaf nodes. + * hcByLevel is the global array to store HashComputationLevel at different levels + * at this ${node}, we only add more HashComputations starting from ${index} + * + * ╔═══ hcByLevel ══════╗ + * ║ level 0 ║ + * ║ level 1 ║ + * ║ ... ║ + * ║ ║ node + * ║ ║ / \ + * ║ level ${index} ║ 01 02 + * ║ ║ / \ / \ + * ║ level ${index + 1} ║ 03 04 05 06 + * ║ ║ + * ║ ... ║ + * ╚════════════════════╝ */ -export function getHashComputations(node: Node, offset: number, hcByLevel: HashComputationLevel[]): void { +export function getHashComputations(node: Node, index: number, hcByLevel: HashComputationLevel[]): void { if (node.h0 === null) { - const hashComputations = levelAtIndex(hcByLevel, offset); + const hashComputations = levelAtIndex(hcByLevel, index); const {left, right} = node; hashComputations.push(left, right, node); // leaf nodes should have h0 to stop the recursion - getHashComputations(left, offset + 1, hcByLevel); - getHashComputations(right, offset + 1, hcByLevel); + getHashComputations(left, index + 1, hcByLevel); + getHashComputations(right, index + 1, hcByLevel); } // else stop the recursion, node is hashed