
feat: implement HashComputationLevel using LinkedList #389

Merged · 5 commits · Aug 6, 2024
232 changes: 232 additions & 0 deletions packages/persistent-merkle-tree/src/hashComputation.ts
@@ -0,0 +1,232 @@
import type {Node} from "./node";

/**
* HashComputation to be used later to compute the hash of nodes from the bottom up.
* This is also an item of a linked list.
* ╔═════════════════════╗            ╔══════════════════════╗
* ║        dest         ║            ║      next_dest       ║
* ║       /    \        ║ ========>  ║      /       \       ║
* ║   src0      src1    ║            ║ next_src0  next_src1 ║
* ╚═════════════════════╝            ╚══════════════════════╝
*/
export type HashComputation = {
src0: Node;
src1: Node;
dest: Node;
next: HashComputation | null;
};

/**
* Models a HashComputation[] at the same level, supporting reuse of the same memory across runs.
* Before every run, reset() should be called.
* After every run, clean() should be called.
*/
export class HashComputationLevel implements IterableIterator<HashComputation> {
private _length: number;
private _totalLength: number;
// use a linked list to avoid memory allocation when the list grows
// always keep a fixed head, even when length is 0
private head: HashComputation;
private tail: HashComputation | null;
private pointer: HashComputation | null;

constructor() {
this._length = 0;
this._totalLength = 0;
this.head = {
src0: null as unknown as Node,
src1: null as unknown as Node,
dest: null as unknown as Node,
next: null,
};
this.tail = null;
this.pointer = null;
}

get length(): number {
return this._length;
}

get totalLength(): number {
return this._totalLength;
}

/**
* Reset the list before every run, keeping allocated items so they can be reused.
*/
reset(): void {
// keep this.head object, only release the data
this.head.src0 = null as unknown as Node;
this.head.src1 = null as unknown as Node;
this.head.dest = null as unknown as Node;
this.tail = null;
this._length = 0;
// totalLength is not reset
this.pointer = null;
}

/**
* Append a new HashComputation at the tail.
* This reuses the next pre-allocated item if available, or grows the list otherwise.
*/
push(src0: Node, src1: Node, dest: Node): void {
if (this.tail !== null) {
let newTail = this.tail.next;
if (newTail !== null) {
newTail.src0 = src0;
newTail.src1 = src1;
newTail.dest = dest;
} else {
// grow the list
newTail = {src0, src1, dest, next: null};
this.tail.next = newTail;
this._totalLength++;
}
this.tail = newTail;
this._length++;
return;
}

// first item
this.head.src0 = src0;
this.head.src1 = src1;
this.head.dest = dest;
this.tail = this.head;
this._length = 1;
if (this._totalLength === 0) {
this._totalLength = 1;
}
// else _totalLength > 0, do not set
}

/**
* Clean the list after every run.
* Items beyond the tail may still reference old Nodes; release them to avoid a memory leak.
*/
clean(): void {
let hc = this.tail?.next ?? null;
while (hc !== null) {
if (hc.src0 === null) {
// may have already been cleaned in a previous run, stop early
break;
}
hc.src0 = null as unknown as Node;
hc.src1 = null as unknown as Node;
hc.dest = null as unknown as Node;
hc = hc.next;
}
}

/**
* Implement Iterator for this class
*/
next(): IteratorResult<HashComputation> {
if (!this.pointer || this.tail === null) {
return {done: true, value: undefined};
}

// never yield value beyond the tail
const value = this.pointer;
const isNull = value.src0 === null;
this.pointer = this.pointer.next;

return isNull ? {done: true, value: undefined} : {done: false, value};
}

/**
* This is a convenience method to consume HashComputationLevel with a for-of loop.
* See the "next" method above for the actual iteration logic.
*/
[Symbol.iterator](): IterableIterator<HashComputation> {
this.pointer = this.head;
return this;
}

/**
* Not great due to memory allocation, for testing only.
* This converts all HashComputations with data to an array.
*/
toArray(): HashComputation[] {
const hashComps: HashComputation[] = [];
for (const hc of this) {
hashComps.push(hc);
}
return hashComps;
}

/**
* For testing only.
* This dumps all backing HashComputation objects; note that some may not have data.
*/
dump(): HashComputation[] {
const hashComps: HashComputation[] = [];
let hc: HashComputation | null = null;
for (hc = this.head; hc !== null; hc = hc.next) {
hashComps.push(hc);
}
return hashComps;
}
}
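
The lifecycle described in the comments above can be sketched as follows (illustrative only, not part of the diff; it assumes LeafNode/BranchNode from "./node" and the executeHashComputations helper re-exported by "./hasher"):

import {BranchNode, LeafNode} from "./node";
import {executeHashComputations} from "./hasher";
import {HashComputationLevel} from "./hashComputation";

const left = LeafNode.fromRoot(new Uint8Array(32));
const right = LeafNode.fromRoot(new Uint8Array(32).fill(1));
const parent = new BranchNode(left, right);

const level = new HashComputationLevel();

// run 1: reset, record work, execute it in a batch, then release node references
level.reset();
level.push(left, right, parent); // each item means dest = hash(src0, src1)
executeHashComputations([level]);
level.clean();

// run 2: the same linked-list items are reused; new items are only allocated if the list grows
level.reset();
level.push(left, right, parent);
console.log(level.length, level.totalLength); // 1 1
executeHashComputations([level]);
level.clean();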

/**
* Model HashComputationLevel[] at different levels.
*/
export class HashComputationGroup {
readonly byLevel: HashComputationLevel[];
constructor() {
this.byLevel = [];
}

reset(): void {
for (const level of this.byLevel) {
level.reset();
}
}

clean(): void {
for (const level of this.byLevel) {
level.clean();
}
}
}

/**
* Get HashComputations from a root node all the way to the leaf nodes.
* hcByLevel is the global array that stores a HashComputationLevel per tree level.
* At this ${node}, we only add more HashComputations starting from level ${index}.
*
* ╔═══ hcByLevel ══════╗
* ║ level 0            ║
* ║ level 1            ║
* ║ ...                ║
* ║                    ║          node
* ║                    ║         /    \
* ║ level ${index}     ║       01      02
* ║                    ║      /  \    /  \
* ║ level ${index + 1} ║    03    04 05    06
* ║                    ║
* ║ ...                ║
* ╚════════════════════╝
*/
export function getHashComputations(node: Node, index: number, hcByLevel: HashComputationLevel[]): void {
if (node.h0 === null) {
const hashComputations = levelAtIndex(hcByLevel, index);
const {left, right} = node;
hashComputations.push(left, right, node);
// leaf nodes should have h0 to stop the recursion
getHashComputations(left, index + 1, hcByLevel);
getHashComputations(right, index + 1, hcByLevel);
}

// else stop the recursion, node is hashed
}

/**
* Utility to get HashComputationLevel at a specific index.
*/
export function levelAtIndex(hcByLevel: HashComputationLevel[], index: number): HashComputationLevel {
if (hcByLevel[index] === undefined) {
hcByLevel[index] = new HashComputationLevel();
}
return hcByLevel[index];
}
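
Putting the pieces of this file together: getHashComputations buckets the unhashed part of a tree into per-level lists, and a hasher then executes them bottom-up. A minimal end-to-end sketch, assuming BranchNode/LeafNode from "./node" and executeHashComputations from "./hasher" (illustrative, not part of the diff):

import {BranchNode, LeafNode} from "./node";
import {executeHashComputations} from "./hasher";
import {HashComputationGroup, getHashComputations} from "./hashComputation";

// a small unhashed subtree: root -> (a, b), where a and b are branch nodes over leaves
const leaf = (byte: number): LeafNode => LeafNode.fromRoot(new Uint8Array(32).fill(byte));
const a = new BranchNode(leaf(1), leaf(2));
const b = new BranchNode(leaf(3), leaf(4));
const root = new BranchNode(a, b);

const group = new HashComputationGroup();

group.reset();
// walk the tree once: level 0 holds the root computation, level 1 holds those of a and b
getHashComputations(root, 0, group.byLevel);
// hashers process levels from the bottom up so sources are hashed before their parents
executeHashComputations(group.byLevel);
group.clean();

console.log(root.root); // the 32-byte root is now cached on the node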
9 changes: 6 additions & 3 deletions packages/persistent-merkle-tree/src/hasher/as-sha256.ts
@@ -6,7 +6,8 @@ import {
hashInto,
} from "@chainsafe/as-sha256";
import type {Hasher} from "./types";
import {HashComputation, Node} from "../node";
import {Node} from "../node";
import type {HashComputationLevel} from "../hashComputation";
import {doDigestNLevel, doMerkleizeInto} from "./util";

export const hasher: Hasher = {
@@ -19,7 +20,7 @@
digestNLevel(data: Uint8Array, nLevel: number): Uint8Array {
return doDigestNLevel(data, nLevel, hashInto);
},
executeHashComputations: (hashComputations: HashComputation[][]) => {
executeHashComputations: (hashComputations: HashComputationLevel[]) => {
for (let level = hashComputations.length - 1; level >= 0; level--) {
const hcArr = hashComputations[level];
if (!hcArr) {
@@ -46,7 +47,8 @@ export const hasher: Hasher = {
let src1_3: Node | null = null;
let dest3: Node | null = null;

for (const [i, hc] of hcArr.entries()) {
let i = 0;
for (const hc of hcArr) {
const indexInBatch = i % 4;

switch (indexInBatch) {
@@ -121,6 +123,7 @@ export const hasher: Hasher = {
default:
throw Error(`Unexpected indexInBatch ${indexInBatch}`);
}
i++;
}

// remaining
25 changes: 14 additions & 11 deletions packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -1,6 +1,7 @@
import {hashInto} from "@chainsafe/hashtree";
import {Hasher, HashObject} from "./types";
import {HashComputation, Node} from "../node";
import {Node} from "../node";
import type {HashComputationLevel} from "../hashComputation";
import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject";
import {doDigestNLevel, doMerkleizeInto} from "./util";

@@ -20,6 +21,8 @@ const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32);
// convenient reusable Uint8Array for hash64
const hash64Input = uint8Input.subarray(0, 64);
const hash64Output = uint8Output.subarray(0, 32);
// preallocated dest nodes, reused for each batch of PARALLEL_FACTOR hash computations
const destNodes: Node[] = new Array<Node>(PARALLEL_FACTOR);

export const hasher: Hasher = {
name: "hashtree",
@@ -43,7 +46,7 @@ export const hasher: Hasher = {
digestNLevel(data: Uint8Array, nLevel: number): Uint8Array {
return doDigestNLevel(data, nLevel, hashInto);
},
executeHashComputations(hashComputations: HashComputation[][]): void {
executeHashComputations(hashComputations: HashComputationLevel[]): void {
for (let level = hashComputations.length - 1; level >= 0; level--) {
const hcArr = hashComputations[level];
if (!hcArr) {
@@ -56,25 +59,25 @@
continue;
}

// size input array to 2 HashObject per computation * 32 bytes per object
// const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32));
let destNodes: Node[] = [];

// hash every 16 inputs at once to avoid memory allocation
for (const [i, {src0, src1, dest}] of hcArr.entries()) {
let i = 0;
for (const {src0, src1, dest} of hcArr) {
if (!src0 || !src1 || !dest) {
throw new Error(`Invalid HashComputation at index ${i}`);
}
const indexInBatch = i % PARALLEL_FACTOR;
const offset = indexInBatch * 16;

hashObjectToUint32Array(src0, uint32Input, offset);
hashObjectToUint32Array(src1, uint32Input, offset + 8);
destNodes.push(dest);
destNodes[indexInBatch] = dest;
if (indexInBatch === PARALLEL_FACTOR - 1) {
hashInto(uint8Input, uint8Output);
for (const [j, destNode] of destNodes.entries()) {
byteArrayIntoHashObject(uint8Output, j * 32, destNode);
}
destNodes = [];
}
i++;
}

const remaining = hcArr.length % PARALLEL_FACTOR;
@@ -84,8 +87,8 @@
const remainingOutput = uint8Output.subarray(0, remaining * 32);
hashInto(remainingInput, remainingOutput);
// destNodes was prepared above
for (const [i, destNode] of destNodes.entries()) {
byteArrayIntoHashObject(remainingOutput, i * 32, destNode);
for (let j = 0; j < remaining; j++) {
byteArrayIntoHashObject(remainingOutput, j * 32, destNodes[j]);
}
}
}
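
The hashtree changes above keep the batching pattern: fill a reusable input buffer with up to PARALLEL_FACTOR computations, call hashInto once per full batch, then flush the remainder. Reduced to a standalone sketch (an illustration under the assumption that hashInto hashes each 64-byte input chunk into a 32-byte output chunk, as used above; not the library's actual code):

import {hashInto} from "@chainsafe/hashtree";

const PARALLEL_FACTOR = 16;
const input = new Uint8Array(PARALLEL_FACTOR * 64);
const output = new Uint8Array(PARALLEL_FACTOR * 32);

// hash consecutive pairs of 32-byte values, PARALLEL_FACTOR pairs per hashInto call
function hashPairs(values: Uint8Array[]): Uint8Array[] {
  const roots: Uint8Array[] = [];
  for (let i = 0; i < values.length; i += 2) {
    const indexInBatch = (i / 2) % PARALLEL_FACTOR;
    input.set(values[i], indexInBatch * 64);
    input.set(values[i + 1], indexInBatch * 64 + 32);
    if (indexInBatch === PARALLEL_FACTOR - 1) {
      // full batch: hash all PARALLEL_FACTOR pairs at once into the reusable output buffer
      hashInto(input, output);
      for (let j = 0; j < PARALLEL_FACTOR; j++) {
        roots.push(output.slice(j * 32, (j + 1) * 32));
      }
    }
  }
  // remainder: hash only the filled prefix of the buffers
  const remaining = (values.length / 2) % PARALLEL_FACTOR;
  if (remaining > 0) {
    hashInto(input.subarray(0, remaining * 64), output.subarray(0, remaining * 32));
    for (let j = 0; j < remaining; j++) {
      roots.push(output.slice(j * 32, (j + 1) * 32));
    }
  }
  return roots;
}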
4 changes: 2 additions & 2 deletions packages/persistent-merkle-tree/src/hasher/index.ts
@@ -1,6 +1,6 @@
import {Hasher} from "./types";
import {hasher as nobleHasher} from "./noble";
import type {HashComputation} from "../node";
import type {HashComputationLevel} from "../hashComputation";

export * from "./types";
export * from "./util";
@@ -31,6 +31,6 @@ export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Arr
hasher.merkleizeInto(data, padFor, output, offset);
}

export function executeHashComputations(hashComputations: HashComputation[][]): void {
export function executeHashComputations(hashComputations: HashComputationLevel[]): void {
hasher.executeHashComputations(hashComputations);
}
4 changes: 2 additions & 2 deletions packages/persistent-merkle-tree/src/hasher/types.ts
@@ -1,5 +1,5 @@
import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject";
import type {HashComputation} from "../node";
import type {HashComputationLevel} from "../hashComputation";

export type {HashObject};

@@ -29,5 +29,5 @@ export type Hasher = {
/**
* Execute a batch of HashComputations
*/
executeHashComputations(hashComputations: HashComputation[][]): void;
executeHashComputations(hashComputations: HashComputationLevel[]): void;
};
1 change: 1 addition & 0 deletions packages/persistent-merkle-tree/src/index.ts
@@ -1,6 +1,7 @@
export * from "./gindex";
export * from "./hasher";
export * from "./node";
export * from "./hashComputation";
export * from "./packedNode";
export * from "./proof";
export * from "./subtree";