fix: tests to allow for async initialization of hasher
matthewkeil committed Jan 5, 2025
1 parent 62a9576 commit ddd5c17
Showing 5 changed files with 159 additions and 130 deletions.
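In short: hasher construction can now be asynchronous, so each affected suite builds its hasher in an async mocha before() hook instead of at module load, and benchmarks that had been commented out pending that setup are re-enabled. A minimal sketch of the pattern, assuming the package entry point exports AssemblyScriptSha256Hasher the way the tests below consume it (the actual import lines sit outside the shown hunks):

import {itBench} from "@dapplion/benchmark";
import {AssemblyScriptSha256Hasher} from "@chainsafe/as-sha256";

describe("async hasher setup (sketch)", () => {
  let sha256: AssemblyScriptSha256Hasher;

  // WASM-backed hashers need async instantiation; do it once per suite.
  before(async function () {
    sha256 = await AssemblyScriptSha256Hasher.initialize();
  });

  itBench("digest64", () => {
    // By the time benchmarks run, initialize() has resolved.
    sha256.digest64(Buffer.alloc(64, 0xab));
  });
});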
53 changes: 32 additions & 21 deletions packages/as-sha256/test/perf/index.test.ts
@@ -16,6 +16,7 @@ describe("digestTwoHashObjects vs digest64 vs digest", () => {
sha256 = await AssemblyScriptSha256Hasher.initialize();
});

const input = Buffer.from("gajindergajindergajindergajindergajindergajindergajindergajinder", "utf8");
const input1 = "gajindergajindergajindergajinder";
const input2 = "gajindergajindergajindergajinder";
const buffer1 = Buffer.from(input1, "utf-8");
@@ -32,22 +33,27 @@ describe("digestTwoHashObjects vs digest64 vs digest", () => {
for (let j = 0; j < iterations; j++) sha256.digest2Bytes32(buffer1, buffer2);
});

// itBench(`digest ${iterations} times`, () => {
// for (let j = 0; j < iterations; j++) sha256.digest(input);
// });
itBench(`digest ${iterations} times`, () => {
for (let j = 0; j < iterations; j++) sha256.digest(input);
});
});

// describe("digest different Buffers", () => {
// const randomBuffer = (length: number): Uint8Array =>
// Buffer.from(Array.from({length}, () => Math.round(Math.random() * 255)));
describe("digest different Buffers", () => {
let sha256: AssemblyScriptSha256Hasher;
before(async function () {
sha256 = await AssemblyScriptSha256Hasher.initialize();
});

const randomBuffer = (length: number): Uint8Array =>
Buffer.from(Array.from({length}, () => Math.round(Math.random() * 255)));

// for (const length of [32, 64, 128, 256, 512, 1024]) {
// const buffer = randomBuffer(length);
// itBench(`input length ${length}`, () => {
// sha256.digest(buffer);
// });
// }
// });
for (const length of [32, 64, 128, 256, 512, 1024]) {
const buffer = randomBuffer(length);
itBench(`input length ${length}`, () => {
sha256.digest(buffer);
});
}
});

/**
* time java: 2968 336927.2237196765 hashes/sec
@@ -57,16 +63,21 @@
* digest 1000000 times 0.8279731 ops/s 1.207769 s/op - 82 runs 100 s
* => we are at 8279731 hashes/sec
*/
// describe("hash - compare to java", () => {
// // java statistic for same test: https://gist.github.com/scoroberts/a60d61a2cc3afba1e8813b338ecd1501
describe("hash - compare to java", () => {
// java statistic for same test: https://gist.github.com/scoroberts/a60d61a2cc3afba1e8813b338ecd1501

// const iterations = 1000000;
// const input = Buffer.from("lwkjt23uy45pojsdf;lnwo45y23po5i;lknwe;lknasdflnqw3uo5", "utf8");
let sha256: AssemblyScriptSha256Hasher;
before(async function () {
sha256 = await AssemblyScriptSha256Hasher.initialize();
});

// itBench(`digest ${iterations} times`, () => {
// for (let i = 0; i < iterations; i++) sha256.digest(input);
// });
// });
const iterations = 1000000;
const input = Buffer.from("lwkjt23uy45pojsdf;lnwo45y23po5i;lknwe;lknasdflnqw3uo5", "utf8");

itBench(`digest ${iterations} times`, () => {
for (let i = 0; i < iterations; i++) sha256.digest(input);
});
});

// Aug 10 2021
// utils
18 changes: 9 additions & 9 deletions packages/as-sha256/test/perf/simd.test.ts
@@ -23,19 +23,19 @@ describe("digest64 vs batchHash4UintArray64s vs digest64HashObjects vs batchHash
sha256 = await AssemblyScriptSha256Hasher.initialize();
});

// const input = Buffer.from("gajindergajindergajindergajindergajindergajindergajindergajinder", "utf8");
const input = Buffer.from("gajindergajindergajindergajindergajindergajindergajindergajinder", "utf8");
// total number of time running hash for 200000 balances
const iterations = 50023;
// itBench(`digest64 ${iterations * 4} times`, () => {
// for (let j = 0; j < iterations * 4; j++) sha256.digest64(input);
// });
itBench(`digest64 ${iterations * 4} times`, () => {
for (let j = 0; j < iterations * 4; j++) sha256.digest64(input);
});

// // batchHash4UintArray64s do 4 sha256 in parallel
// itBench(`hash ${iterations * 4} times using batchHash4UintArray64s`, () => {
// for (let j = 0; j < iterations; j++) {
// sha256.batchHash4UintArray64s([input, input, input, input]);
// }
// });
itBench(`hash ${iterations * 4} times using batchHash4UintArray64s`, () => {
for (let j = 0; j < iterations; j++) {
sha256.batchHash4UintArray64s([input, input, input, input]);
}
});

const hashObject = byteArrayToHashObject(Buffer.from("gajindergajindergajindergajinder", "utf8"), 0);
itBench(`digest64HashObjects ${iterations * 4} times`, () => {
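For context, the batch call benchmarked above hashes four independent 64-byte inputs per invocation, one per SIMD lane. A hedged usage sketch, reusing the suite's initialized sha256 and 64-byte input; the return shape is inferred from how the benchmark drives the API, not confirmed here:

const roots = sha256.batchHash4UintArray64s([input, input, input, input]);
// Assumed: four 32-byte digests, expected to match four sequential
// sha256.digest64(input) calls on the same inputs.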
@@ -1,5 +1,5 @@
import {itBench} from "@dapplion/benchmark";
import { HashComputation, HashComputationLevel, LeafNode, zeroHash } from "../../src/index.js";
import {HashComputation, HashComputationLevel, LeafNode, zeroHash} from "../../src/index.js";

/**
* HashComputationLevel push then loop is faster than HashComputation[] push then loop
@@ -30,7 +30,7 @@ describe("HashComputationLevel", function () {
for (const hc of level) {
const {src0, src1, dest} = hc;
}
}
},
});

itBench({
@@ -43,6 +43,6 @@
for (const hc of level) {
const {src0, src1, dest} = hc;
}
}
})
},
});
});
4 changes: 2 additions & 2 deletions packages/persistent-merkle-tree/test/perf/hasher.test.ts
@@ -93,9 +93,9 @@ describe("hashtree", function () {

itBench({
id: `get root`,
beforeEach: () => {
beforeEach: async () => {
const [tree] = buildComparisonTrees(16);
setHasher(hashtreeHasher);
await setHasher(hashtreeHasher);
return tree;
},
fn: (tree) => {
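The beforeEach hook above turns async because setHasher may now have to initialize the hasher it installs. A sketch of what such an async setHasher could look like, assuming a module-level hasher slot and the optional initialize() the unit tests below probe for; the real implementation is outside this diff:

import type {Hasher} from "../../src/index.js";

// Hypothetical active-hasher slot; hashing entry points would delegate to it.
let current: Hasher;

export async function setHasher(next: Hasher): Promise<void> {
  // Await optional async setup (e.g. WASM instantiation) before swapping
  // the active hasher, so callers can rely on it being ready.
  if (typeof next.initialize === "function") {
    await next.initialize();
  }
  current = next;
}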
206 changes: 112 additions & 94 deletions packages/persistent-merkle-tree/test/unit/hasher.test.ts
@@ -4,13 +4,28 @@ import {hasher as nobleHasher} from "../../src/hasher/noble.js";
import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256.js";
import {hasher as hashtreeHasher} from "../../src/hasher/hashtree.js";
import {buildComparisonTrees} from "../utils/tree.js";
import {HashComputationLevel, HashObject, LeafNode, getHashComputations, subtreeFillToContents} from "../../src/index.js";
import { expect } from "chai";
import { zeroHash } from "../../src/zeroHash.js";

const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher];
import {
HashComputationLevel,
HashObject,
Hasher,
LeafNode,
getHashComputations,
subtreeFillToContents,
} from "../../src/index.js";
import {expect} from "chai";
import {zeroHash} from "../../src/zeroHash.js";

describe("hashers", function () {
const hashers: Hasher[] = [hashtreeHasher, asSha256Hasher, nobleHasher];

before(async function () {
for (const hasher of hashers) {
if (typeof hasher.initialize === "function") {
await hasher.initialize();
}
}
});

describe("digest64 vs digest64HashObjects methods should be the same", () => {
for (const hasher of hashers) {
it(`${hasher.name} hasher`, () => {
@@ -44,10 +59,10 @@ describe("hashers", function () {
const root2 = Buffer.alloc(32, 0xff);
const hashObject2 = uint8ArrayToHashObject(root2);
const ho1 = {} as HashObject;
nobleHasher.digest64HashObjects(hashObject1, hashObject2, ho1)
nobleHasher.digest64HashObjects(hashObject1, hashObject2, ho1);
const hash1 = hashObjectToUint8Array(ho1);
const ho2 = {} as HashObject;
asSha256Hasher.digest64HashObjects(hashObject1, hashObject2, ho2)
asSha256Hasher.digest64HashObjects(hashObject1, hashObject2, ho2);
const hash2 = hashObjectToUint8Array(ho2);
const ho3 = {} as HashObject;
hashtreeHasher.digest64HashObjects(hashObject1, hashObject2, ho3);
@@ -67,103 +82,106 @@
});
}
});
});

describe("hasher.digestNLevel", function () {
const hashers = [nobleHasher, hashtreeHasher, asSha256Hasher];
for (const hasher of hashers) {
const numValidators = [1, 2, 3, 4];
for (const numValidator of numValidators) {
it (`${hasher.name} digestNLevel ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => {
const nodes = Array.from({length: 8 * numValidator}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i + numValidator)));
const hashInput = Buffer.concat(nodes.map((node) => node.root));
const hashOutput = hasher.digestNLevel(hashInput, 3).slice();
for (let i = 0; i < numValidator; i++) {
const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root;
expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root);
}
});
describe("hasher.digestNLevel", function () {
for (const hasher of hashers) {
const numValidators = [1, 2, 3, 4];
for (const numValidator of numValidators) {
it(`${hasher.name} digestNLevel ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => {
const nodes = Array.from({length: 8 * numValidator}, (_, i) =>
LeafNode.fromRoot(Buffer.alloc(32, i + numValidator))
);
const hashInput = Buffer.concat(nodes.map((node) => node.root));
const hashOutput = hasher.digestNLevel(hashInput, 3).slice();
for (let i = 0; i < numValidator; i++) {
const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root;
expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root);
}
});
}
}
}
});


describe("hasher.merkleizeBlocksBytes", function () {
const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) {
it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
const data = Buffer.alloc(63, 0);
const output = Buffer.alloc(32);
expect(() => hasher.merkleizeBlocksBytes(data, 2, output, 0)).to.throw("Invalid input length");
});

for (const numNode of numNodes) {
it(`${hasher.name}.merkleizeBlocksBytes for ${numNode} nodes`, () => {
});

const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
const data = Buffer.concat(nodes.map((node) => node.root));
describe("hasher.merkleizeBlocksBytes", function () {
const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
for (const hasher of hashers) {
it(`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
const data = Buffer.alloc(63, 0);
const output = Buffer.alloc(32);
const chunkCount = Math.max(numNode, 1);
const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
hasher.merkleizeBlocksBytes(padData, chunkCount, output, 0);
const depth = Math.ceil(Math.log2(chunkCount));
const root = subtreeFillToContents(nodes, depth).root;
expectEqualHex(output, root);
expect(() => hasher.merkleizeBlocksBytes(data, 2, output, 0)).to.throw("Invalid input length");
});
}
}
});

/**
* The same to the previous test, but using the merkleizeBlockArray method
*/
describe("hasher.merkleizeBlockArray", function () {
for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) {
it (`${hasher.name} should throw error if invalid blockLimit`, () => {
const data = Buffer.alloc(64, 0);
const output = Buffer.alloc(32);
expect(() => hasher.merkleizeBlockArray([data], 2, 2, output, 0)).to.throw("Invalid blockLimit, expect to be less than or equal blocks.length 1, got 2");
});

it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
const data = Buffer.alloc(63, 0);
const output = Buffer.alloc(32);
expect(() => hasher.merkleizeBlockArray([data], 1, 2, output, 0)).to.throw("Invalid block length, expect to be 64 bytes, got 63");
});
for (const numNode of numNodes) {
it(`${hasher.name}.merkleizeBlocksBytes for ${numNode} nodes`, () => {
const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
const data = Buffer.concat(nodes.map((node) => node.root));
const output = Buffer.alloc(32);
const chunkCount = Math.max(numNode, 1);
const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
hasher.merkleizeBlocksBytes(padData, chunkCount, output, 0);
const depth = Math.ceil(Math.log2(chunkCount));
const root = subtreeFillToContents(nodes, depth).root;
expectEqualHex(output, root);
});
}
}
});

it (`${hasher.name} should throw error if chunkCount < 1`, () => {
const data = Buffer.alloc(64, 0);
const output = Buffer.alloc(32);
const chunkCount = 0;
expect(() => hasher.merkleizeBlockArray([data], 1, chunkCount, output, 0)).to.throw("Invalid padFor, expect to be at least 1, got 0");
});
/**
* The same to the previous test, but using the merkleizeBlockArray method
*/
describe("hasher.merkleizeBlockArray", function () {
for (const hasher of hashers) {
it(`${hasher.name} should throw error if invalid blockLimit`, () => {
const data = Buffer.alloc(64, 0);
const output = Buffer.alloc(32);
expect(() => hasher.merkleizeBlockArray([data], 2, 2, output, 0)).to.throw(
"Invalid blockLimit, expect to be less than or equal blocks.length 1, got 2"
);
});

// hashtree has a buffer of 16 * 64 bytes = 32 nodes
const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79];
for (const numNode of numNodes) {
it(`${hasher.name}.merkleizeBlockArray for ${numNode} nodes`, () => {
it(`${hasher.name} should throw error if not multiple of 64 bytes`, () => {
const data = Buffer.alloc(63, 0);
const output = Buffer.alloc(32);
expect(() => hasher.merkleizeBlockArray([data], 1, 2, output, 0)).to.throw(
"Invalid block length, expect to be 64 bytes, got 63"
);
});

const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
const data = Buffer.concat(nodes.map((node) => node.root));
it(`${hasher.name} should throw error if chunkCount < 1`, () => {
const data = Buffer.alloc(64, 0);
const output = Buffer.alloc(32);
// depth of 79 nodes are 7, make it 10 to test the padding
const chunkCount = Math.max(numNode, 10);
const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
expect(padData.length % 64).to.equal(0);
const blocks: Uint8Array[] = [];
for (let i = 0; i < padData.length; i += 64) {
blocks.push(padData.slice(i, i + 64));
}
const blockLimit = blocks.length;
// should be able to run with above blocks, however add some redundant blocks similar to the consumer
blocks.push(Buffer.alloc(64, 1));
blocks.push(Buffer.alloc(64, 2));
hasher.merkleizeBlockArray(blocks, blockLimit, chunkCount, output, 0);
const depth = Math.ceil(Math.log2(chunkCount));
const root = subtreeFillToContents(nodes, depth).root;
expectEqualHex(output, root);
const chunkCount = 0;
expect(() => hasher.merkleizeBlockArray([data], 1, chunkCount, output, 0)).to.throw(
"Invalid padFor, expect to be at least 1, got 0"
);
});

// hashtree has a buffer of 16 * 64 bytes = 32 nodes
const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79];
for (const numNode of numNodes) {
it(`${hasher.name}.merkleizeBlockArray for ${numNode} nodes`, () => {
const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
const data = Buffer.concat(nodes.map((node) => node.root));
const output = Buffer.alloc(32);
// depth of 79 nodes are 7, make it 10 to test the padding
const chunkCount = Math.max(numNode, 10);
const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
expect(padData.length % 64).to.equal(0);
const blocks: Uint8Array[] = [];
for (let i = 0; i < padData.length; i += 64) {
blocks.push(padData.slice(i, i + 64));
}
const blockLimit = blocks.length;
// should be able to run with above blocks, however add some redundant blocks similar to the consumer
blocks.push(Buffer.alloc(64, 1));
blocks.push(Buffer.alloc(64, 2));
hasher.merkleizeBlockArray(blocks, blockLimit, chunkCount, output, 0);
const depth = Math.ceil(Math.log2(chunkCount));
const root = subtreeFillToContents(nodes, depth).root;
expectEqualHex(output, root);
});
}
}
}
});
});
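Taken together, the updated unit tests exercise roughly the following Hasher surface. This is an approximation reconstructed only from usage shown in this diff; the package's actual interface definition lives outside these hunks:

import type {HashObject} from "../../src/index.js";

interface Hasher {
  name: string;
  // Optional async setup (e.g. WASM instantiation); the before() hook
  // above guards it with a typeof check before awaiting it.
  initialize?: () => Promise<void>;
  digest64HashObjects(left: HashObject, right: HashObject, out: HashObject): void;
  digestNLevel(data: Uint8Array, nLevel: number): Uint8Array;
  merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
  merkleizeBlockArray(blocks: Uint8Array[], blockLimit: number, padFor: number, output: Uint8Array, offset: number): void;
}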
