From 2380430cf07b5e9b746d0af2cc30b846d2a7c172 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 15 Apr 2024 14:44:32 +0700 Subject: [PATCH 001/113] feat: initial implementation of batch hash --- .../src/hasher/as-sha256.ts | 3 +- .../src/hasher/index.ts | 6 +- .../src/hasher/noble.ts | 3 + .../src/hasher/types.ts | 1 + packages/persistent-merkle-tree/src/node.ts | 76 ++++++++++++++++ packages/persistent-merkle-tree/src/tree.ts | 7 ++ .../test/perf/validators.test.ts | 91 +++++++++++++------ .../test/unit/batchHash.test.ts | 48 ++++++++++ .../test/unit/tree.test.ts | 3 +- 9 files changed, 205 insertions(+), 33 deletions(-) create mode 100644 packages/persistent-merkle-tree/test/unit/batchHash.test.ts diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 07095345..141e2c9f 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,7 +1,8 @@ -import {digest2Bytes32, digest64HashObjects} from "@chainsafe/as-sha256"; +import {digest2Bytes32, digest64HashObjects, hash8HashObjects} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; export const hasher: Hasher = { digest64: digest2Bytes32, digest64HashObjects, + hash8HashObjects, }; diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index d6e99ce3..81f26444 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -1,5 +1,6 @@ import {Hasher} from "./types"; -import {hasher as nobleHasher} from "./noble"; +// import {hasher as nobleHasher} from "./noble"; +import {hasher as csHasher} from "./as-sha256"; export {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; export * from "./types"; @@ -8,7 +9,8 @@ export * from "./util"; /** * Hasher used across the SSZ codebase */ -export let hasher: Hasher = nobleHasher; +// export let hasher: Hasher = nobleHasher; +export let hasher: Hasher = csHasher; /** * Set the hasher to be used across the SSZ codebase diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 7877f97e..8dcd8c1a 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -7,4 +7,7 @@ const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().u export const hasher: Hasher = { digest64, digest64HashObjects: (a, b) => uint8ArrayToHashObject(digest64(hashObjectToUint8Array(a), hashObjectToUint8Array(b))), + hash8HashObjects: () => { + throw Error("not implemented"); + }, }; diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 9691ddb9..9566d7cd 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -9,4 +9,5 @@ export type Hasher = { * Hash two 32-byte HashObjects */ digest64HashObjects(a: HashObject, b: HashObject): HashObject; + hash8HashObjects(inputs: HashObject[]): HashObject[]; }; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 48e820e0..895877d0 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -3,6 +3,12 @@ import {hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from "./hasher"; const TWO_POWER_32 = 2 ** 
32;
 
+type HashComputation = {
+  src0: Node;
+  src1: Node;
+  dest: Node;
+};
+
 /**
  * An immutable binary merkle tree node
  */
@@ -70,6 +76,76 @@ export class BranchNode extends Node {
     }
   }
 
+  // TODO: private, unit tests
+  getHashComputation(level: number, hashCompsByLevel: Map<number, HashComputation[]>): void {
+    if (this.h0 === null) {
+      let hashComputations = hashCompsByLevel.get(level);
+      if (hashComputations === undefined) {
+        hashComputations = [];
+        hashCompsByLevel.set(level, hashComputations);
+      }
+      hashComputations.push({src0: this.left, src1: this.right, dest: this});
+      if (!this.left.isLeaf()) {
+        (this.left as BranchNode).getHashComputation(level + 1, hashCompsByLevel);
+      }
+      if (!this.right.isLeaf()) {
+        (this.right as BranchNode).getHashComputation(level + 1, hashCompsByLevel);
+      }
+
+      return;
+    }
+
+    // else stop the recursion, LeafNode should have h0
+  }
+
+  batchHash(): Uint8Array {
+    const hashCompsByLevel = new Map<number, HashComputation[]>();
+    this.getHashComputation(0, hashCompsByLevel);
+    const levelsDesc = Array.from(hashCompsByLevel.keys()).sort((a, b) => b - a);
+    for (const level of levelsDesc) {
+      const hcArr = hashCompsByLevel.get(level);
+      if (!hcArr) {
+        // should not happen
+        throw Error(`no hash computations for level ${level}`);
+      }
+      // HashComputations of the same level are safe to batch
+      const batch = Math.floor(hcArr.length / 4);
+      for (let i = 0; i < batch; i++) {
+        const item0 = hcArr[i * 4];
+        const item1 = hcArr[i * 4 + 1];
+        const item2 = hcArr[i * 4 + 2];
+        const item3 = hcArr[i * 4 + 3];
+
+        const [dest0, dest1, dest2, dest3] = hasher.hash8HashObjects([
+          item0.src0,
+          item0.src1,
+          item1.src0,
+          item1.src1,
+          item2.src0,
+          item2.src1,
+          item3.src0,
+          item3.src1,
+        ]);
+
+        item0.dest.applyHash(dest0);
+        item1.dest.applyHash(dest1);
+        item2.dest.applyHash(dest2);
+        item3.dest.applyHash(dest3);
+      }
+      // compute remaining separately
+      const remLen = hcArr.length % 4;
+      for (let i = remLen - 1; i >= 0; i--) {
+        const {src0, src1, dest} = hcArr[hcArr.length - i - 1];
+        dest.applyHash(hasher.digest64HashObjects(src0, src1));
+      }
+    }
+
+    if (this.h0 === null) {
+      throw Error("Root is not computed by batch");
+    }
+    return this.root;
+  }
+
   get rootHashObject(): HashObject {
     if (this.h0 === null) {
       super.applyHash(hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject));
diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts
index 47fdde83..32d38582 100644
--- a/packages/persistent-merkle-tree/src/tree.ts
+++ b/packages/persistent-merkle-tree/src/tree.ts
@@ -73,6 +73,13 @@ export class Tree {
     return this.rootNode.root;
   }
 
+  batchHash(): Uint8Array {
+    if (!this.rootNode.isLeaf()) {
+      return (this.rootNode as BranchNode).batchHash();
+    }
+    return this.root;
+  }
+
   /**
    * Return a copy of the tree
    */
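Note: a minimal usage sketch of the API added above (not part of the patch; it assumes `Tree`, `BranchNode` and `LeafNode` are exported as shown in these diffs). `batchHash()` must return exactly the same 32-byte root as the plain `root` getter; only the hashing strategy differs:

```ts
import {BranchNode, LeafNode, Tree} from "@chainsafe/persistent-merkle-tree";

function buildTree(): Tree {
  const leaves = Array.from({length: 4}, (_, i) => LeafNode.fromRoot(new Uint8Array(32).fill(i + 1)));
  return new Tree(new BranchNode(new BranchNode(leaves[0], leaves[1]), new BranchNode(leaves[2], leaves[3])));
}

const batched = buildTree().batchHash(); // levels hashed deepest-first, 4 sibling pairs per hash8HashObjects() call
const regular = buildTree().root;        // node-by-node digest64HashObjects()
// `batched` and `regular` are equal roots; two fresh trees are used because hashing caches h0..h7 on the nodes
```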
diff --git a/packages/persistent-merkle-tree/test/perf/validators.test.ts b/packages/persistent-merkle-tree/test/perf/validators.test.ts
index ff116f7c..a9b316f8 100644
--- a/packages/persistent-merkle-tree/test/perf/validators.test.ts
+++ b/packages/persistent-merkle-tree/test/perf/validators.test.ts
@@ -1,66 +1,99 @@
-import {itBench} from "@dapplion/benchmark";
-import {BranchNode, LeafNode, subtreeFillToContents, Node, countToDepth} from "../../src";
+import {itBench, setBenchOpts} from "@dapplion/benchmark";
+import {BranchNode, LeafNode, subtreeFillToContents, Node, countToDepth, zeroNode} from "../../src";
 import {MemoryTracker} from "../utils/memTracker";
 
+/**
+ * Below is measured on Mac M1.
+ * It takes less than 10% of the original hashTreeRoot() time to traverse the tree.
+ * The remaining time depends on how much better the batch hash is.
+ * Below it shows: `batchHash = 101ms + 0.81 * hash` where 101ms is the time to traverse and precompute hash computations by level
+   Track the performance of validators
+    ✓ 1600000 validators root getter                                      0.8134348 ops/s    1.229355  s/op   -         12 runs   16.4 s
+    ✓ 1600000 validators batchHash()                                      0.9135884 ops/s    1.094585  s/op   -         13 runs   15.8 s
+    ✓ 1600000 validators hashComputations                                 9.857173 ops/s    101.4490 ms/op   -         17 runs   2.90 s
+
+ * Referring to the SIMD benchmarks, it shows `batchHash = 0.81 * hash`
+   digest64 vs hash4Inputs vs hash8HashObjects
+    ✓ digest64 50023 times                                                27.09631 ops/s    36.90539 ms/op   -        259 runs   10.1 s
+    ✓ hash 200092 times using hash4Inputs                                 8.393366 ops/s    119.1417 ms/op   -         81 runs   10.2 s
+    ✓ hash 200092 times using hash8HashObjects                            8.433091 ops/s    118.5805 ms/op   -         81 runs   10.2 s
+ */
 describe("Track the performance of validators", () => {
+  setBenchOpts({
+    maxMs: 2 * 60 * 1000,
+  });
   if (global.gc) {
     global.gc();
   }
 
   const tracker = new MemoryTracker();
   tracker.logDiff("Start");
-  const node = createValidatorList(250_000);
+  // const vc = 250_000;
+  const vc = 1_600_000;
+  // see createValidatorList
+  const depth = countToDepth(BigInt(vc)) + 1;
+  // cache roots of zero nodes
+  zeroNode(depth).root;
+  const node = createValidatorList(vc);
   tracker.logDiff("Create validator tree");
 
   node.root;
   tracker.logDiff("Calculate tree root");
 
   itBench({
-    id: "250k validators",
+    id: `${vc} validators root getter`,
     beforeEach: () => {
-      resetNodes(node);
+      resetNodes(node, depth);
       return node;
     },
     fn: (node) => {
      node.root;
    },
  });
+
+  itBench({
+    id: `${vc} validators batchHash()`,
+    beforeEach: () => {
+      resetNodes(node, depth);
+      return node;
+    },
+    fn: (node) => {
+      (node as BranchNode).batchHash();
+    },
+  });
+
+  itBench({
+    id: `${vc} validators hashComputations`,
+    beforeEach: () => {
+      resetNodes(node, depth);
+      return node;
+    },
+    fn: (node) => {
+      const hashComputationsByLevel = new Map();
+      (node as BranchNode).getHashComputation(0, hashComputationsByLevel);
+    },
+  });
 });
 
-function resetNodes(node: Node): void {
+function resetNodes(node: Node, depth: number): void {
   if (node.isLeaf()) return;
+  // do not reset zeroNode
+  if (node === zeroNode(depth)) return;
   // this is to ask Node to calculate node again
   node.h0 = null as unknown as number;
-  // in the old version, we should do
-  // node._root = null;
-  resetNodes(node.left);
-  resetNodes(node.right);
+  resetNodes(node.left, depth - 1);
+  resetNodes(node.right, depth - 1);
 }
 
 function createValidator(i: number): Node {
-  const nodes: Node[] = [];
-  // pubkey, 48 bytes => 2 nodes
-  const pubkeyNode1 = newLeafNodeFilled(i);
-  const pubkeyNode2 = newLeafNodeFilled(i);
-  nodes.push(new BranchNode(pubkeyNode1, pubkeyNode2));
-  // withdrawalCredentials, 32 bytes => 1 node
-  nodes.push(newLeafNodeFilled(i));
-  // effectiveBalance, 8 bytes => 1 node
-  nodes.push(newLeafNodeFilled(i));
-  // slashed => 1 node
-  nodes.push(LeafNode.fromRoot(new Uint8Array(32)));
-  // 4 epoch nodes, 8 bytes => 1 node
-  nodes.push(newLeafNodeFilled(i));
-  nodes.push(newLeafNodeFilled(i));
-  nodes.push(newLeafNodeFilled(i));
-  nodes.push(newLeafNodeFilled(i));
-
-  return subtreeFillToContents(nodes, countToDepth(BigInt(nodes.length)));
+  return newLeafNodeFilled(i);
 }
 
 function createValidatorList(numValidator: number): Node {
   const validators = Array.from({length: numValidator}, (_, i) => createValidator(i));
   // add 1 to countToDepth for mix_in_length spec
-  return subtreeFillToContents(validators, countToDepth(BigInt(numValidator)) + 1);
+  const depth = countToDepth(BigInt(numValidator)) + 1;
+  const rootNode = subtreeFillToContents(validators, depth);
+  return rootNode;
 }
 
 function newLeafNodeFilled(i: number): LeafNode {
diff --git a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts b/packages/persistent-merkle-tree/test/unit/batchHash.test.ts
new file mode 100644
index 00000000..36fc9890
--- /dev/null
+++ b/packages/persistent-merkle-tree/test/unit/batchHash.test.ts
@@ -0,0 +1,48 @@
+import {expect} from "chai";
+import {countToDepth} from "../../src/gindex";
+import {BranchNode, LeafNode, Node} from "../../src/node";
+import {subtreeFillToContents} from "../../src/subtree";
+import {zeroNode} from "../../src/zeroNode";
+
+describe("batchHash", function () {
+  // const numNodes = [200, 201, 202, 203];
+  const numNodes = [32, 33, 64];
+  for (const numNode of numNodes) {
+    it(`${numNode} nodes`, () => {
+      const rootNode = createList(numNode);
+      const root1 = rootNode.batchHash();
+      const rootNode2 = createList(numNode);
+      const root2 = rootNode2.root;
+      expect(root2).to.be.deep.equal(root1);
+
+      const depth = countToDepth(BigInt(numNode)) + 1;
+      resetNodes(rootNode, depth);
+      resetNodes(rootNode2, depth);
+      expect(rootNode.batchHash()).to.be.deep.equal(rootNode2.batchHash());
+    });
+  }
+});
+
+function resetNodes(node: Node, depth: number): void {
+  if (node.isLeaf()) return;
+  // do not reset zeroNode
+  if (node === zeroNode(depth)) return;
+  // this is to ask Node to calculate node again
+  node.h0 = null as unknown as number;
+  // in the old version, we should do
+  // node._root = null;
+  resetNodes(node.left, depth - 1);
+  resetNodes(node.right, depth - 1);
+}
+
+function newLeafNodeFilled(i: number): LeafNode {
+  return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 255)));
+}
+
+function createList(numNode: number): BranchNode {
+  const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i));
+  // add 1 to countToDepth for mix_in_length spec
+  const depth = countToDepth(BigInt(numNode)) + 1;
+  const node = subtreeFillToContents(nodes, depth);
+  return node as BranchNode;
+}
diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts
index 86eca941..bea3b6f9 100644
--- a/packages/persistent-merkle-tree/test/unit/tree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts
@@ -10,6 +10,7 @@ import {
   uint8ArrayToHashObject,
   setNodesAtDepth,
   findDiffDepthi,
+  BranchNode,
 } from "../../src";
 
 describe("fixed-depth tree iteration", () => {
@@ -108,7 +109,7 @@ describe("Tree.setNode vs Tree.setHashObjectFn", () => {
     tree2.setNodeWithFn(BigInt(18), getNewNodeFn);
     tree2.setNodeWithFn(BigInt(46), getNewNodeFn);
     tree2.setNodeWithFn(BigInt(60), getNewNodeFn);
-    expect(toHex(tree2.root)).to.equal("02607e58782c912e2f96f4ff9daf494d0d115e7c37e8c2b7ddce17213591151b");
+    expect(toHex((tree2.rootNode as BranchNode).batchHash())).to.equal("02607e58782c912e2f96f4ff9daf494d0d115e7c37e8c2b7ddce17213591151b");
   });
 
   it("Should throw for gindex 0", () => {
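Note: a worked illustration of why the level grouping in commit 001 is safe to batch (not part of the patch). For a perfect 8-leaf tree there are 7 pending `BranchNode` hashes:

```ts
// level 2: 4 computations -> exactly one hash8HashObjects() call (8 inputs, 4 outputs)
// level 1: 2 computations -> falls into the `% 4` remainder, two digest64HashObjects() calls
// level 0: 1 computation  -> remainder, one digest64HashObjects() call
//
// Levels are processed in descending order, so every src0/src1 already carries its
// hash by the time its own level runs - that is what makes same-level batching safe.
```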
From 48854edddc39f67d9e531fea46256452ba90ec96 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 25 Apr 2024 17:52:18 +0700
Subject: [PATCH 002/113] feat: track HashComputation in setNodesAtDepth()

---
 packages/persistent-merkle-tree/src/node.ts |  4 +-
 packages/persistent-merkle-tree/src/tree.ts | 50 ++++++++++++++++---
 .../test/unit/tree.test.ts                  | 44 +++++++++++++++-
 3 files changed, 87 insertions(+), 11 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts
index 895877d0..21a54981 100644
--- a/packages/persistent-merkle-tree/src/node.ts
+++ b/packages/persistent-merkle-tree/src/node.ts
@@ -3,7 +3,7 @@ import {hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from "./hasher";
 
 const TWO_POWER_32 = 2 ** 32;
 
-type HashComputation = {
+export type HashComputation = {
   src0: Node;
   src1: Node;
   dest: Node;
@@ -76,7 +76,7 @@ export class BranchNode extends Node {
     }
   }
 
-  // TODO: private, unit tests
+  // TODO: private, unit tests, use Array[HashComputation[]] for better performance
   getHashComputation(level: number, hashCompsByLevel: Map<number, HashComputation[]>): void {
     if (this.h0 === null) {
       let hashComputations = hashCompsByLevel.get(level);
diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts
index 32d38582..3ccd6fec 100644
--- a/packages/persistent-merkle-tree/src/tree.ts
+++ b/packages/persistent-merkle-tree/src/tree.ts
@@ -1,6 +1,6 @@
 import {zeroNode} from "./zeroNode";
 import {Gindex, GindexBitstring, convertGindexToBitstring} from "./gindex";
-import {Node, LeafNode, BranchNode} from "./node";
+import {Node, LeafNode, BranchNode, HashComputation} from "./node";
 import {createNodeFromProof, createProof, Proof, ProofInput} from "./proof";
 import {createSingleProof} from "./proof/single";
 
@@ -341,8 +341,15 @@ export function setNodeAtDepth(rootNode: Node, nodesDepth: number, index: number
  * gindex and navigate upwards creating or caching nodes as necessary. Loop and repeat.
  *
  * Supports index up to `Number.MAX_SAFE_INTEGER`.
+ * TODO: add offset to consume from ssz
  */
-export function setNodesAtDepth(rootNode: Node, nodesDepth: number, indexes: number[], nodes: Node[]): Node {
+export function setNodesAtDepth(
+  rootNode: Node,
+  nodesDepth: number,
+  indexes: number[],
+  nodes: Node[],
+  hashCompsByLevel: Array<HashComputation[]> | null = null
+): Node {
   // depth depthi   gindexes   indexes
   // 0     1           1          0
   // 1     0         2 3         0 1
@@ -426,13 +433,26 @@ export function setNodesAtDepth(rootNode: Node, nodesDepth: number, indexes: num
         // Next node is the very next to the right of current node
         if (index + 1 === indexes[i + 1]) {
           node = new BranchNode(nodes[i], nodes[i + 1]);
+          if (hashCompsByLevel != null) {
+            // go with level of dest node (level 0 goes with root node)
+            // in this case dest node is nodesDepth - 2, same for below
+            hashCompsByLevel[nodesDepth - 1].push({src0: nodes[i], src1: nodes[i + 1], dest: node});
+          }
           // Move pointer one extra forward since node has consumed two nodes
           i++;
         } else {
-          node = new BranchNode(nodes[i], node.right);
+          const oldNode = node;
+          node = new BranchNode(nodes[i], oldNode.right);
+          if (hashCompsByLevel != null) {
+            hashCompsByLevel[nodesDepth - 1].push({src0: nodes[i], src1: oldNode.right, dest: node});
+          }
         }
       } else {
-        node = new BranchNode(node.left, nodes[i]);
+        const oldNode = node;
+        node = new BranchNode(oldNode.left, nodes[i]);
+        if (hashCompsByLevel != null) {
+          hashCompsByLevel[nodesDepth - 1].push({src0: oldNode.left, src1: nodes[i], dest: node});
+        }
       }
 
       // Here `node` is the new BranchNode at depthi `depthiParent`
@@ -463,11 +483,19 @@ export function setNodesAtDepth(rootNode: Node, nodesDepth: number, indexes: num
     for (let d = depthiParent + 1; d <= diffDepthi; d++) {
       // If node is on the left, store for later
      // If node is on the right merge with stored left node
+      const depth = nodesDepth - d - 1;
+      if (depth < 0) {
+        throw Error(`Invalid depth ${depth}, d=${d}, nodesDepth=${nodesDepth}`);
+      }
       if (isLeftNode(d, index)) {
         if (isLastIndex || d !== diffDepthi) {
           // If it's last index, bind with parent since it won't navigate to the right anymore
           // Also, if still has to move upwards, rebind since the node won't be visited anymore
-          node = new BranchNode(node, parentNodeStack[d].right);
+          const oldNode = node;
+          node = new BranchNode(oldNode, parentNodeStack[d].right);
+          if (hashCompsByLevel != null) {
+            hashCompsByLevel[depth].push({src0: oldNode, src1: parentNodeStack[d].right, dest: node});
+          }
         } else {
           // Only store the left node if it's at d = diffDepth
           leftParentNodeStack[d] = node;
         }
       } else {
         const leftNode = leftParentNodeStack[d];
 
         if (leftNode !== undefined) {
-          node = new BranchNode(leftNode, node);
+          const oldNode = node;
+          node = new BranchNode(leftNode, oldNode);
+          if (hashCompsByLevel != null) {
+            hashCompsByLevel[depth].push({src0: leftNode, src1: oldNode, dest: node});
+          }
           leftParentNodeStack[d] = undefined;
         } else {
-          node = new BranchNode(parentNodeStack[d].left, node);
+          const oldNode = node;
+          node = new BranchNode(parentNodeStack[d].left, oldNode);
+          if (hashCompsByLevel != null) {
+            hashCompsByLevel[depth].push({src0: parentNodeStack[d].left, src1: oldNode, dest: node});
+          }
         }
       }
     }
diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts
index bea3b6f9..4eb44a4a 100644
--- a/packages/persistent-merkle-tree/test/unit/tree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts
@@ -11,6 +11,7 @@ import {
   setNodesAtDepth,
   findDiffDepthi,
   BranchNode,
+  HashComputation,
 } from "../../src";
 
 describe("fixed-depth tree iteration", () => {
@@ -139,14 +140,20 @@ describe("Tree batch setNodes", () => {
     {depth: 5, gindexes: [33]},
     {depth: 5, gindexes: [33, 34]},
     {depth: 10, gindexes: [1024, 1061, 1098, 1135, 1172, 1209, 1246, 1283]},
-    {depth: 40, gindexes: [1157505940782, 1349082402477, 1759777921993]},
+    {depth: 40, gindexes: [Math.pow(2, 40) + 1000, Math.pow(2, 40) + 1_000_000, Math.pow(2, 40) + 1_000_000_000]},
+    // TODO: make sure index < 0xffffffff for findDiffDepthi not to return NaN
+    // {depth: 40, gindexes: [1157505940782, 1349082402477, 1759777921993]},
   ];
 
   for (const {depth, gindexes} of testCases) {
     const id = `depth ${depth} ${JSON.stringify(gindexes)}`;
 
     // Prepare tree
     const treeOk = new Tree(zeroNode(depth));
+    // cache all roots
+    treeOk.root;
+    const hashComputationsOk: Array<HashComputation[]> = Array.from({length: depth}, () => []);
     const tree = new Tree(zeroNode(depth));
+    tree.root;
     const gindexesBigint = gindexes.map((gindex) => BigInt(gindex));
     const index0 = 2 ** depth;
     const indexes = gindexes.map((gindex) => gindex - index0);
@@ -155,6 +162,7 @@ describe("Tree batch setNodes", () => {
     for (let i = 0; i < gindexesBigint.length; i++) {
       treeOk.setNode(gindexesBigint[i], LeafNode.fromRoot(Buffer.alloc(32, gindexes[i])));
     }
+    getHashComputation(treeOk.rootNode, 0, hashComputationsOk);
 
     // For the large test cases, only compare the rootNode root (gindex 1)
     const maxGindex = depth > 6 ? 1 : 2 ** (depth + 1);
 
     it(`${id} - setNodesAtDepth()`, () => {
       const chunksNode = tree.rootNode;
+      const hashComputations: Array<HashComputation[]> = Array.from({length: depth}, () => []);
       const newChunksNode = setNodesAtDepth(
         chunksNode,
         depth,
         indexes,
-        gindexes.map((nodeValue) => LeafNode.fromRoot(Buffer.alloc(32, nodeValue)))
+        gindexes.map((nodeValue) => LeafNode.fromRoot(Buffer.alloc(32, nodeValue))),
+        hashComputations
       );
       tree.rootNode = newChunksNode;
       const roots = getTreeRoots(tree, maxGindex);
 
+      // compute root to compare more easily
+      treeOk.root;
+      tree.root;
+      // TODO: need sort?
+      // TODO: confirm all nodes in HashComputation are populated with HashObjects, h0 !== null
+      for (let i = depth - 1; i > 0; i--) {
+        expect(hashComputations[i].length).to.be.equal(hashComputationsOk[i].length, `incorrect length at depth ${i}`);
+        for (let j = 0; j < hashComputations[i].length; j++) {
+          const hcOk = hashComputationsOk[i][j];
+          const hc = hashComputations[i][j];
+          expect(hc.src0.root).to.be.deep.equal(hcOk.src0.root);
+          expect(hc.src1.root).to.be.deep.equal(hcOk.src1.root);
+          expect(hc.dest.root).to.be.deep.equal(hcOk.dest.root);
+        }
+      }
       try {
         expect(roots).to.deep.equal(rootsOk);
       } catch (e) {
@@ -247,3 +272,18 @@ function getTreeRoots(tree: Tree, maxGindex: number): string[] {
 function toHex(bytes: Buffer | Uint8Array): string {
   return Buffer.from(bytes).toString("hex");
 }
+
+function getHashComputation(node: Node, level: number, hashCompsByLevel: Array<HashComputation[]>): void {
+  if (node.h0 === null) {
+    const hashComputations = hashCompsByLevel[level];
+    hashComputations.push({src0: node.left, src1: node.right, dest: node});
+    if (!node.left.isLeaf()) {
+      getHashComputation(node.left, level + 1, hashCompsByLevel);
+    }
+    if (!node.right.isLeaf()) {
+      getHashComputation(node.right, level + 1, hashCompsByLevel);
+    }
+  }
+
+  // else stop the recursion, LeafNode should have h0
+}
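Note: a sketch of how the new `setNodesAtDepth()` parameter from commit 002 is meant to be consumed (not part of the patch; `HashComputation`, `setNodesAtDepth` and `zeroNode` are exported as shown above, and the signature here is the one from this commit, before commit 006 wraps it in `HashComputationGroup`):

```ts
import {HashComputation, LeafNode, setNodesAtDepth, zeroNode} from "@chainsafe/persistent-merkle-tree";

const depth = 4;
const byLevel: Array<HashComputation[]> = Array.from({length: depth}, () => []);
const newRoot = setNodesAtDepth(
  zeroNode(depth),
  depth,
  [0, 5],
  [LeafNode.fromRoot(new Uint8Array(32).fill(1)), LeafNode.fromRoot(new Uint8Array(32).fill(2))],
  byLevel
);
// byLevel[depth - 1] holds the computations next to the leaves, byLevel[0] the one
// producing `newRoot`. Executing them bottom-up reproduces newRoot.root, provided the
// untouched sibling subtrees already carry cached hashes (the test above touches
// tree.root first for exactly that reason).
```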
From 6820252a799eaa00271d79735fcf1052c0719025 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Fri, 26 Apr 2024 14:39:36 +0700
Subject: [PATCH 003/113] fix: findDiffDepthi to support more than 31 bits

---
 packages/persistent-merkle-tree/src/tree.ts | 41 +++++++++++++++++++
 .../test/unit/tree.test.ts                  |  1 +
 2 files changed, 42 insertions(+)

diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts
index 3ccd6fec..3103bd2d 100644
--- a/packages/persistent-merkle-tree/src/tree.ts
+++ b/packages/persistent-merkle-tree/src/tree.ts
@@ -781,6 +781,47 @@ export function findDiffDepthi(from: number, to: number): number {
   return findDiffDepthi32Bits(from, to);
 }
 
+/**
+ * depth     depthi    gindexes   indexes
+ * 0         1         1          0
+ * 1         0         2 3        0 1
+ * 2         -         4 5 6 7    0 1 2 3
+ *
+ * **Conditions**:
+ * - `from` and `to` must not be equal
+ *
+ * @param from Index
+ * @param to Index
+ */
+const NUMBER_32_MAX = 0xffffffff;
+export function findDiffDepthi(from: number, to: number): number {
+  if (from === to || from < 0 || to < 0) {
+    throw Error(`Expect different positive inputs, from=${from} to=${to}`);
+  }
+  // 0 -> 0, 1 -> 1, 2 -> 2, 3 -> 2, 4 -> 3
+  const numBits0 = from > 0 ? Math.ceil(Math.log2(from + 1)) : 0;
+  const numBits1 = to > 0 ? Math.ceil(Math.log2(to + 1)) : 0;
+
+  // these indexes stay in 2 sides of a merkle tree
+  if (numBits0 !== numBits1) {
+    // Must offset by one to match the depthi scale
+    return Math.max(numBits0, numBits1) - 1;
+  }
+
+  // same number of bits
+  if (numBits0 > 32) {
+    const highBits0 = Math.floor(from / NUMBER_32_MAX);
+    const highBits1 = Math.floor(to / NUMBER_32_MAX);
+    if (highBits0 === highBits1) {
+      // different part is just low bits
+      return findDiffDepthi32Bits(from & NUMBER_32_MAX, to & NUMBER_32_MAX);
+    }
+    return 32 + findDiffDepthi32Bits(highBits0, highBits1);
+  }
+
+  return findDiffDepthi32Bits(from, to);
+}
+
 /**
  * Returns true if the `index` at `depth` is a left node, false if it is a right node.
  *
diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts
index 4eb44a4a..c63a8ec1 100644
--- a/packages/persistent-merkle-tree/test/unit/tree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts
@@ -12,6 +12,7 @@ import {
   findDiffDepthi,
   BranchNode,
   HashComputation,
+  findDiffDepthi,
 } from "../../src";
 
 describe("fixed-depth tree iteration", () => {

From c370e3e3b3245f200ac8a66fdfdbafa42f599ebc Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Fri, 26 Apr 2024 14:44:56 +0700
Subject: [PATCH 004/113] fix: test setNodesAtDepth get computation for > 32
 bits

---
 packages/persistent-merkle-tree/test/unit/tree.test.ts | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts
index c63a8ec1..1d263fbf 100644
--- a/packages/persistent-merkle-tree/test/unit/tree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts
@@ -142,8 +142,7 @@ describe("Tree batch setNodes", () => {
     {depth: 5, gindexes: [33, 34]},
     {depth: 10, gindexes: [1024, 1061, 1098, 1135, 1172, 1209, 1246, 1283]},
     {depth: 40, gindexes: [Math.pow(2, 40) + 1000, Math.pow(2, 40) + 1_000_000, Math.pow(2, 40) + 1_000_000_000]},
-    // TODO: make sure index < 0xffffffff for findDiffDepthi not to return NaN
-    // {depth: 40, gindexes: [1157505940782, 1349082402477, 1759777921993]},
+    {depth: 40, gindexes: [1157505940782, 1349082402477, 1759777921993]},
   ];
 
   for (const {depth, gindexes} of testCases) {
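Note: a worked walk-through of the bit-length logic in the `findDiffDepthi` added by commits 003/004 (illustration only, mirroring the code above):

```ts
// findDiffDepthi(4, 6): numBits = ceil(log2(5)) = 3 and ceil(log2(7)) = 3 -> same
//   bit length, so compare bit patterns: 4 = 0b100, 6 = 0b110 differ at bit 1 -> depthi 1.
// findDiffDepthi(3, 4): numBits = 2 vs 3 -> the indexes sit on two sides of the
//   tree, so the answer is max(2, 3) - 1 = depthi 2 (3 = 0b011 vs 4 = 0b100).
// findDiffDepthi(2**40 + 1000, 2**40 + 1_000_000_000): both bit lengths exceed 32 and
//   the high halves are equal, so only the low 32 bits are compared - the > 31 bit
//   case that used to produce NaN before this fix.
```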
From b65b06a06ca0042064609a84374760a9cb555c06 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 23 May 2024 08:43:15 +0700
Subject: [PATCH 005/113] feat: implement getHashComputations and
 executeHashComputations

---
 packages/persistent-merkle-tree/src/node.ts | 138 +++++++++++---------
 1 file changed, 76 insertions(+), 62 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts
index 21a54981..90b459f5 100644
--- a/packages/persistent-merkle-tree/src/node.ts
+++ b/packages/persistent-merkle-tree/src/node.ts
@@ -9,6 +9,13 @@ export type HashComputation = {
   dest: Node;
 };
 
+export type HashComputationGroup = {
+  // global array
+  byLevel: Array<HashComputation[]>;
+  // offset from top
+  offset: number;
+};
+
 /**
  * An immutable binary merkle tree node
  */
@@ -76,69 +83,10 @@ export class BranchNode extends Node {
     }
   }
 
-  // TODO: private, unit tests, use Array[HashComputation[]] for better performance
-  getHashComputation(level: number, hashCompsByLevel: Map<number, HashComputation[]>): void {
-    if (this.h0 === null) {
-      let hashComputations = hashCompsByLevel.get(level);
-      if (hashComputations === undefined) {
-        hashComputations = [];
-        hashCompsByLevel.set(level, hashComputations);
-      }
-      hashComputations.push({src0: this.left, src1: this.right, dest: this});
-      if (!this.left.isLeaf()) {
-        (this.left as BranchNode).getHashComputation(level + 1, hashCompsByLevel);
-      }
-      if (!this.right.isLeaf()) {
-        (this.right as BranchNode).getHashComputation(level + 1, hashCompsByLevel);
-      }
-
-      return;
-    }
-
-    // else stop the recursion, LeafNode should have h0
-  }
-
   batchHash(): Uint8Array {
-    const hashCompsByLevel = new Map<number, HashComputation[]>();
-    this.getHashComputation(0, hashCompsByLevel);
-    const levelsDesc = Array.from(hashCompsByLevel.keys()).sort((a, b) => b - a);
-    for (const level of levelsDesc) {
-      const hcArr = hashCompsByLevel.get(level);
-      if (!hcArr) {
-        // should not happen
-        throw Error(`no hash computations for level ${level}`);
-      }
-      // HashComputations of the same level are safe to batch
-      const batch = Math.floor(hcArr.length / 4);
-      for (let i = 0; i < batch; i++) {
-        const item0 = hcArr[i * 4];
-        const item1 = hcArr[i * 4 + 1];
-        const item2 = hcArr[i * 4 + 2];
-        const item3 = hcArr[i * 4 + 3];
-
-        const [dest0, dest1, dest2, dest3] = hasher.hash8HashObjects([
-          item0.src0,
-          item0.src1,
-          item1.src0,
-          item1.src1,
-          item2.src0,
-          item2.src1,
-          item3.src0,
-          item3.src1,
-        ]);
-
-        item0.dest.applyHash(dest0);
-        item1.dest.applyHash(dest1);
-        item2.dest.applyHash(dest2);
-        item3.dest.applyHash(dest3);
-      }
-      // compute remaining separately
-      const remLen = hcArr.length % 4;
-      for (let i = remLen - 1; i >= 0; i--) {
-        const {src0, src1, dest} = hcArr[hcArr.length - i - 1];
-        dest.applyHash(hasher.digest64HashObjects(src0, src1));
-      }
-    }
+    const hashComputations: HashComputation[][] = [];
+    getHashComputations(this, 0, hashComputations);
+    executeHashComputations(hashComputations);
 
     if (this.h0 === null) {
       throw Error("Root is not computed by batch");
@@ -446,3 +394,69 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void
   else if (hIndex === 7) node.h7 |= value;
   else throw Error("hIndex > 7");
 }
+
+/**
+ * Given an array of HashComputation, execute them from the end
+ * The consumer has the root node so it should be able to get the final root from there
+ */
+export function executeHashComputations(hashComputations: Array<HashComputation[]>): void {
+  for (let level = hashComputations.length - 1; level >= 0; level--) {
+    const hcArr = hashComputations[level];
+    if (!hcArr) {
+      // should not happen
+      throw Error(`no hash computations for level ${level}`);
+    }
+    // HashComputations of the same level are safe to batch
+    const batch = Math.floor(hcArr.length / 4);
+    for (let i = 0; i < batch; i++) {
+      const item0 = hcArr[i * 4];
+      const item1 = hcArr[i * 4 + 1];
+      const item2 = hcArr[i * 4 + 2];
+      const item3 = hcArr[i * 4 + 3];
+
+      const [dest0, dest1, dest2, dest3] = hasher.hash8HashObjects([
+        item0.src0,
+        item0.src1,
+        item1.src0,
+        item1.src1,
+        item2.src0,
+        item2.src1,
+        item3.src0,
+        item3.src1,
+      ]);
+
+      item0.dest.applyHash(dest0);
+      item1.dest.applyHash(dest1);
+      item2.dest.applyHash(dest2);
+      item3.dest.applyHash(dest3);
+    }
+    // compute remaining separately
+    const remLen = hcArr.length % 4;
+    for (let i = remLen - 1; i >= 0; i--) {
+      const {src0, src1, dest} = hcArr[hcArr.length - i - 1];
+      dest.applyHash(hasher.digest64HashObjects(src0, src1));
+    }
+  }
+}
+
+export function getHashComputations(node: Node, offset: number, hashCompsByLevel: Array<HashComputation[]>): void {
+  if (node.h0 === null) {
+    const hashComputations = arrayAtIndex(hashCompsByLevel, offset);
+    hashComputations.push({src0: node.left, src1: node.right, dest: node});
+    if (!node.left.isLeaf()) {
+      getHashComputations(node.left, offset + 1, hashCompsByLevel);
+    }
+    if (!node.right.isLeaf()) {
+      getHashComputations(node.right, offset + 1, hashCompsByLevel);
+    }
+  }
+
+  // else stop the recursion, LeafNode should have h0
+}
+
+export function arrayAtIndex<T>(twoDArray: Array<T[]>, index: number): T[] {
+  if (twoDArray[index] === undefined) {
+    twoDArray[index] = [];
+  }
+  return twoDArray[index];
+}
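Note: with commit 005 the old recursive `batchHash()` internals are split into a reusable collect/execute pair. A minimal usage sketch (not part of the patch; it assumes both functions are re-exported from the package root, as the unit tests in the next commit do):

```ts
import {BranchNode, executeHashComputations, getHashComputations, HashComputation} from "@chainsafe/persistent-merkle-tree";

declare const dirtyRoot: BranchNode; // hypothetical tree whose modified branches have h0 === null

const byLevel: HashComputation[][] = [];
getHashComputations(dirtyRoot, 0, byLevel);  // collect {src0, src1, dest} triples, indexed by distance from the root
executeHashComputations(byLevel);            // deepest level first, 4 sibling pairs per hash8HashObjects() call
dirtyRoot.root;                              // h0..h7 are now populated, so this just reads the cache
```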
From 086daf9c9d7fe6bc248fb12d07c1051b073d6346 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 23 May 2024 08:44:17 +0700
Subject: [PATCH 006/113] fix: setNodesAtDepth() with HashComputationGroup

---
 packages/persistent-merkle-tree/src/tree.ts | 36 ++++++++++++++-----
 .../test/unit/tree.test.ts                  | 35 +++++++++---------
 2 files changed, 45 insertions(+), 26 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts
index 3103bd2d..b80fe785 100644
--- a/packages/persistent-merkle-tree/src/tree.ts
+++ b/packages/persistent-merkle-tree/src/tree.ts
@@ -1,6 +1,6 @@
 import {zeroNode} from "./zeroNode";
 import {Gindex, GindexBitstring, convertGindexToBitstring} from "./gindex";
-import {Node, LeafNode, BranchNode, HashComputation} from "./node";
+import {Node, LeafNode, BranchNode, HashComputation, HashComputationGroup, arrayAtIndex} from "./node";
 import {createNodeFromProof, createProof, Proof, ProofInput} from "./proof";
 import {createSingleProof} from "./proof/single";
 
@@ -341,14 +341,14 @@ export function setNodeAtDepth(rootNode: Node, nodesDepth: number, index: number
  * gindex and navigate upwards creating or caching nodes as necessary. Loop and repeat.
  *
  * Supports index up to `Number.MAX_SAFE_INTEGER`.
- * TODO: add offset to consume from ssz
+ * @param hashComps a map of HashComputation[] by level (could be from 0 to `nodesDepth - 1`)
  */
 export function setNodesAtDepth(
   rootNode: Node,
   nodesDepth: number,
   indexes: number[],
   nodes: Node[],
-  hashCompsByLevel: Array<HashComputation[]> | null = null
+  hashComps: HashComputationGroup | null = null
 ): Node {
   // depth depthi   gindexes   indexes
   // 0     1           1          0
   // 1     0         2 3         0 1
@@ -367,6 +367,8 @@ export function setNodesAtDepth(
   if (nodesDepth === 0) {
     return nodes.length > 0 ? nodes[0] : rootNode;
   }
+  const hashCompsByLevel = hashComps?.byLevel ?? null;
+  const offset = hashComps?.offset ?? 0;
 
   /**
    * Contiguous filled stack of parent nodes. It gets filled in the first descent
@@ -438,11 +440,15 @@ export function setNodesAtDepth(
         if (index + 1 === indexes[i + 1]) {
           node = new BranchNode(nodes[i], nodes[i + 1]);
           if (hashCompsByLevel != null) {
             // go with level of dest node (level 0 goes with root node)
             // in this case dest node is nodesDepth - 2, same for below
-            hashCompsByLevel[nodesDepth - 1].push({src0: nodes[i], src1: nodes[i + 1], dest: node});
+            arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({
+              src0: nodes[i],
+              src1: nodes[i + 1],
+              dest: node,
+            });
           }
           // Move pointer one extra forward since node has consumed two nodes
           i++;
         } else {
           const oldNode = node;
           node = new BranchNode(nodes[i], oldNode.right);
           if (hashCompsByLevel != null) {
-            hashCompsByLevel[nodesDepth - 1].push({src0: nodes[i], src1: oldNode.right, dest: node});
+            arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({
+              src0: nodes[i],
+              src1: oldNode.right,
+              dest: node,
+            });
           }
         }
       } else {
         const oldNode = node;
         node = new BranchNode(oldNode.left, nodes[i]);
         if (hashCompsByLevel != null) {
-          hashCompsByLevel[nodesDepth - 1].push({src0: oldNode.left, src1: nodes[i], dest: node});
+          arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({src0: oldNode.left, src1: nodes[i], dest: node});
         }
       }
@@ -488,19 +494,31 @@ export function setNodesAtDepth(
           const oldNode = node;
           node = new BranchNode(oldNode, parentNodeStack[d].right);
           if (hashCompsByLevel != null) {
-            hashCompsByLevel[depth].push({src0: oldNode, src1: parentNodeStack[d].right, dest: node});
+            arrayAtIndex(hashCompsByLevel, depth + offset).push({
+              src0: oldNode,
+              src1: parentNodeStack[d].right,
+              dest: node,
+            });
           }
         } else {
           // Only store the left node if it's at d = diffDepth
           leftParentNodeStack[d] = node;
         }
       } else {
         const leftNode = leftParentNodeStack[d];
 
         if (leftNode !== undefined) {
           const oldNode = node;
           node = new BranchNode(leftNode, oldNode);
           if (hashCompsByLevel != null) {
-            hashCompsByLevel[depth].push({src0: leftNode, src1: oldNode, dest: node});
+            arrayAtIndex(hashCompsByLevel, depth + offset).push({src0: leftNode, src1: oldNode, dest: node});
           }
           leftParentNodeStack[d] = undefined;
         } else {
           const oldNode = node;
           node = new BranchNode(parentNodeStack[d].left, oldNode);
           if (hashCompsByLevel != null) {
-            hashCompsByLevel[depth].push({src0: parentNodeStack[d].left, src1: oldNode, dest: node});
+            arrayAtIndex(hashCompsByLevel, depth + offset).push({
+              src0: parentNodeStack[d].left,
+              src1: oldNode,
+              dest: node,
+            });
           }
         }
       }
diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts
index 1d263fbf..c4c6f74d 100644
--- a/packages/persistent-merkle-tree/test/unit/tree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts
@@ -13,6 +13,7 @@ import {
   BranchNode,
   HashComputation,
   findDiffDepthi,
+  getHashComputations,
 } from "../../src";
 
 describe("fixed-depth tree iteration", () => {
@@ -51,6 +52,19 @@ describe("fixed-depth tree iteration", () => {
   });
 });
 
+describe("batchHash() vs root getter", () => {
+  const lengths = [4, 5, 6, 7, 10, 100, 1000];
+  for (const length of lengths) {
+    it(`length=${length}`, () => {
+      const leaves = Array.from({length: length}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i % 256)));
+      const depth = Math.ceil(Math.log2(length));
+      const tree = new Tree(subtreeFillToContents([...leaves], depth));
+      const tree2 = new Tree(subtreeFillToContents([...leaves], depth));
+      expect(tree.batchHash()).to.be.deep.equal(tree2.root);
+    });
+  }
+});
+
 describe("subtree mutation", () => {
   let tree: Tree;
   beforeEach(() => {
@@ -162,7 +176,7 @@ describe("Tree batch setNodes", () => {
     for (let i = 0; i < gindexesBigint.length; i++) {
       treeOk.setNode(gindexesBigint[i], LeafNode.fromRoot(Buffer.alloc(32, gindexes[i])));
     }
-    getHashComputation(treeOk.rootNode, 0, hashComputationsOk);
+    getHashComputations(treeOk.rootNode, 0, hashComputationsOk);
 
     // For the large test cases, only compare the rootNode root (gindex 1)
     const maxGindex = depth > 6 ? 1 : 2 ** (depth + 1);
@@ -176,7 +190,8 @@ describe("Tree batch setNodes", () => {
       const newChunksNode = setNodesAtDepth(
         chunksNode,
         depth,
         indexes,
         gindexes.map((nodeValue) => LeafNode.fromRoot(Buffer.alloc(32, nodeValue))),
-        hashComputations
+        // TODO: more test cases with positive offset?
+        {byLevel: hashComputations, offset: 0}
       );
       tree.rootNode = newChunksNode;
       const roots = getTreeRoots(tree, maxGindex);
 
       // compute root to compare more easily
       treeOk.root;
       tree.root;
       // TODO: need sort?
       // TODO: confirm all nodes in HashComputation are populated with HashObjects, h0 !== null
-      for (let i = depth - 1; i > 0; i--) {
+      for (let i = depth - 1; i >= 0; i--) {
         expect(hashComputations[i].length).to.be.equal(hashComputationsOk[i].length, `incorrect length at depth ${i}`);
@@ -273,3 +288,3 @@ function getTreeRoots(tree: Tree, maxGindex: number): string[] {
 function toHex(bytes: Buffer | Uint8Array): string {
   return Buffer.from(bytes).toString("hex");
 }
-
-function getHashComputation(node: Node, level: number, hashCompsByLevel: Array<HashComputation[]>): void {
-  if (node.h0 === null) {
-    const hashComputations = hashCompsByLevel[level];
-    hashComputations.push({src0: node.left, src1: node.right, dest: node});
-    if (!node.left.isLeaf()) {
-      getHashComputation(node.left, level + 1, hashCompsByLevel);
-    }
-    if (!node.right.isLeaf()) {
-      getHashComputation(node.right, level + 1, hashCompsByLevel);
-    }
-  }
-
-  // else stop the recursion, LeafNode should have h0
-}
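Note: commit 006 introduces the `{byLevel, offset}` convention that the ssz ViewDU commits below rely on. A sketch of the offset rule (illustration only; `childGroup` is a hypothetical helper, but the body mirrors the `offset + this.type.depth` pattern used in the next commit):

```ts
import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";

// A subtree root sitting `offset` levels below the overall root contributes its
// level-0 computation at byLevel[offset]. A parent view hands the same shared
// byLevel array to a child view, bumping only the offset by its own depth:
function childGroup(parent: HashComputationGroup, parentDepth: number): HashComputationGroup {
  return {byLevel: parent.byLevel, offset: parent.offset + parentDepth};
}
```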
From b82e71822041cbbf0773b00b57e9212f6dfd3772 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Fri, 24 May 2024 09:45:52 +0700
Subject: [PATCH 007/113] feat: implement ViewDU.hashTreeRoot() using batch
 hash

---
 packages/ssz/src/branchNodeStruct.ts          | 26 ++++--
 packages/ssz/src/type/arrayBasic.ts           | 17 +++-
 packages/ssz/src/type/bitArray.ts             |  6 +-
 packages/ssz/src/type/byteArray.ts            |  5 +-
 packages/ssz/src/type/composite.ts            |  3 +-
 packages/ssz/src/type/container.ts            |  5 +-
 packages/ssz/src/type/listBasic.ts            | 20 +++--
 packages/ssz/src/type/listComposite.ts        | 20 +++--
 packages/ssz/src/type/optional.ts             |  5 +-
 packages/ssz/src/type/union.ts                |  5 +-
 packages/ssz/src/type/vectorBasic.ts          | 10 ++-
 packages/ssz/src/type/vectorComposite.ts      | 10 ++-
 packages/ssz/src/view/arrayBasic.ts           | 11 ++-
 packages/ssz/src/view/arrayComposite.ts       | 11 ++-
 packages/ssz/src/viewDU/abstract.ts           | 21 ++++-
 packages/ssz/src/viewDU/arrayBasic.ts         | 30 +++++--
 packages/ssz/src/viewDU/arrayComposite.ts     | 48 ++++++++--
 packages/ssz/src/viewDU/bitArray.ts           |  8 +-
 packages/ssz/src/viewDU/container.ts          | 40 +++++++--
 .../ssz/src/viewDU/containerNodeStruct.ts     | 17 ++--
 packages/ssz/src/viewDU/listBasic.ts          | 17 ++--
 packages/ssz/src/viewDU/listComposite.ts      | 15 +++-
 .../test/unit/byType/bitArray/tree.test.ts    | 17 ++++
 .../test/unit/byType/bitVector/tree.test.ts   | 17 ++++
 .../test/unit/byType/container/tree.test.ts   | 89 +++++++++++++++++++
 .../test/unit/byType/listBasic/tree.test.ts   | 52 +++++++++++
 .../unit/byType/listComposite/tree.test.ts    | 71 +++++++++++++++
 27 files changed, 519 insertions(+), 77 deletions(-)

diff --git a/packages/ssz/src/branchNodeStruct.ts b/packages/ssz/src/branchNodeStruct.ts
index 471716c4..be38fd88 100644
--- a/packages/ssz/src/branchNodeStruct.ts
+++ b/packages/ssz/src/branchNodeStruct.ts
@@ -2,22 +2,27 @@ import {HashObject} from "@chainsafe/as-sha256/lib/hashObject";
 import {hashObjectToUint8Array, Node} from "@chainsafe/persistent-merkle-tree";
 
 /**
- * BranchNode whose children's data is represented as a struct, not a tree.
+ * BranchNode whose children's data is represented as a struct, the backed tree is lazily computed from the struct.
  *
  * This approach is useful for memory efficiency of data that is not modified often, for example the validators
 * registry in Ethereum consensus `state.validators`. The tradeoff is that getting the hash, or proofs, is more
 * expensive because the tree has to be recreated every time.
  */
 export class BranchNodeStruct<T> extends Node {
+  /**
+   * this represents the backed tree which is lazily computed from value
+   */
+  private _rootNode: Node | null = null;
   constructor(private readonly valueToNode: (value: T) => Node, readonly value: T) {
     // First null value is to save an extra variable to check if a node has a root or not
     super(null as unknown as number, 0, 0, 0, 0, 0, 0, 0);
+    this._rootNode = null;
   }
 
   get rootHashObject(): HashObject {
+    // return this.rootNode.rootHashObject;
     if (this.h0 === null) {
-      const node = this.valueToNode(this.value);
-      super.applyHash(node.rootHashObject);
+      super.applyHash(this.rootNode.rootHashObject);
     }
     return this;
   }
@@ -31,10 +36,21 @@ export class BranchNodeStruct<T> extends Node {
   }
 
   get left(): Node {
-    return this.valueToNode(this.value).left;
+    return this.rootNode.left;
   }
 
   get right(): Node {
-    return this.valueToNode(this.value).right;
+    return this.rootNode.right;
  }
+
+  /**
+   * Singleton implementation to make sure there is single backed tree for this node.
+   * This is important for batching HashComputations
+   */
+  private get rootNode(): Node {
+    if (this._rootNode === null) {
+      this._rootNode = this.valueToNode(this.value);
+    }
+    return this._rootNode;
+  }
 }
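Note: a sketch of the behavior the `BranchNodeStruct` change above fixes (illustration only; `valueToNode`/`value` are hypothetical stand-ins):

```ts
import {Node} from "@chainsafe/persistent-merkle-tree";
// BranchNodeStruct is internal to the ssz package (src/branchNodeStruct.ts)

declare const valueToNode: (value: unknown) => Node;
declare const value: unknown;

const node = new BranchNodeStruct(valueToNode, value);
// before: node.left and node.right each called valueToNode(value), materializing two
// independent subtrees, so hashes cached on one were invisible to the other
node.left;  // now: materializes the backed tree once via the private rootNode getter
node.right; // reuses the same tree, so batched HashComputations all point at one set of nodes
```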
diff --git a/packages/ssz/src/type/arrayBasic.ts b/packages/ssz/src/type/arrayBasic.ts
index c731e02a..c10161fe 100644
--- a/packages/ssz/src/type/arrayBasic.ts
+++ b/packages/ssz/src/type/arrayBasic.ts
@@ -1,10 +1,12 @@
 import {
   BranchNode,
+  HashComputationGroup,
   LeafNode,
   Node,
   getNodesAtDepth,
   packedNodeRootsToBytes,
   packedRootsBytesToNode,
+  arrayAtIndex,
 } from "@chainsafe/persistent-merkle-tree";
 import {Type, ValueOf, ByteViews} from "./abstract";
 import {BasicType} from "./basic";
@@ -39,14 +41,23 @@ export function addLengthNode(chunksNode: Node, length: number): Node {
   return new BranchNode(chunksNode, LeafNode.fromUint32(length));
 }
 
-export function setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node {
+export function setChunksNode(
+  rootNode: Node,
+  chunksNode: Node,
+  newLength: number | null,
+  hashComps: HashComputationGroup | null
+): Node {
   const lengthNode =
-    newLength !== undefined
+    newLength !== null
      ? // If newLength is set, create a new node for length
         LeafNode.fromUint32(newLength)
       : // else re-use existing node
         (rootNode.right as LeafNode);
-  return new BranchNode(chunksNode, lengthNode);
+  const branchNode = new BranchNode(chunksNode, lengthNode);
+  if (hashComps !== null) {
+    arrayAtIndex(hashComps.byLevel, hashComps.offset).push({src0: chunksNode, src1: lengthNode, dest: branchNode});
+  }
+  return branchNode;
 }
 
 export type ArrayProps = {isList: true; limit: number} | {isList: false; length: number};
diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts
index 5351286f..22556986 100644
--- a/packages/ssz/src/type/bitArray.ts
+++ b/packages/ssz/src/type/bitArray.ts
@@ -1,4 +1,4 @@
-import {concatGindices, Gindex, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree";
+import {concatGindices, Gindex, HashComputationGroup, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree";
 import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray";
 import {splitIntoRootChunks} from "../util/merkleize";
 import {CompositeType, LENGTH_GINDEX} from "./composite";
@@ -29,8 +29,8 @@ export abstract class BitArrayType extends CompositeType<BitArray, BitArrayTreeView, BitArrayTreeViewDU> {
     return view.node;
   }
 
-  commitViewDU(view: BitArrayTreeViewDU): Node {
-    view.commit();
+  commitViewDU(view: BitArrayTreeViewDU, hashComps: HashComputationGroup | null = null): Node {
+    view.commit(hashComps);
     return view.node;
   }
diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts
--- a/packages/ssz/src/type/composite.ts
+++ b/packages/ssz/src/type/composite.ts
@@ ... @@ export abstract class CompositeType<V, TV, TVDU> extends Type<V> {
   /** INTERNAL METHOD: Given a Tree View, returns a `Node` with all its updated data */
   abstract commitView(view: TV): Node;
   /** INTERNAL METHOD: Given a Deferred Update Tree View returns a `Node` with all its updated data */
-  abstract commitViewDU(view: TVDU): Node;
+  abstract commitViewDU(view: TVDU, hashComps?: HashComputationGroup | null): Node;
   /** INTERNAL METHOD: Return the cache of a Deferred Update Tree View. May return `undefined` if this ViewDU has no cache */
   abstract cacheOfViewDU(view: TVDU): unknown;
diff --git a/packages/ssz/src/type/container.ts b/packages/ssz/src/type/container.ts
index 97b10aa1..57e8215b 100644
--- a/packages/ssz/src/type/container.ts
+++ b/packages/ssz/src/type/container.ts
@@ -7,6 +7,7 @@ import {
   toGindex,
   concatGindices,
   getNode,
+  HashComputationGroup,
 } from "@chainsafe/persistent-merkle-tree";
 import {maxChunksToDepth} from "../util/merkleize";
 import {Require} from "../util/types";
@@ -162,8 +163,8 @@ export class ContainerType<Fields extends Record<string, Type<unknown>>> extends
     return view.node;
   }
 
-  commitViewDU(view: ContainerTreeViewDUType<Fields>): Node {
-    view.commit();
+  commitViewDU(view: ContainerTreeViewDUType<Fields>, hashComps: HashComputationGroup | null = null): Node {
+    view.commit(hashComps);
     return view.node;
   }
diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts
index 53ae8783..b4c6ff9e 100644
--- a/packages/ssz/src/type/listBasic.ts
+++ b/packages/ssz/src/type/listBasic.ts
@@ -1,4 +1,4 @@
-import {LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree";
 import {ValueOf} from "./abstract";
 import {BasicType} from "./basic";
 import {ByteViews} from "./composite";
@@ -93,8 +93,8 @@ export class ListBasicType<ElementType extends BasicType<unknown>>
     return view.node;
   }
 
-  commitViewDU(view: ListBasicTreeViewDU<ElementType>): Node {
-    view.commit();
+  commitViewDU(view: ListBasicTreeViewDU<ElementType>, hashComps: HashComputationGroup | null = null): Node {
+    view.commit(hashComps);
     return view.node;
   }
 
@@ -144,8 +144,18 @@ export class ListBasicType<ElementType extends BasicType<unknown>>
     return node.left;
   }
 
+  tree_chunksNodeOffset(): number {
+    // one more level for length, see setChunksNode below
+    return 1;
+  }
+
+  tree_setChunksNode(
+    rootNode: Node,
+    chunksNode: Node,
+    newLength: number | null,
+    hashComps: HashComputationGroup | null
+  ): Node {
-  tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node {
-    return setChunksNode(rootNode, chunksNode, newLength);
+    return setChunksNode(rootNode, chunksNode, newLength, hashComps);
   }
 
   // Merkleization
diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts
index 48fc37df..8e97f90e 100644
--- a/packages/ssz/src/type/listComposite.ts
+++ b/packages/ssz/src/type/listComposite.ts
@@ -1,4 +1,4 @@
-import {Node, Tree} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree";
 import {
   mixInLength,
   maxChunksToDepth,
@@ -97,8 +97,8 @@ export class ListCompositeType<
     return view.node;
   }
 
-  commitViewDU(view: ListCompositeTreeViewDU<ElementType>): Node {
-    view.commit();
+  commitViewDU(view: ListCompositeTreeViewDU<ElementType>, hashComps: HashComputationGroup | null = null): Node {
+    view.commit(hashComps);
     return view.node;
   }
 
@@ -150,8 +150,18 @@ export class ListCompositeType<
     return node.left;
   }
 
-  tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node {
-    return setChunksNode(rootNode, chunksNode, newLength);
+  tree_chunksNodeOffset(): number {
+    // one more level for length, see setChunksNode below
+    return 1;
+  }
+
+  tree_setChunksNode(
+    rootNode: Node,
+    chunksNode: Node,
+    newLength: number | null,
+    hashComps: HashComputationGroup | null
+  ): Node {
+    return setChunksNode(rootNode, chunksNode, newLength, hashComps);
   }
 
   // Merkleization
diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts
index 59c38d6b..7bec6e3d 100644
--- a/packages/ssz/src/type/optional.ts
+++ b/packages/ssz/src/type/optional.ts
@@ -1,4 +1,4 @@
-import {concatGindices, Gindex, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree";
+import {concatGindices, Gindex, HashComputationGroup, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree";
 import {mixInLength} from "../util/merkleize";
 import {Require} from "../util/types";
 import {namedClass} from "../util/named";
@@ -75,7 +75,8 @@ export class OptionalType<ElementType extends Type<unknown>> extends CompositeTy
   }
 
   // TODO add an OptionalViewDU
-  commitViewDU(view: ValueOfType): Node {
+  // TODO: batch
+  commitViewDU(view: ValueOfType, hashComps: HashComputationGroup | null = null): Node {
     return this.value_toTree(view);
   }
diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts
index c6fea712..93cbed51 100644
--- a/packages/ssz/src/type/union.ts
+++ b/packages/ssz/src/type/union.ts
@@ -1,4 +1,4 @@
-import {concatGindices, getNode, Gindex, Node, Tree} from "@chainsafe/persistent-merkle-tree";
+import {concatGindices, getNode, Gindex, HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree";
 import {mixInLength} from "../util/merkleize";
 import {Require} from "../util/types";
 import {namedClass} from "../util/named";
@@ -106,7 +106,8 @@ export class UnionType<Types extends Type<unknown>[]> extends CompositeType<
     return this.value_toTree(view);
   }
 
-  commitViewDU(view: ValueOfTypes): Node {
+  // TODO: batch
+  commitViewDU(view: ValueOfTypes, hashComps: HashComputationGroup | null = null): Node {
     return this.value_toTree(view);
   }
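Note: a layout sketch of why `tree_chunksNodeOffset()` is 1 for lists and 0 for vectors (illustration only, derived from the `setChunksNode` change above):

```ts
// A list's root mixes in the length, so the chunks subtree sits one level below it:
//
//        rootNode             <- level `offset` (the mix-in BranchNode)
//        /      \
//  chunksNode  LeafNode(len)  <- chunk computations start at `offset + 1`
//
// setChunksNode() records {src0: chunksNode, src1: lengthNode, dest: rootNode} at
// byLevel[hashComps.offset], so the mix-in hash itself is batched too. Vectors have
// no length node, hence tree_chunksNodeOffset() === 0 in the diffs that follow.
```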
diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts
index 061008e0..c6b1b459 100644
--- a/packages/ssz/src/type/vectorBasic.ts
+++ b/packages/ssz/src/type/vectorBasic.ts
@@ -1,4 +1,4 @@
-import {Node, Tree} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree";
 import {maxChunksToDepth, splitIntoRootChunks} from "../util/merkleize";
 import {Require} from "../util/types";
 import {namedClass} from "../util/named";
@@ -83,8 +83,8 @@ export class VectorBasicType<ElementType extends BasicType<unknown>>
     return view.node;
   }
 
-  commitViewDU(view: ArrayBasicTreeViewDU<ElementType>): Node {
-    view.commit();
+  commitViewDU(view: ArrayBasicTreeViewDU<ElementType>, hashComps: HashComputationGroup | null = null): Node {
+    view.commit(hashComps);
     return view.node;
   }
 
@@ -132,6 +132,10 @@ export class VectorBasicType<ElementType extends BasicType<unknown>>
     return node;
   }
 
+  tree_chunksNodeOffset(): number {
+    return 0;
+  }
+
   tree_setChunksNode(rootNode: Node, chunksNode: Node): Node {
     return chunksNode;
   }
diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts
index 68455bb1..9f0d7f8c 100644
--- a/packages/ssz/src/type/vectorComposite.ts
+++ b/packages/ssz/src/type/vectorComposite.ts
@@ -1,4 +1,4 @@
-import {Node, Tree} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree";
 import {maxChunksToDepth} from "../util/merkleize";
 import {Require} from "../util/types";
 import {namedClass} from "../util/named";
@@ -90,8 +90,8 @@ export class VectorCompositeType<
     return view.node;
   }
 
-  commitViewDU(view: ArrayCompositeTreeViewDU<ElementType>): Node {
-    view.commit();
+  commitViewDU(view: ArrayCompositeTreeViewDU<ElementType>, hashComps: HashComputationGroup | null = null): Node {
+    view.commit(hashComps);
     return view.node;
   }
 
@@ -139,6 +139,10 @@ export class VectorCompositeType<
     return node;
   }
 
+  tree_chunksNodeOffset(): number {
+    return 0;
+  }
+
   tree_setChunksNode(rootNode: Node, chunksNode: Node): Node {
     return chunksNode;
   }
diff --git a/packages/ssz/src/view/arrayBasic.ts b/packages/ssz/src/view/arrayBasic.ts
index e96ce1d1..ed34128c 100644
--- a/packages/ssz/src/view/arrayBasic.ts
+++ b/packages/ssz/src/view/arrayBasic.ts
@@ -1,4 +1,4 @@
-import {getNodesAtDepth, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree";
+import {getNodesAtDepth, HashComputationGroup, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree";
 import {ValueOf} from "../type/abstract";
 import {BasicType} from "../type/basic";
 import {CompositeType} from "../type/composite";
@@ -21,8 +21,15 @@ export type ArrayBasicType<ElementType extends BasicType<unknown>> = CompositeTy
   tree_setLength(tree: Tree, length: number): void;
   /** INTERNAL METHOD: Return the chunks node from a root node */
   tree_getChunksNode(rootNode: Node): Node;
+  /** INTERNAL METHOD: Return the offset from root for HashComputation */
+  tree_chunksNodeOffset(): number;
   /** INTERNAL METHOD: Return a new root node with changed chunks node and length */
-  tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node;
+  tree_setChunksNode(
+    rootNode: Node,
+    chunksNode: Node,
+    newLength: number | null,
+    hashComps: HashComputationGroup | null
+  ): Node;
 };
 
 export class ArrayBasicTreeView<ElementType extends BasicType<unknown>> extends TreeView<ArrayBasicType<ElementType>> {
diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts
index 252a3587..65463e08 100644
--- a/packages/ssz/src/view/arrayComposite.ts
+++ b/packages/ssz/src/view/arrayComposite.ts
@@ -1,4 +1,4 @@
-import {getNodesAtDepth, Node, toGindexBitstring, Tree} from "@chainsafe/persistent-merkle-tree";
+import {getNodesAtDepth, HashComputationGroup, Node, toGindexBitstring, Tree} from "@chainsafe/persistent-merkle-tree";
 import {ValueOf} from "../type/abstract";
 import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite";
 import {TreeView} from "./abstract";
@@ -16,8 +16,15 @@ export type ArrayCompositeType<
   tree_setLength(tree: Tree, length: number): void;
   /** INTERNAL METHOD: Return the chunks node from a root node */
   tree_getChunksNode(rootNode: Node): Node;
+  /** INTERNAL METHOD: Return the offset from root for HashComputation */
+  tree_chunksNodeOffset(): number;
   /** INTERNAL METHOD: Return a new root node with changed chunks node and length */
-  tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node;
+  tree_setChunksNode(
+    rootNode: Node,
+    chunksNode: Node,
+    newLength: number | null,
+    hashComps: HashComputationGroup | null
+  ): Node;
 };
 
 export class ArrayCompositeTreeView<
diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts
index 29878637..e9cb8abb 100644
--- a/packages/ssz/src/viewDU/abstract.ts
+++ b/packages/ssz/src/viewDU/abstract.ts
@@ -1,3 +1,5 @@
+import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index";
+import {HashComputationGroup, executeHashComputations} from "@chainsafe/persistent-merkle-tree";
 import {ByteViews, CompositeType} from "../type/composite";
 import {TreeView} from "../view/abstract";
 
@@ -19,7 +21,7 @@ export abstract class TreeViewDU<T extends CompositeType<unknown, unknown, unknown>> extend
diff --git a/packages/ssz/src/viewDU/arrayBasic.ts b/packages/ssz/src/viewDU/arrayBasic.ts
--- a/packages/ssz/src/viewDU/arrayBasic.ts
+++ b/packages/ssz/src/viewDU/arrayBasic.ts
     return values;
   }
 
-  commit(): void {
+  /**
+   * When we need to compute HashComputations (hashComps != null):
+   * - if old _rootNode is hashed, then only need to put pending changes to HashComputationGroup
+   * - if old _rootNode is not hashed, need to traverse and put to HashComputationGroup
+   */
+  commit(hashComps: HashComputationGroup | null = null): void {
+    const isOldRootHashed = this._rootNode.h0 !== null;
     if (this.nodesChanged.size === 0) {
+      if (!isOldRootHashed && hashComps !== null) {
+        getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
+      }
       return;
     }
 
     const chunksNode = this.type.tree_getChunksNode(this._rootNode);
-    // TODO: Ensure fast setNodesAtDepth() method is correct
-    const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes);
+    const hashCompsThis =
+      hashComps != null && isOldRootHashed
+        ? {
+            byLevel: hashComps.byLevel,
+            offset: hashComps.offset + this.type.tree_chunksNodeOffset(),
+          }
+        : null;
+    const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, hashCompsThis);
 
     this._rootNode = this.type.tree_setChunksNode(
       this._rootNode,
       newChunksNode,
-      this.dirtyLength ? this._length : undefined
+      this.dirtyLength ? this._length : null,
+      isOldRootHashed ? hashComps : null
     );
 
+    if (!isOldRootHashed && hashComps !== null) {
+      getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
+    }
+
     this.nodesChanged.clear();
     this.dirtyLength = false;
   }
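Note: an end-to-end sketch of the batched `hashTreeRoot()` flow that these ViewDU `commit(hashComps)` overloads enable (the `viewDU/abstract.ts` hunk body is not fully shown above, so this is a conceptual illustration under the patch's API, with `MyContainer` as a hypothetical type):

```ts
import {executeHashComputations, HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
import {ContainerType, UintNumberType} from "@chainsafe/ssz";

const MyContainer = new ContainerType({a: new UintNumberType(8), b: new UintNumberType(8)});
const view = MyContainer.defaultViewDU();
view.a = 7;

const hashComps: HashComputationGroup = {byLevel: [], offset: 0};
MyContainer.commitViewDU(view, hashComps);  // records a {src0, src1, dest} per rebuilt branch
executeHashComputations(hashComps.byLevel); // one bottom-up pass, 4 sibling pairs per SIMD call
view.node.root;                             // every new node now carries a cached hash
```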
this._length : null,
+      hashComps
+    );
+
+    if (!isOldRootHashed && hashComps !== null) {
+      getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
+    }
+
     this.viewsChanged.clear();
     this.dirtyLength = false;
   }
diff --git a/packages/ssz/src/viewDU/bitArray.ts b/packages/ssz/src/viewDU/bitArray.ts
index 1c3a5421..50c7f4c6 100644
--- a/packages/ssz/src/viewDU/bitArray.ts
+++ b/packages/ssz/src/viewDU/bitArray.ts
@@ -1,4 +1,4 @@
-import {Node} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, Node, getHashComputations} from "@chainsafe/persistent-merkle-tree";
 import {BitArray} from "../value/bitArray";
 import {CompositeType} from "../type/composite";
 import {TreeViewDU} from "./abstract";
@@ -22,10 +22,14 @@ export class BitArrayTreeViewDU extends TreeViewDU>> extends
   };
 }

-  commit(): void {
+  /**
+   * When we need to compute HashComputations (hashComps != null):
+   * - if old _rootNode is hashed, then only need to put pending changes to HashComputationGroup
+   * - if old _rootNode is not hashed, need to traverse and put to HashComputationGroup
+   */
+  commit(hashComps: HashComputationGroup | null = null): void {
+    const isOldRootHashed = this._rootNode.h0 !== null;
     if (this.nodesChanged.size === 0 && this.viewsChanged.size === 0) {
+      if (!isOldRootHashed && hashComps !== null) {
+        getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
+      }
       return;
     }
     const nodesChanged: {index: number; node: Node}[] = [];
+    let hashCompsView: HashComputationGroup | null = null;
+    // if the old root is not hashed, no need to pass a HashComputationGroup to child views because we need to do a full traversal here
+    if (hashComps != null && isOldRootHashed) {
+      // each view may mutate HashComputationGroup at offset + depth
+      hashCompsView = {byLevel: hashComps.byLevel, offset: hashComps.offset + this.type.depth};
+    }
     for (const [index, view] of this.viewsChanged) {
       const fieldType = this.type.fieldsEntries[index].fieldType as unknown as CompositeTypeAny;
-      const node = fieldType.commitViewDU(view);
+      const node = fieldType.commitViewDU(view, hashCompsView);
       // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
       this.nodes[index] = node;
       nodesChanged.push({index, node});
@@ -96,7 +112,17 @@ class ContainerTreeViewDU>> extends
     const indexes = nodesChangedSorted.map((entry) => entry.index);
     const nodes = nodesChangedSorted.map((entry) => entry.node);

-    this._rootNode = setNodesAtDepth(this._rootNode, this.type.depth, indexes, nodes);
+    this._rootNode = setNodesAtDepth(
+      this._rootNode,
+      this.type.depth,
+      indexes,
+      nodes,
+      isOldRootHashed ? hashComps : null
+    );
+
+    if (!isOldRootHashed && hashComps !== null) {
+      getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
+    }

     this.nodesChanged.clear();
     this.viewsChanged.clear();
@@ -119,7 +145,11 @@ class ContainerTreeViewDU>> extends
   * Same method to `type/container.ts` that calls ViewDU.serializeToBytes() of internal fields.
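+   * Note: throws if there are pending changes; the consumer must call commit() first so that hash computations can be batched.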
   */
  serializeToBytes(output: ByteViews, offset: number): number {
-    this.commit();
+    // it's the responsibility of the consumer to call commit() before calling this method;
+    // doing the commit() here would lose all the HashComputations that we want to batch
+    if (this.nodesChanged.size !== 0 || this.viewsChanged.size !== 0) {
+      throw Error(`Must commit changes before serializeToBytes(Uint8Array(${output.uint8Array.length}), ${offset})`);
+    }
     let fixedIndex = offset;
     let variableIndex = offset + this.type.fixedEnd;
diff --git a/packages/ssz/src/viewDU/containerNodeStruct.ts b/packages/ssz/src/viewDU/containerNodeStruct.ts
index c69cd45a..eb850faf 100644
--- a/packages/ssz/src/viewDU/containerNodeStruct.ts
+++ b/packages/ssz/src/viewDU/containerNodeStruct.ts
@@ -1,4 +1,4 @@
-import {Node} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, Node, getHashComputations} from "@chainsafe/persistent-merkle-tree";
 import {Type, ValueOf} from "../type/abstract";
 import {isCompositeType} from "../type/composite";
 import {BranchNodeStruct} from "../branchNodeStruct";
@@ -27,15 +27,16 @@ class ContainerTreeViewDU>> extends
     return;
   }

-  commit(): void {
-    if (this.valueChanged === null) {
-      return;
+  commit(hashComps: HashComputationGroup | null = null): void {
+    if (this.valueChanged !== null) {
+      const value = this.valueChanged;
+      this.valueChanged = null;
+      this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>;
     }
-    const value = this.valueChanged;
-    this.valueChanged = null;
-
-    this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>;
+    if (hashComps !== null && this._rootNode.h0 === null) {
+      getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
+    }
   }

   protected clearCache(): void {
diff --git a/packages/ssz/src/viewDU/listBasic.ts b/packages/ssz/src/viewDU/listBasic.ts
index 549168a0..fed05791 100644
--- a/packages/ssz/src/viewDU/listBasic.ts
+++ b/packages/ssz/src/viewDU/listBasic.ts
@@ -46,11 +46,14 @@ export class ListBasicTreeViewDU> extends
    */
   sliceTo(index: number): this {
     if (index < 0) {
-      throw new Error(`Does not support sliceTo() with negative index ${index}`);
+      throw Error(`Does not support sliceTo() with negative index ${index}`);
     }

-    // Commit before getting rootNode to ensure all pending data is in the rootNode
-    this.commit();
+    // it's the responsibility of the consumer to call commit() before calling this method;
+    // doing the commit() here would lose all the HashComputations that we want to batch
+    if (this.nodesChanged.size > 0) {
+      throw Error(`Must commit changes before sliceTo(${index})`);
+    }

     // All nodes beyond length are already zero
     if (index >= this._length - 1) {
@@ -76,7 +79,7 @@ export class ListBasicTreeViewDU> extends
     // Must set new length and commit to tree to restore the same tree at that index
     const newLength = index + 1;

-    const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength);
+    const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength, null);

     return this.type.getViewDU(newRootNode) as this;
   }

@@ -84,7 +87,11 @@ export class ListBasicTreeViewDU> extends
   * Same method to `type/listBasic.ts` leveraging cached nodes.
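+   * Note: throws if there are pending changes; the consumer must call commit() first so that hash computations can be batched.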
   */
  serializeToBytes(output: ByteViews, offset: number): number {
-    this.commit();
+    // it's the responsibility of the consumer to call commit() before calling this method;
+    // doing the commit() here would lose all the HashComputations that we want to batch
+    if (this.nodesChanged.size > 0) {
+      throw Error(`Must commit changes before serializeToBytes(Uint8Array(${output.uint8Array.length}), ${offset})`);
+    }
     const {nodes, nodesPopulated} = this.cache;
     const chunksNode = this.type.tree_getChunksNode(this._rootNode);
     return tree_serializeToBytesArrayBasic(
diff --git a/packages/ssz/src/viewDU/listComposite.ts b/packages/ssz/src/viewDU/listComposite.ts
index 1eff31d5..b25a27e5 100644
--- a/packages/ssz/src/viewDU/listComposite.ts
+++ b/packages/ssz/src/viewDU/listComposite.ts
@@ -44,8 +44,11 @@ export class ListCompositeTreeViewDU<
    * Note: Using index = -1, returns an empty list of length 0.
    */
   sliceTo(index: number): this {
-    // Commit before getting rootNode to ensure all pending data is in the rootNode
-    this.commit();
+    // it's the responsibility of the consumer to call commit() before calling this method;
+    // doing the commit() here would lose all the HashComputations that we want to batch
+    if (this.viewsChanged.size > 0) {
+      throw Error(`Must commit changes before sliceTo(${index})`);
+    }

     const rootNode = this._rootNode;
     const length = this._length;
@@ -61,7 +64,7 @@ export class ListCompositeTreeViewDU<
     // Must set new length and commit to tree to restore the same tree at that index
     const newLength = index + 1;

-    const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength);
+    const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength, null);

     return this.type.getViewDU(newRootNode) as this;
   }

@@ -111,7 +114,11 @@ export class ListCompositeTreeViewDU<
   * Same method to `type/listComposite.ts` leveraging cached nodes.
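+   * Note: throws if there are pending changes; the consumer must call commit() first so that hash computations can be batched.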
   */
  serializeToBytes(output: ByteViews, offset: number): number {
-    this.commit();
+    // it's the responsibility of the consumer to call commit() before calling this method;
+    // doing the commit() here would lose all the HashComputations that we want to batch
+    if (this.viewsChanged.size > 0) {
+      throw Error(`Must commit changes before serializeToBytes(Uint8Array(${output.uint8Array.length}), ${offset})`);
+    }
     this.populateAllNodes();
     const chunksNode = this.type.tree_getChunksNode(this._rootNode);
     return tree_serializeToBytesArrayComposite(
diff --git a/packages/ssz/test/unit/byType/bitArray/tree.test.ts b/packages/ssz/test/unit/byType/bitArray/tree.test.ts
index 8d33314b..b6adf522 100644
--- a/packages/ssz/test/unit/byType/bitArray/tree.test.ts
+++ b/packages/ssz/test/unit/byType/bitArray/tree.test.ts
@@ -1,3 +1,4 @@
+import {expect} from "chai";
 import {BitVectorType, BitListType, BitArray} from "../../../../src";
 import {runViewTestMutation} from "../runViewTestMutation";

@@ -50,6 +51,22 @@ for (const type of [new BitVectorType(4), new BitListType(4)]) {
   });
 }

+describe("BitArray batchHash", () => {
+  const sszType = new BitListType(4);
+  const value = fromNum(4, 0b0010);
+  const expectedRoot = sszType.toView(value).hashTreeRoot();
+
+  it("fresh ViewDU", () => {
+    expect(sszType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+
+  it("set then hashTreeRoot", () => {
+    const viewDU = sszType.toViewDU(fromNum(4, 0b0011));
+    viewDU.set(0, false);
+    expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+});
+
 function fromNum(bitLen: number, num: number): BitArray {
   const bitArray = BitArray.fromBitLen(bitLen);
   for (let i = 0; i < bitLen; i++) {
diff --git a/packages/ssz/test/unit/byType/bitVector/tree.test.ts b/packages/ssz/test/unit/byType/bitVector/tree.test.ts
index 8dbe47ef..c1a48290 100644
--- a/packages/ssz/test/unit/byType/bitVector/tree.test.ts
+++ b/packages/ssz/test/unit/byType/bitVector/tree.test.ts
@@ -1,3 +1,4 @@
+import {expect} from "chai";
 import {BitVectorType, BitArray} from "../../../../src";
 import {runViewTestMutation} from "../runViewTestMutation";

@@ -48,6 +49,22 @@ runViewTestMutation({
   ],
 });

+describe("BitVector batchHash", () => {
+  const sszType = new BitVectorType(4);
+  const value = fromNum(4, 0b0010);
+  const expectedRoot = sszType.toView(value).hashTreeRoot();
+
+  it("fresh ViewDU", () => {
+    expect(sszType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+
+  it("set then hashTreeRoot", () => {
+    const viewDU = sszType.toViewDU(fromNum(4, 0b0011));
+    viewDU.set(0, false);
+    expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+});
+
 function fromNum(bitLen: number, num: number): BitArray {
   const bitArray = BitArray.fromBitLen(bitLen);
   for (let i = 0; i < bitLen; i++) {
diff --git a/packages/ssz/test/unit/byType/container/tree.test.ts b/packages/ssz/test/unit/byType/container/tree.test.ts
index 91a68c4a..bca2fd4f 100644
--- a/packages/ssz/test/unit/byType/container/tree.test.ts
+++ b/packages/ssz/test/unit/byType/container/tree.test.ts
@@ -1,5 +1,6 @@
 import {expect} from "chai";
 import {
+  BooleanType,
   ByteVectorType,
   ContainerNodeStructType,
   ContainerType,
@@ -7,6 +8,7 @@ import {
   ListCompositeType,
   NoneType,
   toHexString,
+  UintNumberType,
   UnionType,
   ValueOf,
 } from "../../../../src";
@@ -218,3 +220,90 @@ runViewTestMutation({
     },
   ],
 });
+
+describe("ContainerViewDU batchHash", function () {
+  const childContainerType = new ContainerType({b0: uint64NumInfType,
b1: uint64NumInfType}); + const parentContainerType = new ContainerType({ + // a basic type + a: uint64NumType, + b: childContainerType, + }); + + const value = {a: 10, b: {b0: 100, b1: 101}}; + const expectedRoot = parentContainerType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(parentContainerType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify basic type", () => { + const viewDU = parentContainerType.toViewDU({a: 9, b: {b0: 100, b1: 101}}); + viewDU.hashTreeRoot(); + viewDU.a += 1; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify full child container", () => { + const viewDU = parentContainerType.toViewDU({a: 10, b: {b0: 99, b1: 999}}); + viewDU.hashTreeRoot(); + viewDU.b = childContainerType.toViewDU({b0: 100, b1: 101}); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify partial child container", () => { + const viewDU = parentContainerType.toViewDU({a: 10, b: {b0: 99, b1: 999}}); + viewDU.hashTreeRoot(); + viewDU.b.b0 = 100; + viewDU.b.b1 = 101; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); + +describe("ContainerNodeStruct batchHash", function () { + const EpochInf = new UintNumberType(8, {clipInfinity: true}); + + // Ethereum consensus validator type + const containerType = new ContainerNodeStructType({ + pubkey: new ByteVectorType(48), + withdrawalCredentials: new ByteVectorType(32), + effectiveBalance: new UintNumberType(8), + slashed: new BooleanType(), + activationEligibilityEpoch: EpochInf, + activationEpoch: EpochInf, + exitEpoch: EpochInf, + withdrawableEpoch: EpochInf, + }); + const value = { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, + }; + const expectedRoot = containerType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(containerType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify basic type", () => { + const viewDU = containerType.toViewDU({...value, exitEpoch: 3}); + viewDU.hashTreeRoot(); + viewDU.exitEpoch *= 1_000_000; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("modify basic type", () => { + const viewDU = containerType.toViewDU({ + ...value, + exitEpoch: value.exitEpoch + 1, + withdrawableEpoch: value.withdrawableEpoch + 1, + }); + viewDU.exitEpoch -= 1; + viewDU.withdrawableEpoch -= 1; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); diff --git a/packages/ssz/test/unit/byType/listBasic/tree.test.ts b/packages/ssz/test/unit/byType/listBasic/tree.test.ts index d7f56b4d..3924564c 100644 --- a/packages/ssz/test/unit/byType/listBasic/tree.test.ts +++ b/packages/ssz/test/unit/byType/listBasic/tree.test.ts @@ -240,3 +240,55 @@ describe("ListBasicType.sliceTo", () => { }); } }); + +describe("ListBasicType batchHash", function () { + const value = [1, 2, 3, 4]; + const expectedRoot = ListN64Uint64NumberType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(ListN64Uint64NumberType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("push then hashTreeRoot()", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(3); + 
viewDU.push(4); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("push then modify then hashTreeRoot()", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(3); + viewDU.push(44); + viewDU.set(3, 4); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(33); + viewDU.push(44); + viewDU.hashTreeRoot(); + viewDU.set(2, 3); + viewDU.set(3, 4); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + // similar to a fresh ViewDU but it's good to test + it("sliceTo()", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(3); + viewDU.push(4); + viewDU.push(5); + viewDU.hashTreeRoot(); + expect(viewDU.sliceTo(3).hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts index 21fab6f1..2f146313 100644 --- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts +++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts @@ -213,3 +213,74 @@ describe("ListCompositeType.sliceFrom", () => { } }); }); + +describe("ListCompositeType batchHash", () => { + const value = [ + {a: 1, b: 2}, + {a: 3, b: 4}, + ]; + const expectedRoot = listOfContainersType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(listOfContainersType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("push then hashTreeRoot()", () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 3, b: 4})); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify full non-hashed child element", () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.hashTreeRoot(); + viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify partially hashed child element", () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.hashTreeRoot(); + const item1 = containerUintsType.toViewDU({a: 3, b: 44}); + item1.hashTreeRoot(); + item1.b = 4; + viewDU.set(1, item1); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify full hashed child element", () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.hashTreeRoot(); + const item1 = containerUintsType.toViewDU({a: 3, b: 4}); + item1.hashTreeRoot(); + viewDU.set(1, item1); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify partial child element", () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.hashTreeRoot(); + viewDU.get(1).a = 3; 
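+    // get(1) returns the same cached child ViewDU on both calls, so the two field mutations land on a single view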
+    viewDU.get(1).b = 4;
+    expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+
+  // similar to a fresh ViewDU but it's good to test
+  it("sliceTo()", () => {
+    const viewDU = listOfContainersType.defaultViewDU();
+    viewDU.push(containerUintsType.toViewDU({a: 1, b: 2}));
+    viewDU.push(containerUintsType.toViewDU({a: 3, b: 4}));
+    viewDU.push(containerUintsType.toViewDU({a: 5, b: 6}));
+    viewDU.hashTreeRoot();
+    expect(viewDU.sliceTo(1).hashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+});

From 75a44222ea2090f2893067b4456a605c6b282052 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Fri, 24 May 2024 16:22:39 +0700
Subject: [PATCH 008/113] fix: precompute hash for zero nodes

---
 packages/persistent-merkle-tree/src/zeroNode.ts | 5 +++++
 packages/persistent-merkle-tree/tsconfig.json   | 1 +
 2 files changed, 6 insertions(+)

diff --git a/packages/persistent-merkle-tree/src/zeroNode.ts b/packages/persistent-merkle-tree/src/zeroNode.ts
index 1fe49118..bbd9c98e 100644
--- a/packages/persistent-merkle-tree/src/zeroNode.ts
+++ b/packages/persistent-merkle-tree/src/zeroNode.ts
@@ -19,6 +19,11 @@ export function zeroNode(height: number): Node {
   for (let i = zeroes.length; i <= height; i++) {
     zeroes[i] = new BranchNode(zeroes[i - 1], zeroes[i - 1]);
   }
+
+   // make sure the hash is precomputed so zero nodes are never added to a HashComputationGroup;
+   // otherwise batch hashing would traverse the huge shared zero subtrees and run out of memory
+   zeroes[height].root;
 }
+
   return zeroes[height];
 }
diff --git a/packages/persistent-merkle-tree/tsconfig.json b/packages/persistent-merkle-tree/tsconfig.json
index 4c79a45c..11d74128 100644
--- a/packages/persistent-merkle-tree/tsconfig.json
+++ b/packages/persistent-merkle-tree/tsconfig.json
@@ -5,6 +5,7 @@
     "module": "commonjs",
     "lib": ["esnext", "dom"],
     "outDir": "lib",
+    "sourceMap": true,
     "strict": true,
     "strictNullChecks": true,
     "pretty": true,

From 008cf3d5d0d00495fd0e3a1182fe1c45e0c4fae9 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Sat, 25 May 2024 08:29:26 +0700
Subject: [PATCH 009/113] chore: add eth2 BeaconState test

---
 .../ssz/test/unit/eth2/beaconState.test.ts | 201 ++++++++++++++++++
 1 file changed, 201 insertions(+)
 create mode 100644 packages/ssz/test/unit/eth2/beaconState.test.ts

diff --git a/packages/ssz/test/unit/eth2/beaconState.test.ts b/packages/ssz/test/unit/eth2/beaconState.test.ts
new file mode 100644
index 00000000..5c41d0ab
--- /dev/null
+++ b/packages/ssz/test/unit/eth2/beaconState.test.ts
@@ -0,0 +1,201 @@
+import {expect} from "chai";
+import {BeaconState} from "../../lodestarTypes/deneb/sszTypes";
+import {ListUintNum64Type} from "../../../src/type/listUintNum64";
+import {altair, phase0, ssz} from "../../lodestarTypes";
+import {BitArray, fromHexString} from "../../../src";
+
+const VALIDATOR_REGISTRY_LIMIT = 1099511627776;
+export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT);
+
+// TODO - batch: should these tests also interleave commit() calls, or only exercise hashTreeRoot()?
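+// Note: hashTreeRoot() is what drives the batching internally here (see viewDU/abstract.ts);
+// a consumer could also batch by hand, roughly as follows (a sketch based on the
+// HashComputationGroup shape used in this PR, not something this test exercises):
+//   const hashComps = {byLevel: [], offset: 0};
+//   viewDU.commit(hashComps);
+//   executeHashComputations(hashComps.byLevel);
+//   const root = viewDU.node.root; // every dirty node is now hashed
+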
+describe("BeaconState ViewDU batch hash", function () { + const view = BeaconState.defaultView(); + const viewDU = BeaconState.defaultViewDU(); + + it("BeaconState ViewDU should have same hashTreeRoot() to View", () => { + // genesisTime + viewDU.genesisTime = view.genesisTime = 1e9; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // genesisValidatorsRoot + viewDU.genesisValidatorsRoot = view.genesisValidatorsRoot = Buffer.alloc(32, 1); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // fork + const fork: phase0.Fork = { + epoch: 1000, + previousVersion: fromHexString("0x03001020"), + currentVersion: fromHexString("0x04001020"), + }; + view.fork = BeaconState.fields.fork.toView(fork); + viewDU.fork = BeaconState.fields.fork.toViewDU(fork); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // latestBlockHeader + const latestBlockHeader: phase0.BeaconBlockHeader = { + slot: 1000, + proposerIndex: 1, + parentRoot: fromHexString("0xac80c66f413218e2c9c7bcb2408ccdceacf3bcd7e7df58474e0c6aa9d7f328a0"), + stateRoot: fromHexString("0xed29eed3dbee72caf3b13df84d01ebda1482dbd0ce084e1ce8862b4acb740ed8"), + bodyRoot: fromHexString("0x32c644ca1b5d1583d445e9d41c81b3e98465fefad4f0db16084cbce7f1b7b849"), + }; + view.latestBlockHeader = BeaconState.fields.latestBlockHeader.toView(latestBlockHeader); + viewDU.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU(latestBlockHeader); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // blockRoots + const blockRoots = ssz.phase0.HistoricalBlockRoots.defaultValue(); + blockRoots[0] = fromHexString("0x1234"); + view.blockRoots = ssz.phase0.HistoricalBlockRoots.toView(blockRoots); + viewDU.blockRoots = ssz.phase0.HistoricalBlockRoots.toViewDU(blockRoots); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // stateRoots + const stateRoots = ssz.phase0.HistoricalStateRoots.defaultValue(); + stateRoots[0] = fromHexString("0x5678"); + view.stateRoots = ssz.phase0.HistoricalStateRoots.toView(stateRoots); + viewDU.stateRoots = ssz.phase0.HistoricalStateRoots.toViewDU(stateRoots); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // historical_roots Frozen in Capella, replaced by historical_summaries + // Eth1 + const eth1Data: phase0.Eth1Data = { + depositRoot: fromHexString("0x1234"), + depositCount: 1000, + blockHash: fromHexString("0x5678"), + }; + view.eth1Data = BeaconState.fields.eth1Data.toView(eth1Data); + viewDU.eth1Data = BeaconState.fields.eth1Data.toViewDU(eth1Data); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // Eth1DataVotes + const eth1DataVotes = ssz.phase0.Eth1DataVotes.defaultValue(); + eth1DataVotes[0] = eth1Data; + view.eth1DataVotes = ssz.phase0.Eth1DataVotes.toView(eth1DataVotes); + viewDU.eth1DataVotes = ssz.phase0.Eth1DataVotes.toViewDU(eth1DataVotes); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // Eth1DepositIndex + view.eth1DepositIndex = 1000; + viewDU.eth1DepositIndex = 1000; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // validators + const validator = { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, + }; + view.validators = BeaconState.fields.validators.toView([validator]); + 
viewDU.validators = BeaconState.fields.validators.toViewDU([validator]); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // balances + view.balances = BeaconState.fields.balances.toView([1000, 2000, 3000]); + viewDU.balances = Balances.toViewDU([1000, 2000, 3000]); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // randaoMixes + const randaoMixes = ssz.phase0.RandaoMixes.defaultValue(); + randaoMixes[0] = fromHexString("0x1234"); + view.randaoMixes = ssz.phase0.RandaoMixes.toView(randaoMixes); + viewDU.randaoMixes = ssz.phase0.RandaoMixes.toViewDU(randaoMixes); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // slashings + view.slashings = BeaconState.fields.slashings.toView(Array.from({length: 64}, () => BigInt(1000))); + viewDU.slashings = BeaconState.fields.slashings.toViewDU(Array.from({length: 64}, () => BigInt(1000))); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // previousEpochAttestations + view.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toView([1, 2, 3]); + viewDU.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toViewDU([1, 2, 3]); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // currentEpochAttestations + view.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toView([1, 2, 3]); + viewDU.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toViewDU([1, 2, 3]); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // justificationBits + view.justificationBits = BeaconState.fields.justificationBits.toView( + BitArray.fromBoolArray([true, false, true, true]) + ); + viewDU.justificationBits = BeaconState.fields.justificationBits.toViewDU( + BitArray.fromBoolArray([true, false, true, true]) + ); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // previousJustifiedCheckpoint + const checkpoint: phase0.Checkpoint = { + epoch: 1000, + root: fromHexString("0x1234"), + }; + view.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toView(checkpoint); + viewDU.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toViewDU(checkpoint); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // currentJustifiedCheckpoint + view.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toView(checkpoint); + viewDU.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toViewDU(checkpoint); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // finalizedCheckpoint + view.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toView(checkpoint); + viewDU.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toViewDU(checkpoint); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // inactivityScores + view.inactivityScores = BeaconState.fields.inactivityScores.toView([1, 2, 3]); + viewDU.inactivityScores = BeaconState.fields.inactivityScores.toViewDU([1, 2, 3]); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // currentSyncCommittee + const syncCommittee: altair.SyncCommittee = { + pubkeys: Array.from({length: 32}, () => Buffer.alloc(48, 0xaa)), + aggregatePubkey: fromHexString("0x1234"), + }; + view.currentSyncCommittee = BeaconState.fields.currentSyncCommittee.toView(syncCommittee); + viewDU.currentSyncCommittee = 
BeaconState.fields.currentSyncCommittee.toViewDU(syncCommittee); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // nextSyncCommittee + view.nextSyncCommittee = BeaconState.fields.nextSyncCommittee.toView(syncCommittee); + viewDU.nextSyncCommittee = BeaconState.fields.nextSyncCommittee.toViewDU(syncCommittee); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // latestExecutionPayloadHeader + const latestExecutionPayloadHeader = BeaconState.fields.latestExecutionPayloadHeader.defaultValue(); + latestExecutionPayloadHeader.blockNumber = 1000; + latestExecutionPayloadHeader.parentHash = fromHexString( + "0xac80c66f413218e2c9c7bcb2408ccdceacf3bcd7e7df58474e0c6aa9d7f328a0" + ); + view.latestExecutionPayloadHeader = + BeaconState.fields.latestExecutionPayloadHeader.toView(latestExecutionPayloadHeader); + viewDU.latestExecutionPayloadHeader = + BeaconState.fields.latestExecutionPayloadHeader.toViewDU(latestExecutionPayloadHeader); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // nextWithdrawalIndex + viewDU.nextWithdrawalIndex = view.nextWithdrawalIndex = 1000; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // nextWithdrawalValidatorIndex + viewDU.nextWithdrawalValidatorIndex = view.nextWithdrawalValidatorIndex = 1000; + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // historicalSummaries + const historicalSummaries = { + blockSummaryRoot: fromHexString("0xac80c66f413218e2c9c7bcb2408ccdceacf3bcd7e7df58474e0c6aa9d7f328a0"), + stateSummaryRoot: fromHexString("0x32c644ca1b5d1583d445e9d41c81b3e98465fefad4f0db16084cbce7f1b7b849"), + }; + view.historicalSummaries = BeaconState.fields.historicalSummaries.toView([historicalSummaries]); + viewDU.historicalSummaries = BeaconState.fields.historicalSummaries.toViewDU([historicalSummaries]); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + }); +}); From 613353539ab5257265195c7be2e954196fe7a784 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 25 May 2024 08:58:05 +0700 Subject: [PATCH 010/113] fix: merge issue --- .../src/hasher/as-sha256.ts | 4 +- .../src/hasher/noble.ts | 4 +- .../src/hasher/types.ts | 2 +- packages/persistent-merkle-tree/src/node.ts | 2 +- packages/persistent-merkle-tree/src/tree.ts | 40 ------------------- packages/ssz/src/viewDU/listComposite.ts | 2 +- 6 files changed, 7 insertions(+), 47 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 141e2c9f..cad61ab2 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,8 +1,8 @@ -import {digest2Bytes32, digest64HashObjects, hash8HashObjects} from "@chainsafe/as-sha256"; +import {digest2Bytes32, digest64HashObjects, batchHash4HashObjectInputs} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; export const hasher: Hasher = { digest64: digest2Bytes32, digest64HashObjects, - hash8HashObjects, + batchHash4HashObjectInputs, }; diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 8dcd8c1a..ecac183b 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -7,7 +7,7 @@ const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().u export const hasher: Hasher = { digest64, digest64HashObjects: (a, 
b) => uint8ArrayToHashObject(digest64(hashObjectToUint8Array(a), hashObjectToUint8Array(b))), - hash8HashObjects: () => { - throw Error("not implemented"); + batchHash4HashObjectInputs: () => { + throw Error("TODO: not implemented"); }, }; diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 9566d7cd..ec361b76 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -9,5 +9,5 @@ export type Hasher = { * Hash two 32-byte HashObjects */ digest64HashObjects(a: HashObject, b: HashObject): HashObject; - hash8HashObjects(inputs: HashObject[]): HashObject[]; + batchHash4HashObjectInputs(inputs: HashObject[]): HashObject[]; }; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 90b459f5..cf5bc0b9 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -414,7 +414,7 @@ export function executeHashComputations(hashComputations: Array 0, 1 -> 1, 2 -> 2, 3 -> 2, 4 -> 3 - const numBits0 = from > 0 ? Math.ceil(Math.log2(from + 1)) : 0; - const numBits1 = to > 0 ? Math.ceil(Math.log2(to + 1)) : 0; - - // these indexes stay in 2 sides of a merkle tree - if (numBits0 !== numBits1) { - // Must offset by one to match the depthi scale - return Math.max(numBits0, numBits1) - 1; - } - - // same number of bits - if (numBits0 > 32) { - const highBits0 = Math.floor(from / NUMBER_32_MAX); - const highBits1 = Math.floor(to / NUMBER_32_MAX); - if (highBits0 === highBits1) { - // different part is just low bits - return findDiffDepthi32Bits(from & NUMBER_32_MAX, to & NUMBER_32_MAX); - } - return 32 + findDiffDepthi32Bits(highBits0, highBits1); - } - - return findDiffDepthi32Bits(from, to); -} /** * Returns true if the `index` at `depth` is a left node, false if it is a right node. 
diff --git a/packages/ssz/src/viewDU/listComposite.ts b/packages/ssz/src/viewDU/listComposite.ts index b25a27e5..1522a55a 100644 --- a/packages/ssz/src/viewDU/listComposite.ts +++ b/packages/ssz/src/viewDU/listComposite.ts @@ -105,7 +105,7 @@ export class ListCompositeTreeViewDU< newLength = nodes.length; } - const newRootNode = this.type.tree_setChunksNode(this._rootNode, newChunksNode, newLength); + const newRootNode = this.type.tree_setChunksNode(this._rootNode, newChunksNode, newLength, null); return this.type.getViewDU(newRootNode) as this; } From e60101a6052bf5971589c4977d4459d2b827bf18 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 25 May 2024 09:50:23 +0700 Subject: [PATCH 011/113] feat: generalize Hasher interface --- .../src/hasher/as-sha256.ts | 25 +++++++++++- .../src/hasher/noble.ts | 17 ++++++++- .../src/hasher/types.ts | 5 ++- packages/persistent-merkle-tree/src/node.ts | 38 ++++++------------- 4 files changed, 53 insertions(+), 32 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index cad61ab2..560ae421 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,8 +1,29 @@ -import {digest2Bytes32, digest64HashObjects, batchHash4HashObjectInputs} from "@chainsafe/as-sha256"; +import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; export const hasher: Hasher = { digest64: digest2Bytes32, digest64HashObjects, - batchHash4HashObjectInputs, + batchHashObjects: (inputs: HashObject[]) => { + // as-sha256 uses SIMD for batch hash + if (inputs.length === 0) { + return []; + } else if (inputs.length % 2 !== 0) { + throw new Error(`Expect inputs.length to be even, got ${inputs.length}`); + } + + const batch = Math.floor(inputs.length / 8); + const outputs = new Array(); + for (let i = 0; i < batch; i++) { + const [out0, out1, out2, out3] = batchHash4HashObjectInputs(inputs.slice(i * 8, i * 8 + 8)); + outputs.push(out0, out1, out2, out3); + } + + for (let i = batch * 8; i < inputs.length; i += 2) { + const output = digest64HashObjects(inputs[i], inputs[i + 1]); + outputs.push(output); + } + + return outputs; + }, }; diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index ecac183b..3f3b082c 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -1,4 +1,5 @@ import {sha256} from "@noble/hashes/sha256"; +import {digest64HashObjects, HashObject} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {hashObjectToUint8Array, uint8ArrayToHashObject} from "./util"; @@ -7,7 +8,19 @@ const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().u export const hasher: Hasher = { digest64, digest64HashObjects: (a, b) => uint8ArrayToHashObject(digest64(hashObjectToUint8Array(a), hashObjectToUint8Array(b))), - batchHash4HashObjectInputs: () => { - throw Error("TODO: not implemented"); + batchHashObjects: (inputs: HashObject[]) => { + // noble does not support batch hash + if (inputs.length === 0) { + return []; + } else if (inputs.length % 2 !== 0) { + throw new Error(`Expect inputs.length to be even, got ${inputs.length}`); + } + + const outputs = new Array(); + for (let i = 0; i < inputs.length; i += 2) { + const output = digest64HashObjects(inputs[i], 
inputs[i + 1]); + outputs.push(output); + } + return outputs; }, }; diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index ec361b76..6d0d4219 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -9,5 +9,8 @@ export type Hasher = { * Hash two 32-byte HashObjects */ digest64HashObjects(a: HashObject, b: HashObject): HashObject; - batchHash4HashObjectInputs(inputs: HashObject[]): HashObject[]; + /** + * Batch hash 2 * n HashObjects, return n HashObjects output + */ + batchHashObjects(inputs: HashObject[]): HashObject[]; }; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index cf5bc0b9..3b35a623 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -407,34 +407,18 @@ export function executeHashComputations(hashComputations: Array= 0; i--) { - const {src0, src1, dest} = hcArr[hcArr.length - i - 1]; - dest.applyHash(hasher.digest64HashObjects(src0, src1)); + const outputs = hasher.batchHashObjects(inputs); + if (outputs.length !== dests.length) { + throw Error(`${inputs.length} inputs produce ${outputs.length} outputs, expected ${dests.length} outputs`); + } + for (let i = 0; i < outputs.length; i++) { + dests[i].applyHash(outputs[i]); } } } From b1a7765be2e2fb9de4cbff690c8972c5d508d0dd Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 25 May 2024 10:30:59 +0700 Subject: [PATCH 012/113] chore: add batchHash() benchmark --- .../src/hasher/hashtree.ts | 1 + .../src/hasher/index.ts | 5 ++- .../test/perf/hasher.test.ts | 2 + .../test/perf/node.test.ts | 43 ++++++++++++++++++- .../test/unit/batchHash.test.ts | 6 +-- .../test/unit/hasher.test.ts | 4 +- .../test/unit/tree.test.ts | 1 - 7 files changed, 55 insertions(+), 7 deletions(-) create mode 100644 packages/persistent-merkle-tree/src/hasher/hashtree.ts diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts new file mode 100644 index 00000000..3c250aa0 --- /dev/null +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -0,0 +1 @@ +// TODO - batch: use @chainsafe/hashtree \ No newline at end of file diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 81f26444..bf429c57 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -7,9 +7,12 @@ export * from "./types"; export * from "./util"; /** - * Hasher used across the SSZ codebase + * Default hasher used across the SSZ codebase, this does not support batch hash. + * Use `as-sha256` hasher for batch hashing using SIMD. 
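+ * (as-sha256 batches by hashing 4 digest pairs per pass with SIMD, via batchHash4HashObjectInputs)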
+ * TODO - batch: Use `hashtree` hasher for 20x speedup */ // export let hasher: Hasher = nobleHasher; +// For testing purposes, we use the as-sha256 hasher export let hasher: Hasher = csHasher; /** diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index e375057f..b0dcc9f7 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -31,3 +31,5 @@ describe("hasher", () => { }); } }); + +// TODO - batch: test more methods diff --git a/packages/persistent-merkle-tree/test/perf/node.test.ts b/packages/persistent-merkle-tree/test/perf/node.test.ts index 029085cc..4905ea8e 100644 --- a/packages/persistent-merkle-tree/test/perf/node.test.ts +++ b/packages/persistent-merkle-tree/test/perf/node.test.ts @@ -1,5 +1,6 @@ import {itBench} from "@dapplion/benchmark"; -import {getNodeH, LeafNode} from "../../src/node"; +import {BranchNode, getNodeH, LeafNode} from "../../src/node"; +import {countToDepth, subtreeFillToContents} from "../../src"; describe("HashObject LeafNode", () => { // Number of new nodes created in processAttestations() on average @@ -40,3 +41,43 @@ describe("HashObject LeafNode", () => { } }); }); + +describe("Node batchHash", () => { + const numNodes = [250_000, 500_000, 1_000_000, 2_000_000]; + + for (const numNode of numNodes) { + itBench({ + id: `batchHash ${numNode} nodes`, + before: () => { + return createList(numNode); + }, + beforeEach: (rootNode: BranchNode) => rootNode, + fn: (rootNode: BranchNode) => { + rootNode.batchHash(); + }, + }); + + itBench({ + id: `get root ${numNode} nodes`, + before: () => { + return createList(numNode); + }, + beforeEach: (rootNode: BranchNode) => rootNode, + fn: (rootNode: BranchNode) => { + rootNode.root; + }, + }); + } +}); + +function createList(numNode: number): BranchNode { + const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); + // add 1 to countToDepth for mix_in_length spec + const depth = countToDepth(BigInt(numNode)) + 1; + const node = subtreeFillToContents(nodes, depth); + return node as BranchNode; +} + +function newLeafNodeFilled(i: number): LeafNode { + return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 256))); +} diff --git a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts b/packages/persistent-merkle-tree/test/unit/batchHash.test.ts index 36fc9890..cb863dc6 100644 --- a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts +++ b/packages/persistent-merkle-tree/test/unit/batchHash.test.ts @@ -5,8 +5,8 @@ import {subtreeFillToContents} from "../../src/subtree"; import {zeroNode} from "../../src/zeroNode"; describe("batchHash", function () { - // const numNodes = [200, 201, 202, 203]; - const numNodes = [32, 33, 64]; + const numNodes = [200, 201, 202, 203]; + // const numNodes = [32, 33, 64]; for (const numNode of numNodes) { it(`${numNode} nodes`, () => { const rootNode = createList(numNode); @@ -36,7 +36,7 @@ function resetNodes(node: Node, depth: number): void { } function newLeafNodeFilled(i: number): LeafNode { - return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 255))); + return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 256))); } function createList(numNode: number): BranchNode { diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index f51ca461..9165f87d 100644 --- 
a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -1,4 +1,4 @@ -import { expect } from "chai"; +import {expect} from "chai"; import {uint8ArrayToHashObject, hasher, hashObjectToUint8Array} from "../../src/hasher"; describe("hasher", function () { @@ -14,3 +14,5 @@ describe("hasher", function () { expect(newRoot).to.be.deep.equal(root, "hash and hash2 is not equal"); }); }); + +// TODO - batch: test more methods diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts index c4c6f74d..a885466d 100644 --- a/packages/persistent-merkle-tree/test/unit/tree.test.ts +++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts @@ -12,7 +12,6 @@ import { findDiffDepthi, BranchNode, HashComputation, - findDiffDepthi, getHashComputations, } from "../../src"; From e14e24a9f95538873184c7198a081f0612e27a91 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sun, 26 May 2024 10:25:26 +0700 Subject: [PATCH 013/113] fix: getHashComputations() unit tests and perf test batchHash() --- .../test/perf/node.test.ts | 12 +-- .../test/unit/node.test.ts | 81 ++++++++++++++++++- 2 files changed, 83 insertions(+), 10 deletions(-) diff --git a/packages/persistent-merkle-tree/test/perf/node.test.ts b/packages/persistent-merkle-tree/test/perf/node.test.ts index 4905ea8e..3f0ebc74 100644 --- a/packages/persistent-merkle-tree/test/perf/node.test.ts +++ b/packages/persistent-merkle-tree/test/perf/node.test.ts @@ -43,15 +43,12 @@ describe("HashObject LeafNode", () => { }); describe("Node batchHash", () => { - const numNodes = [250_000, 500_000, 1_000_000, 2_000_000]; + const numNodes = [250_000, 500_000, 1_000_000]; for (const numNode of numNodes) { itBench({ id: `batchHash ${numNode} nodes`, - before: () => { - return createList(numNode); - }, - beforeEach: (rootNode: BranchNode) => rootNode, + beforeEach: () => createList(numNode), fn: (rootNode: BranchNode) => { rootNode.batchHash(); }, @@ -59,10 +56,7 @@ describe("Node batchHash", () => { itBench({ id: `get root ${numNode} nodes`, - before: () => { - return createList(numNode); - }, - beforeEach: (rootNode: BranchNode) => rootNode, + beforeEach: () => createList(numNode), fn: (rootNode: BranchNode) => { rootNode.root; }, diff --git a/packages/persistent-merkle-tree/test/unit/node.test.ts b/packages/persistent-merkle-tree/test/unit/node.test.ts index 6ae6f440..bbe8d7be 100644 --- a/packages/persistent-merkle-tree/test/unit/node.test.ts +++ b/packages/persistent-merkle-tree/test/unit/node.test.ts @@ -1,6 +1,6 @@ import {HashObject} from "@chainsafe/as-sha256"; import {expect} from "chai"; -import {LeafNode} from "../../src"; +import {BranchNode, HashComputation, LeafNode, countToDepth, getHashComputations, subtreeFillToContents} from "../../src"; describe("LeafNode uint", () => { const testCasesNode: { @@ -194,3 +194,82 @@ describe("getUint with correct sign", () => { expect(leafNodeInt.getUintBigint(8, 0)).to.equal(BigInt("288782042218268212"), "Wrong leafNodeInt.getUintBigint"); }); }); + +describe("getHashComputations", () => { + const testCases: {numNode: number; expectedLengths: number[]}[] = [ + {numNode: 16, expectedLengths: [1, 2, 4, 8]}, + {numNode: 15, expectedLengths: [1, 2, 4, 8]}, + {numNode: 14, expectedLengths: [1, 2, 4, 7]}, + {numNode: 13, expectedLengths: [1, 2, 4, 7]}, + {numNode: 12, expectedLengths: [1, 2, 3, 6]}, + {numNode: 11, expectedLengths: [1, 2, 3, 6]}, + {numNode: 10, expectedLengths: [1, 2, 3, 5]}, + 
{numNode: 9, expectedLengths: [1, 2, 3, 5]}, + // 0 root + // 1 l r + // 2 ll lr rl rr + // 3 lll llr lrl lrr rll rlr rrl rrr + {numNode: 8, expectedLengths: [1, 2, 4]}, + {numNode: 7, expectedLengths: [1, 2, 4]}, + // has 0 node at end of level 2 + {numNode: 6, expectedLengths: [1, 2, 3]}, + {numNode: 5, expectedLengths: [1, 2, 3]}, + {numNode: 4, expectedLengths: [1, 2]}, + ]; + + for (const {numNode, expectedLengths} of testCases) { + it(`subtreeFillToContents with ${numNode} nodes`, () => { + const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); + const depth = countToDepth(BigInt(numNode)); + const rootNode = subtreeFillToContents(nodes, depth); + const hashComputations: HashComputation[][] = []; + getHashComputations(rootNode, 0, hashComputations); + expect(hashComputations.length).to.equal(expectedLengths.length); + for (let i = 0; i < hashComputations.length; i++) { + expect(hashComputations[i].length).to.equal(expectedLengths[i]); + } + }); + } + + // same to above with "1" prepended to the expectedLengths + const testCases2: {numNode: number; expectedLengths: number[]}[] = [ + {numNode: 16, expectedLengths: [1, 1, 2, 4, 8]}, + {numNode: 15, expectedLengths: [1, 1, 2, 4, 8]}, + {numNode: 14, expectedLengths: [1, 1, 2, 4, 7]}, + {numNode: 13, expectedLengths: [1, 1, 2, 4, 7]}, + {numNode: 12, expectedLengths: [1, 1, 2, 3, 6]}, + {numNode: 11, expectedLengths: [1, 1, 2, 3, 6]}, + {numNode: 10, expectedLengths: [1, 1, 2, 3, 5]}, + {numNode: 9, expectedLengths: [1, 1, 2, 3, 5]}, + {numNode: 8, expectedLengths: [1, 1, 2, 4]}, + {numNode: 7, expectedLengths: [1, 1, 2, 4]}, + // has 0 node at end of level 2 + {numNode: 6, expectedLengths: [1, 1, 2, 3]}, + {numNode: 5, expectedLengths: [1, 1, 2, 3]}, + {numNode: 4, expectedLengths: [1, 1, 2]}, + ]; + + for (const {numNode, expectedLengths} of testCases2) { + it(`list with ${numNode} nodes`, () => { + const rootNode = createList(numNode); + const hashComputations: HashComputation[][] = []; + getHashComputations(rootNode, 0, hashComputations); + expect(hashComputations.length).to.equal(expectedLengths.length); + for (let i = 0; i < hashComputations.length; i++) { + expect(hashComputations[i].length).to.equal(expectedLengths[i]); + } + }); + } +}); + +function createList(numNode: number): BranchNode { + const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); + // add 1 to countToDepth for mix_in_length spec + const depth = countToDepth(BigInt(numNode)) + 1; + const node = subtreeFillToContents(nodes, depth); + return node as BranchNode; +} + +function newLeafNodeFilled(i: number): LeafNode { + return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 256))); +} From 87094b009b2ef9406555026820c90da1e0cca2ef Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sun, 26 May 2024 11:16:29 +0700 Subject: [PATCH 014/113] feat: hasher.executeHashComputations() --- .../src/hasher/as-sha256.ts | 44 ++++++++++++++++++- .../src/hasher/noble.ts | 13 ++++++ .../src/hasher/types.ts | 5 +++ packages/persistent-merkle-tree/src/node.ts | 29 +----------- 4 files changed, 61 insertions(+), 30 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 560ae421..95405bd2 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,5 +1,6 @@ import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs} from 
"@chainsafe/as-sha256"; import type {Hasher} from "./types"; +import {HashComputation} from "../node"; export const hasher: Hasher = { digest64: digest2Bytes32, @@ -15,8 +16,8 @@ export const hasher: Hasher = { const batch = Math.floor(inputs.length / 8); const outputs = new Array(); for (let i = 0; i < batch; i++) { - const [out0, out1, out2, out3] = batchHash4HashObjectInputs(inputs.slice(i * 8, i * 8 + 8)); - outputs.push(out0, out1, out2, out3); + const outs = batchHash4HashObjectInputs(inputs.slice(i * 8, i * 8 + 8)); + outputs.push(...outs); } for (let i = batch * 8; i < inputs.length; i += 2) { @@ -26,4 +27,43 @@ export const hasher: Hasher = { return outputs; }, + executeHashComputations: (hashComputations: Array) => { + for (let level = hashComputations.length - 1; level >= 0; level--) { + const hcArr = hashComputations[level]; + if (!hcArr) { + // should not happen + throw Error(`no hash computations for level ${level}`); + } + + // HashComputations of the same level are safe to batch + const batch = Math.floor(hcArr.length / 4); + for (let i = 0; i < batch; i++) { + const index = i * 4; + const outs = batchHash4HashObjectInputs([ + hcArr[index].src0, + hcArr[index].src1, + hcArr[index + 1].src0, + hcArr[index + 1].src1, + hcArr[index + 2].src0, + hcArr[index + 2].src1, + hcArr[index + 3].src0, + hcArr[index + 3].src1, + ]); + if (outs.length !== 4) { + throw Error(`batchHash4HashObjectInputs returned ${outs.length} outputs, expected 4`); + } + hcArr[index].dest.applyHash(outs[0]); + hcArr[index + 1].dest.applyHash(outs[1]); + hcArr[index + 2].dest.applyHash(outs[2]); + hcArr[index + 3].dest.applyHash(outs[3]); + } + + // remaining + for (let i = batch * 4; i < hcArr.length; i++) { + const {src0, src1, dest} = hcArr[i]; + const output = digest64HashObjects(src0, src1); + dest.applyHash(output); + } + } + }, }; diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 3f3b082c..2af966a7 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -23,4 +23,17 @@ export const hasher: Hasher = { } return outputs; }, + executeHashComputations: (hashComputations) => { + for (let level = hashComputations.length - 1; level >= 0; level--) { + const hcArr = hashComputations[level]; + if (!hcArr) { + // should not happen + throw Error(`no hash computations for level ${level}`); + } + + for (const hc of hcArr) { + hc.dest.applyHash(digest64HashObjects(hc.src0, hc.src1)); + } + } + }, }; diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 6d0d4219..2fdf136c 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -1,4 +1,5 @@ import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; +import {HashComputation} from "../node"; export type Hasher = { /** @@ -13,4 +14,8 @@ export type Hasher = { * Batch hash 2 * n HashObjects, return n HashObjects output */ batchHashObjects(inputs: HashObject[]): HashObject[]; + /** + * Execute a batch of HashComputations + */ + executeHashComputations(hashComputations: Array): void; }; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 3b35a623..66a288d6 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -86,7 +86,7 @@ export class BranchNode extends Node { 
batchHash(): Uint8Array { const hashComputations: HashComputation[][] = []; getHashComputations(this, 0, hashComputations); - executeHashComputations(hashComputations); + hasher.executeHashComputations(hashComputations); if (this.h0 === null) { throw Error("Root is not computed by batch"); @@ -395,33 +395,6 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void else throw Error("hIndex > 7"); } -/** - * Given an array of HashComputation, execute them from the end - * The consumer has the root node so it should be able to get the final root from there - */ -export function executeHashComputations(hashComputations: Array): void { - for (let level = hashComputations.length - 1; level >= 0; level--) { - const hcArr = hashComputations[level]; - if (!hcArr) { - // should not happen - throw Error(`no hash computations for level ${level}`); - } - // HashComputations of the same level are safe to batch - const inputs: HashObject[] = []; - const dests: Node[] = []; - for (const {src0, src1, dest} of hcArr) { - inputs.push(src0, src1); - dests.push(dest); - } - const outputs = hasher.batchHashObjects(inputs); - if (outputs.length !== dests.length) { - throw Error(`${inputs.length} inputs produce ${outputs.length} outputs, expected ${dests.length} outputs`); - } - for (let i = 0; i < outputs.length; i++) { - dests[i].applyHash(outputs[i]); - } - } -} export function getHashComputations(node: Node, offset: number, hashCompsByLevel: Array): void { if (node.h0 === null) { From 525bb1f91d7c661377805a5885cb925cd1d32cd8 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sun, 26 May 2024 12:59:50 +0700 Subject: [PATCH 015/113] fix: executeHashComputations() util to call hasher implementation --- packages/persistent-merkle-tree/src/node.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 66a288d6..8b1e5695 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -395,7 +395,6 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void else throw Error("hIndex > 7"); } - export function getHashComputations(node: Node, offset: number, hashCompsByLevel: Array): void { if (node.h0 === null) { const hashComputations = arrayAtIndex(hashCompsByLevel, offset); @@ -411,6 +410,10 @@ export function getHashComputations(node: Node, offset: number, hashCompsByLevel // else stop the recursion, LeafNode should have h0 } +export function executeHashComputations(hashComputations: Array): void { + hasher.executeHashComputations(hashComputations); +} + export function arrayAtIndex(twoDArray: Array, index: number): T[] { if (twoDArray[index] === undefined) { twoDArray[index] = []; From cef8fa7a129e65de7c041e639c6ef1a466a88394 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sun, 26 May 2024 13:30:19 +0700 Subject: [PATCH 016/113] chore: add getHashComputations() benchmark --- .../src/hasher/as-sha256.ts | 36 +++++++++++-------- .../test/perf/node.test.ts | 11 +++++- 2 files changed, 31 insertions(+), 16 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 95405bd2..ec484ddb 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -39,23 +39,29 @@ export const hasher: Hasher = { const batch = Math.floor(hcArr.length / 4); for (let i = 0; i < 
batch; i++) { const index = i * 4; - const outs = batchHash4HashObjectInputs([ - hcArr[index].src0, - hcArr[index].src1, - hcArr[index + 1].src0, - hcArr[index + 1].src1, - hcArr[index + 2].src0, - hcArr[index + 2].src1, - hcArr[index + 3].src0, - hcArr[index + 3].src1, + // access array once + const {src0: src0_0, src1: src1_0, dest: dest_0} = hcArr[index]; + const {src0: src0_1, src1: src1_1, dest: dest_1} = hcArr[index + 1]; + const {src0: src0_2, src1: src1_2, dest: dest_2} = hcArr[index + 2]; + const {src0: src0_3, src1: src1_3, dest: dest_3} = hcArr[index + 3]; + + const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ + src0_0, + src1_0, + src0_1, + src1_1, + src0_2, + src1_2, + src0_3, + src1_3, ]); - if (outs.length !== 4) { - throw Error(`batchHash4HashObjectInputs returned ${outs.length} outputs, expected 4`); + if (o0 == null || o1 == null || o2 == null || o3 == null) { + throw Error(`batchHash4HashObjectInputs return null at batch ${i} level ${level}`); } - hcArr[index].dest.applyHash(outs[0]); - hcArr[index + 1].dest.applyHash(outs[1]); - hcArr[index + 2].dest.applyHash(outs[2]); - hcArr[index + 3].dest.applyHash(outs[3]); + dest_0.applyHash(o0); + dest_1.applyHash(o1); + dest_2.applyHash(o2); + dest_3.applyHash(o3); } // remaining diff --git a/packages/persistent-merkle-tree/test/perf/node.test.ts b/packages/persistent-merkle-tree/test/perf/node.test.ts index 3f0ebc74..c8066d40 100644 --- a/packages/persistent-merkle-tree/test/perf/node.test.ts +++ b/packages/persistent-merkle-tree/test/perf/node.test.ts @@ -1,5 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import {BranchNode, getNodeH, LeafNode} from "../../src/node"; +import {BranchNode, getHashComputations, getNodeH, HashComputation, LeafNode} from "../../src/node"; import {countToDepth, subtreeFillToContents} from "../../src"; describe("HashObject LeafNode", () => { @@ -46,6 +46,15 @@ describe("Node batchHash", () => { const numNodes = [250_000, 500_000, 1_000_000]; for (const numNode of numNodes) { + itBench({ + id: `getHashComputations ${numNode} nodes`, + beforeEach: () => createList(numNode), + fn: (rootNode: BranchNode) => { + const hashComputations: HashComputation[][] = []; + getHashComputations(rootNode, 0, hashComputations); + }, + }); + itBench({ id: `batchHash ${numNode} nodes`, beforeEach: () => createList(numNode), From 88fd6e86fa39c736771018f0d88e0185d5549d25 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 28 May 2024 14:34:56 +0900 Subject: [PATCH 017/113] fix: improve for loop in as-sha256 executeHashComputations --- .../src/hasher/as-sha256.ts | 125 ++++++++++++++---- packages/persistent-merkle-tree/src/node.ts | 2 +- 2 files changed, 97 insertions(+), 30 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index ec484ddb..b92db603 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,6 +1,6 @@ import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {HashComputation} from "../node"; +import {HashComputation, Node} from "../node"; export const hasher: Hasher = { digest64: digest2Bytes32, @@ -36,39 +36,106 @@ export const hasher: Hasher = { } // HashComputations of the same level are safe to batch - const batch = Math.floor(hcArr.length / 4); - for (let i = 0; i < batch; i++) { - const index = i * 4; - // access array once - 
const {src0: src0_0, src1: src1_0, dest: dest_0} = hcArr[index]; - const {src0: src0_1, src1: src1_1, dest: dest_1} = hcArr[index + 1]; - const {src0: src0_2, src1: src1_2, dest: dest_2} = hcArr[index + 2]; - const {src0: src0_3, src1: src1_3, dest: dest_3} = hcArr[index + 3]; + let src0_0: Node | null = null; + let src1_0: Node | null = null; + let dest_0: Node | null = null; + let src0_1: Node | null = null; + let src1_1: Node | null = null; + let dest_1: Node | null = null; + let src0_2: Node | null = null; + let src1_2: Node | null = null; + let dest_2: Node | null = null; + let src0_3: Node | null = null; + let src1_3: Node | null = null; + let dest_3: Node | null = null; - const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ - src0_0, - src1_0, - src0_1, - src1_1, - src0_2, - src1_2, - src0_3, - src1_3, - ]); - if (o0 == null || o1 == null || o2 == null || o3 == null) { - throw Error(`batchHash4HashObjectInputs return null at batch ${i} level ${level}`); + for (const [i, hc] of hcArr.entries()) { + const indexInBatch = i % 4; + switch (indexInBatch) { + case 0: + src0_0 = hc.src0; + src1_0 = hc.src1; + dest_0 = hc.dest; + break; + case 1: + src0_1 = hc.src0; + src1_1 = hc.src1; + dest_1 = hc.dest; + break; + case 2: + src0_2 = hc.src0; + src1_2 = hc.src1; + dest_2 = hc.dest; + break; + case 3: + src0_3 = hc.src0; + src1_3 = hc.src1; + dest_3 = hc.dest; + break; + default: + throw Error(`Unexpected indexInBatch ${indexInBatch}`); + } + + if ( + indexInBatch === 3 && + src0_0 !== null && + src1_0 !== null && + dest_0 !== null && + src0_1 !== null && + src1_1 !== null && + dest_1 !== null && + src0_2 !== null && + src1_2 !== null && + dest_2 !== null && + src0_3 !== null && + src1_3 !== null && + dest_3 !== null + ) { + const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ + src0_0, + src1_0, + src0_1, + src1_1, + src0_2, + src1_2, + src0_3, + src1_3, + ]); + if (o0 == null || o1 == null || o2 == null || o3 == null) { + throw Error(`batchHash4HashObjectInputs return null at batch ${i} level ${level}`); + } + dest_0.applyHash(o0); + dest_1.applyHash(o1); + dest_2.applyHash(o2); + dest_3.applyHash(o3); + + src0_0 = null; + src1_0 = null; + dest_0 = null; + src0_1 = null; + src1_1 = null; + dest_1 = null; + src0_2 = null; + src1_2 = null; + dest_2 = null; + src0_3 = null; + src1_3 = null; + dest_3 = null; } - dest_0.applyHash(o0); - dest_1.applyHash(o1); - dest_2.applyHash(o2); - dest_3.applyHash(o3); } // remaining - for (let i = batch * 4; i < hcArr.length; i++) { - const {src0, src1, dest} = hcArr[i]; - const output = digest64HashObjects(src0, src1); - dest.applyHash(output); + if (src0_0 !== null && src1_0 !== null && dest_0 !== null) { + dest_0.applyHash(digest64HashObjects(src0_0, src1_0)); + } + if (src0_1 !== null && src1_1 !== null && dest_1 !== null) { + dest_1.applyHash(digest64HashObjects(src0_1, src1_1)); + } + if (src0_2 !== null && src1_2 !== null && dest_2 !== null) { + dest_2.applyHash(digest64HashObjects(src0_2, src1_2)); + } + if (src0_3 !== null && src1_3 !== null && dest_3 !== null) { + dest_3.applyHash(digest64HashObjects(src0_3, src1_3)); } } }, diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 8b1e5695..e2d69c1b 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -86,7 +86,7 @@ export class BranchNode extends Node { batchHash(): Uint8Array { const hashComputations: HashComputation[][] = []; getHashComputations(this, 0, hashComputations); - 
hasher.executeHashComputations(hashComputations); + executeHashComputations(hashComputations); if (this.h0 === null) { throw Error("Root is not computed by batch"); From 2d5fe1014f15860039b0e241a9e98fdb9f54a2f3 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 28 May 2024 14:58:52 +0900 Subject: [PATCH 018/113] chore: rename variables --- .../src/hasher/as-sha256.ts | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index b92db603..5fc772b6 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -38,16 +38,16 @@ export const hasher: Hasher = { // HashComputations of the same level are safe to batch let src0_0: Node | null = null; let src1_0: Node | null = null; - let dest_0: Node | null = null; + let dest0: Node | null = null; let src0_1: Node | null = null; let src1_1: Node | null = null; - let dest_1: Node | null = null; + let dest1: Node | null = null; let src0_2: Node | null = null; let src1_2: Node | null = null; - let dest_2: Node | null = null; + let dest2: Node | null = null; let src0_3: Node | null = null; let src1_3: Node | null = null; - let dest_3: Node | null = null; + let dest3: Node | null = null; for (const [i, hc] of hcArr.entries()) { const indexInBatch = i % 4; @@ -55,22 +55,22 @@ export const hasher: Hasher = { case 0: src0_0 = hc.src0; src1_0 = hc.src1; - dest_0 = hc.dest; + dest0 = hc.dest; break; case 1: src0_1 = hc.src0; src1_1 = hc.src1; - dest_1 = hc.dest; + dest1 = hc.dest; break; case 2: src0_2 = hc.src0; src1_2 = hc.src1; - dest_2 = hc.dest; + dest2 = hc.dest; break; case 3: src0_3 = hc.src0; src1_3 = hc.src1; - dest_3 = hc.dest; + dest3 = hc.dest; break; default: throw Error(`Unexpected indexInBatch ${indexInBatch}`); @@ -80,16 +80,16 @@ export const hasher: Hasher = { indexInBatch === 3 && src0_0 !== null && src1_0 !== null && - dest_0 !== null && + dest0 !== null && src0_1 !== null && src1_1 !== null && - dest_1 !== null && + dest1 !== null && src0_2 !== null && src1_2 !== null && - dest_2 !== null && + dest2 !== null && src0_3 !== null && src1_3 !== null && - dest_3 !== null + dest3 !== null ) { const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ src0_0, @@ -102,40 +102,40 @@ export const hasher: Hasher = { src1_3, ]); if (o0 == null || o1 == null || o2 == null || o3 == null) { - throw Error(`batchHash4HashObjectInputs return null at batch ${i} level ${level}`); + throw Error(`batchHash4HashObjectInputs return null or undefined at batch ${i} level ${level}`); } - dest_0.applyHash(o0); - dest_1.applyHash(o1); - dest_2.applyHash(o2); - dest_3.applyHash(o3); + dest0.applyHash(o0); + dest1.applyHash(o1); + dest2.applyHash(o2); + dest3.applyHash(o3); src0_0 = null; src1_0 = null; - dest_0 = null; + dest0 = null; src0_1 = null; src1_1 = null; - dest_1 = null; + dest1 = null; src0_2 = null; src1_2 = null; - dest_2 = null; + dest2 = null; src0_3 = null; src1_3 = null; - dest_3 = null; + dest3 = null; } } // remaining - if (src0_0 !== null && src1_0 !== null && dest_0 !== null) { - dest_0.applyHash(digest64HashObjects(src0_0, src1_0)); + if (src0_0 !== null && src1_0 !== null && dest0 !== null) { + dest0.applyHash(digest64HashObjects(src0_0, src1_0)); } - if (src0_1 !== null && src1_1 !== null && dest_1 !== null) { - dest_1.applyHash(digest64HashObjects(src0_1, src1_1)); + if (src0_1 !== null && src1_1 !== null && dest1 !== null) { + 
dest1.applyHash(digest64HashObjects(src0_1, src1_1)); } - if (src0_2 !== null && src1_2 !== null && dest_2 !== null) { - dest_2.applyHash(digest64HashObjects(src0_2, src1_2)); + if (src0_2 !== null && src1_2 !== null && dest2 !== null) { + dest2.applyHash(digest64HashObjects(src0_2, src1_2)); } - if (src0_3 !== null && src1_3 !== null && dest_3 !== null) { - dest_3.applyHash(digest64HashObjects(src0_3, src1_3)); + if (src0_3 !== null && src1_3 !== null && dest3 !== null) { + dest3.applyHash(digest64HashObjects(src0_3, src1_3)); } } }, From 1689afbd4bcbce2ee5b8399ccd74cca09e70db57 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 3 Jun 2024 10:55:37 +0700 Subject: [PATCH 019/113] fix: improve executeHashComputations() by reducing one if --- .../src/hasher/as-sha256.ts | 95 ++++++++++--------- 1 file changed, 48 insertions(+), 47 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 5fc772b6..df2e14ae 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -51,6 +51,7 @@ export const hasher: Hasher = { for (const [i, hc] of hcArr.entries()) { const indexInBatch = i % 4; + switch (indexInBatch) { case 0: src0_0 = hc.src0; @@ -71,57 +72,57 @@ export const hasher: Hasher = { src0_3 = hc.src0; src1_3 = hc.src1; dest3 = hc.dest; + + if ( + src0_0 !== null && + src1_0 !== null && + dest0 !== null && + src0_1 !== null && + src1_1 !== null && + dest1 !== null && + src0_2 !== null && + src1_2 !== null && + dest2 !== null && + src0_3 !== null && + src1_3 !== null && + dest3 !== null + ) { + const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ + src0_0, + src1_0, + src0_1, + src1_1, + src0_2, + src1_2, + src0_3, + src1_3, + ]); + if (o0 == null || o1 == null || o2 == null || o3 == null) { + throw Error(`batchHash4HashObjectInputs return null or undefined at batch ${i} level ${level}`); + } + dest0.applyHash(o0); + dest1.applyHash(o1); + dest2.applyHash(o2); + dest3.applyHash(o3); + + // reset for next batch + src0_0 = null; + src1_0 = null; + dest0 = null; + src0_1 = null; + src1_1 = null; + dest1 = null; + src0_2 = null; + src1_2 = null; + dest2 = null; + src0_3 = null; + src1_3 = null; + dest3 = null; + } break; default: throw Error(`Unexpected indexInBatch ${indexInBatch}`); } - - if ( - indexInBatch === 3 && - src0_0 !== null && - src1_0 !== null && - dest0 !== null && - src0_1 !== null && - src1_1 !== null && - dest1 !== null && - src0_2 !== null && - src1_2 !== null && - dest2 !== null && - src0_3 !== null && - src1_3 !== null && - dest3 !== null - ) { - const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ - src0_0, - src1_0, - src0_1, - src1_1, - src0_2, - src1_2, - src0_3, - src1_3, - ]); - if (o0 == null || o1 == null || o2 == null || o3 == null) { - throw Error(`batchHash4HashObjectInputs return null or undefined at batch ${i} level ${level}`); - } - dest0.applyHash(o0); - dest1.applyHash(o1); - dest2.applyHash(o2); - dest3.applyHash(o3); - - src0_0 = null; - src1_0 = null; - dest0 = null; - src0_1 = null; - src1_1 = null; - dest1 = null; - src0_2 = null; - src1_2 = null; - dest2 = null; - src0_3 = null; - src1_3 = null; - dest3 = null; - } } // remaining From a49d32b69b5660feb10384bf7ed46ad0656a720b Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Fri, 7 Jun 2024 15:31:47 +0200 Subject: [PATCH 020/113] feat: add hashtree hasher --- packages/persistent-merkle-tree/package.json | 6 +++ .../src/hasher/hashtree.ts | 44 
++++++++++++++++++- 2 files changed, 49 insertions(+), 1 deletion(-)

diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json
index 73efe0f4..4b803e3d 100644
--- a/packages/persistent-merkle-tree/package.json
+++ b/packages/persistent-merkle-tree/package.json
@@ -45,6 +45,12 @@
   "homepage": "https://github.com/ChainSafe/persistent-merkle-tree#readme",
   "dependencies": {
     "@chainsafe/as-sha256": "0.4.2",
+    "@chainsafe/hashtree": "1.0.0",
     "@noble/hashes": "^1.3.0"
+  },
+  "peerDependencies": {
+    "@chainsafe/hashtree-linux-x64-gnu": "1.0.0",
+    "@chainsafe/hashtree-linux-arm64-gnu": "1.0.0",
+    "@chainsafe/hashtree-darwin-arm64": "1.0.0"
   }
 }
diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
index 3c250aa0..dba41a3a 100644
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -1 +1,43 @@
-// TODO - batch: use @chainsafe/hashtree
\ No newline at end of file
+import {hash} from "@chainsafe/hashtree";
+import {byteArrayToHashObject, hashObjectToByteArray} from "@chainsafe/as-sha256";
+import {Hasher, HashObject} from "./types";
+import {HashComputation, Node} from "../node";
+
+export const hasher: Hasher = {
+  digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array {
+    return hash(Uint8Array.of(obj1, obj2));
+  },
+  digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject {
+    return byteArrayToHashObject(hasher.digest64(hashObjectToByteArray(obj1), hashObjectToByteArray(obj2)));
+  },
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  batchHashObjects(inputs: HashObject[]): HashObject[] {
+    throw new Error("batchHashObjects not implemented for hashtree hasher");
+  },
+  executeHashComputations(hashComputations: Array<HashComputation[]>): void {
+    for (let level = hashComputations.length - 1; level >= 0; level--) {
+      const hcArr = hashComputations[level];
+      if (!hcArr) {
+        // should not happen
+        throw Error(`no hash computations for level ${level}`);
+      }
+
+      // size input array to 2 HashObject per computation * 32 bytes per object
+      const input: Uint8Array = Uint8Array.of(new Array(hcArr.length * 2 * 32));
+      const output: Node[] = [];
+      for (const [i, hc] of hcArr.entries()) {
+        const offset = (i - 1) * 64; // zero index * 2 leafs * 32 bytes
+        hashObjectToByteArray(hc.src0, input, offset);
+        hashObjectToByteArray(hc.src1, input, offset + 32);
+        output.push(hc.dest);
+      }
+
+      const result: Uint8Array = hash(input);
+
+      for (const [i, out] of output.entries()) {
+        const offset = (i - 1) * 32;
+        out.applyHash(result.slice(offset, offset + 32));
+      }
+    }
+  },
+};
From 92d33f0428ebc7a7c221e94ebeea5c4c46de972a Mon Sep 17 00:00:00 2001
From: matthewkeil
Date: Fri, 7 Jun 2024 15:32:13 +0200
Subject: [PATCH 021/113] feat: use hashtreeHasher
---
 packages/persistent-merkle-tree/src/hasher/index.ts | 9 ++++++---
 packages/persistent-merkle-tree/src/hasher/types.ts | 2 ++
 2 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts
index bf429c57..2c5170e2 100644
--- a/packages/persistent-merkle-tree/src/hasher/index.ts
+++ b/packages/persistent-merkle-tree/src/hasher/index.ts
@@ -1,8 +1,8 @@
 import {Hasher} from "./types";
 // import {hasher as nobleHasher} from "./noble";
-import {hasher as csHasher} from "./as-sha256";
+// import {hasher as csHasher} from "./as-sha256";
+import {hasher as hashtreeHasher} from
"./hashtree"; -export {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; export * from "./types"; export * from "./util"; @@ -13,7 +13,10 @@ export * from "./util"; */ // export let hasher: Hasher = nobleHasher; // For testing purposes, we use the as-sha256 hasher -export let hasher: Hasher = csHasher; +// export let hasher: Hasher = csHasher; + +// For testing purposes, we use the hashtree hasher +export let hasher: Hasher = hashtreeHasher; /** * Set the hasher to be used across the SSZ codebase diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 2fdf136c..e61506d2 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -1,6 +1,8 @@ import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; import {HashComputation} from "../node"; +export type {HashObject}; + export type Hasher = { /** * Hash two 32-byte Uint8Arrays From 4d40df39cc38cefc2abfb1b95bfd99a5db29e462 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Fri, 7 Jun 2024 19:54:32 +0200 Subject: [PATCH 022/113] fix: rough out of hashtreeHasher --- packages/persistent-merkle-tree/package.json | 3 ++ .../src/hasher/hashtree.ts | 12 +++-- yarn.lock | 48 ++++++++----------- 3 files changed, 31 insertions(+), 32 deletions(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index 4b803e3d..b35c9e92 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -48,6 +48,9 @@ "@chainsafe/hashtree": "1.0.0", "@noble/hashes": "^1.3.0" }, + "devDependencies": { + "@chainsafe/hashtree-darwin-arm64": "1.0.0" + }, "peerDependencies": { "@chainsafe/hashtree-linux-x64-gnu": "1.0.0", "@chainsafe/hashtree-linux-arm64-gnu": "1.0.0", diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index dba41a3a..772ddd59 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -5,10 +5,14 @@ import {HashComputation, Node} from "../node"; export const hasher: Hasher = { digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array { - return hash(Uint8Array.of(obj1, obj2)); + return hash(Uint8Array.from([...obj1, ...obj2])); }, digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject { - return byteArrayToHashObject(hasher.digest64(hashObjectToByteArray(obj1), hashObjectToByteArray(obj2))); + const input1 = Uint8Array.from(new Array(32)); + const input2 = Uint8Array.from(new Array(32)); + hashObjectToByteArray(obj1, input1, 0); + hashObjectToByteArray(obj2, input2, 0); + return byteArrayToHashObject(hasher.digest64(input1, input2)); }, // eslint-disable-next-line @typescript-eslint/no-unused-vars batchHashObjects(inputs: HashObject[]): HashObject[] { @@ -23,7 +27,7 @@ export const hasher: Hasher = { } // size input array to 2 HashObject per computation * 32 bytes per object - const input: Uint8Array = Uint8Array.of(new Array(hcArr.length * 2 * 32)); + const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32)); const output: Node[] = []; for (const [i, hc] of hcArr.entries()) { const offset = (i - 1) * 64; // zero index * 2 leafs * 32 bytes @@ -36,7 +40,7 @@ export const hasher: Hasher = { for (const [i, out] of output.entries()) { const offset = (i - 1) * 32; - out.applyHash(result.slice(offset, offset + 32)); + 
out.applyHash(byteArrayToHashObject(result.slice(offset, offset + 32))); } } }, diff --git a/yarn.lock b/yarn.lock index c22a5dd5..66d1b7ec 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1252,6 +1252,23 @@ core-js "2.6.10" require-resolve "0.0.2" +"@chainsafe/hashtree-darwin-arm64@1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-darwin-arm64/-/hashtree-darwin-arm64-1.0.0.tgz#a9fb6b70eaf1f715c14caff22a64152a1903258e" + integrity sha512-duJfn57lUXkSedvEisEhXNJcUZAZLKY3D3t5Jx2EUfNS1PpVLM9k5oBG2cjolyNso2n94LJGlyYKFMrPoPig1w== + +"@chainsafe/hashtree-linux-arm64-gnu@1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-arm64-gnu/-/hashtree-linux-arm64-gnu-1.0.0.tgz#168db259636261d9f3612354cad9f730a4be7110" + integrity sha512-XdYEV6z503Oxa7+mPtUEq9KoKfBAs0BcxGaRiDttCbZK2/J7CcTlobBGd7KMxJ/dQ4IUonaXsob0BnXBcrlwuw== + +"@chainsafe/hashtree@1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree/-/hashtree-1.0.0.tgz#529439fb07299758ca5bbe69a00d1dc4ad83a949" + integrity sha512-qft0MZiLl5jbe8omZaSp1vQ2YCO9qCb262+5qD1vsgN6l1ga3ZFKLyNI6xvwbhC7ZnzZd46vr+p+KvdUIgruOw== + optionalDependencies: + "@chainsafe/hashtree-linux-arm64-gnu" "1.0.0" + "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" @@ -11283,7 +11300,7 @@ streamroller@^3.1.5: debug "^4.3.4" fs-extra "^8.1.0" -"string-width-cjs@npm:string-width@^4.2.0": +"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -11300,15 +11317,6 @@ streamroller@^3.1.5: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" -"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -11387,7 +11395,7 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -11415,13 +11423,6 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - strip-ansi@^7.0.1: version "7.1.0" 
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -12616,7 +12617,7 @@ workerpool@6.1.0: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.1.0.tgz#a8e038b4c94569596852de7a8ea4228eefdeb37b" integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -12643,15 +12644,6 @@ wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" From f56a6c57e214e5098150a27d0018d88b263a1c52 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Mon, 10 Jun 2024 19:33:06 +0200 Subject: [PATCH 023/113] feat: add name to hasher for type detection in tests --- packages/persistent-merkle-tree/src/hasher/as-sha256.ts | 1 + packages/persistent-merkle-tree/src/hasher/noble.ts | 1 + packages/persistent-merkle-tree/src/hasher/types.ts | 2 ++ 3 files changed, 4 insertions(+) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index df2e14ae..4409d034 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -3,6 +3,7 @@ import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; export const hasher: Hasher = { + name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects, batchHashObjects: (inputs: HashObject[]) => { diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 2af966a7..43c32f10 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -6,6 +6,7 @@ import {hashObjectToUint8Array, uint8ArrayToHashObject} from "./util"; const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().update(a).update(b).digest(); export const hasher: Hasher = { + name: "noble", digest64, digest64HashObjects: (a, b) => uint8ArrayToHashObject(digest64(hashObjectToUint8Array(a), hashObjectToUint8Array(b))), batchHashObjects: (inputs: HashObject[]) => { diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index e61506d2..74812aee 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -4,6 +4,8 @@ import {HashComputation} from "../node"; export type {HashObject}; export type Hasher = { + // name of the hashing library + name: string; /** * Hash two 32-byte Uint8Arrays */ From b9f9426549b56a1b6e1529c45349d3611e1dc2d5 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Mon, 10 Jun 2024 19:33:25 +0200 Subject: [PATCH 024/113] 
feat: fix bug in hashtree and add batch hashing
---
 .../src/hasher/hashtree.ts | 27 ++++++++++++++-----
 1 file changed, 20 insertions(+), 7 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
index 772ddd59..e3a2a0b2 100644
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -4,6 +4,7 @@ import {Hasher, HashObject} from "./types";
 import {HashComputation, Node} from "../node";
 
 export const hasher: Hasher = {
+  name: "hashtree",
   digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array {
     return hash(Uint8Array.from([...obj1, ...obj2]));
   },
@@ -16,7 +17,19 @@ export const hasher: Hasher = {
   },
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
   batchHashObjects(inputs: HashObject[]): HashObject[] {
-    throw new Error("batchHashObjects not implemented for hashtree hasher");
+    if (inputs.length === 0) {
+      return [];
+    }
+    // size input array to 2 HashObject per computation * 32 bytes per object
+    const input: Uint8Array = Uint8Array.from(new Array(inputs.length * 32));
+    inputs.forEach((hashObject, i) => hashObjectToByteArray(hashObject, input, i * 32));
+    const result: Uint8Array = hash(input);
+    const outputs: HashObject[] = [];
+    for (let i = 0; i < inputs.length / 2; i++) {
+      const offset = i * 32;
+      outputs.push(byteArrayToHashObject(result.slice(offset, offset + 32)));
+    }
+    return outputs;
   },
   executeHashComputations(hashComputations: Array<HashComputation[]>): void {
     for (let level = hashComputations.length - 1; level >= 0; level--) {
@@ -29,17 +42,17 @@ export const hasher: Hasher = {
       }
 
       // size input array to 2 HashObject per computation * 32 bytes per object
       const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32));
       const output: Node[] = [];
-      for (const [i, hc] of hcArr.entries()) {
-        const offset = (i - 1) * 64; // zero index * 2 leafs * 32 bytes
-        hashObjectToByteArray(hc.src0, input, offset);
-        hashObjectToByteArray(hc.src1, input, offset + 32);
-        output.push(hc.dest);
+      for (const [i, {src0, src1, dest}] of hcArr.entries()) {
+        const offset = i * 64; // zero index * 2 leafs * 32 bytes
+        hashObjectToByteArray(src0, input, offset);
+        hashObjectToByteArray(src1, input, offset + 32);
+        output.push(dest);
       }
 
       const result: Uint8Array = hash(input);
 
       for (const [i, out] of output.entries()) {
-        const offset = (i - 1) * 32;
+        const offset = i * 32;
         out.applyHash(byteArrayToHashObject(result.slice(offset, offset + 32)));
       }
     }
From cc57d3d3026ba5470e0e037e3cc901f48c2dc802 Mon Sep 17 00:00:00 2001
From: matthewkeil
Date: Mon, 10 Jun 2024 19:33:39 +0200
Subject: [PATCH 025/113] test: add expectHex util
---
 .../test/utils/expectHex.ts | 20 +++++++++++++++
 1 file changed, 20 insertions(+)
 create mode 100644 packages/persistent-merkle-tree/test/utils/expectHex.ts

diff --git a/packages/persistent-merkle-tree/test/utils/expectHex.ts b/packages/persistent-merkle-tree/test/utils/expectHex.ts
new file mode 100644
index 00000000..aef985a3
--- /dev/null
+++ b/packages/persistent-merkle-tree/test/utils/expectHex.ts
@@ -0,0 +1,20 @@
+import {expect} from "chai";
+
+type BufferLike = string | Uint8Array | Buffer;
+
+export function toHexString(bytes: BufferLike): string {
+  if (typeof bytes === "string") return bytes;
+  if (bytes instanceof Buffer) return bytes.toString("hex");
+  if (bytes instanceof Uint8Array) return Buffer.from(bytes).toString("hex");
+  throw Error("toHexString only accepts BufferLike types");
+}
+
+export function
toHex(bytes: BufferLike): string { + const hex = toHexString(bytes); + if (hex.startsWith("0x")) return hex; + return "0x" + hex; +} + +export function expectEqualHex(value: BufferLike, expected: BufferLike): void { + expect(toHex(value)).to.be.equal(toHex(expected)); +} From 94b2d0917de940645ba4c79c7844413678d9229f Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Mon, 10 Jun 2024 19:33:57 +0200 Subject: [PATCH 026/113] test: add unit tests for hashers --- .../test/unit/hasher.test.ts | 93 ++++++++++++++++--- 1 file changed, 79 insertions(+), 14 deletions(-) diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 9165f87d..3505ab0d 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -1,17 +1,82 @@ -import {expect} from "chai"; -import {uint8ArrayToHashObject, hasher, hashObjectToUint8Array} from "../../src/hasher"; - -describe("hasher", function () { - it("hasher methods should be the same", () => { - const root1 = Buffer.alloc(32, 1); - const root2 = Buffer.alloc(32, 2); - const root = hasher.digest64(root1, root2); - - const obj1 = uint8ArrayToHashObject(root1); - const obj2 = uint8ArrayToHashObject(root2); - const obj = hasher.digest64HashObjects(obj1, obj2); - const newRoot = hashObjectToUint8Array(obj); - expect(newRoot).to.be.deep.equal(root, "hash and hash2 is not equal"); +import {expectEqualHex} from "../utils/expectHex"; +import {uint8ArrayToHashObject, hashObjectToUint8Array} from "../../src/hasher/util"; +import {hasher as nobleHasher} from "../../src/hasher/noble"; +import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256"; +import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; +import {linspace} from "../utils/misc"; +import {BranchNode, LeafNode} from "../../src/node"; +import {subtreeFillToContents} from "../../src"; + +const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher]; + +function buildComparisonTrees(depth: number): [BranchNode, BranchNode] { + const width = 2 ** (depth - 1); + const nodes = linspace(width).map((num) => LeafNode.fromRoot(Uint8Array.from(Buffer.alloc(32, num)))); + const copy = nodes.map((node) => node.clone()); + const branch1 = subtreeFillToContents(nodes, depth) as BranchNode; + const branch2 = subtreeFillToContents(copy, depth) as BranchNode; + return [branch1, branch2]; +} + +describe("hashers", function () { + describe("digest64 vs digest64HashObjects methods should be the same", () => { + for (const hasher of hashers) { + it(`${hasher.name} hasher`, () => { + const root1 = Buffer.alloc(32, 1); + const root2 = Buffer.alloc(32, 2); + const root = hasher.digest64(root1, root2); + + const obj1 = uint8ArrayToHashObject(root1); + const obj2 = uint8ArrayToHashObject(root2); + const obj = hasher.digest64HashObjects(obj1, obj2); + const newRoot = hashObjectToUint8Array(obj); + expectEqualHex(root, newRoot); + }); + } + }); + + it("all hashers should return the same values from digest64", () => { + const root1 = Buffer.alloc(32, 0x01); + const root2 = Buffer.alloc(32, 0xff); + const hash1 = nobleHasher.digest64(root1, root2); + const hash2 = asSha256Hasher.digest64(root1, root2); + const hash3 = hashtreeHasher.digest64(root1, root2); + expectEqualHex(hash1, hash2); + expectEqualHex(hash1, hash3); + }); + + it("all hashers should return the same values from digest64HashObjects", () => { + const root1 = Buffer.alloc(32, 0x01); + const hashObject1 = 
uint8ArrayToHashObject(root1); + const root2 = Buffer.alloc(32, 0xff); + const hashObject2 = uint8ArrayToHashObject(root2); + const hash1 = hashObjectToUint8Array(nobleHasher.digest64HashObjects(hashObject1, hashObject2)); + const hash2 = hashObjectToUint8Array(asSha256Hasher.digest64HashObjects(hashObject1, hashObject2)); + const hash3 = hashObjectToUint8Array(hashtreeHasher.digest64HashObjects(hashObject1, hashObject2)); + expectEqualHex(hash1, hash2); + expectEqualHex(hash1, hash3); + }); + + it("all hashers should return the same values from batchHashObjects", () => { + const hashObjects = linspace(254) + .map((num) => Buffer.alloc(32, num)) + .map(uint8ArrayToHashObject); + const results1 = nobleHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array); + const results2 = asSha256Hasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array); + const results3 = hashtreeHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array); + Object.values(results1).forEach((result1, i) => { + expectEqualHex(result1, results2[i]); + expectEqualHex(result1, results3[i]); + }); + }); + + describe("all hashers should return the same values from executeHashComputations", () => { + for (const hasher of hashers) { + it(hasher.name, () => { + const [tree1, tree2] = buildComparisonTrees(8); + expectEqualHex(tree1.root, tree2.batchHash()); + }); + } }); }); From 29bdd2a42e53c342f101d0362edfc7452ddd27d9 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Mon, 10 Jun 2024 19:48:09 +0200 Subject: [PATCH 027/113] feat: add null-loader to simpleserialze webpack config --- packages/simpleserialize.com/package.json | 1 + .../simpleserialize.com/webpack.config.js | 86 ++++++++++--------- yarn.lock | 8 ++ 3 files changed, 56 insertions(+), 39 deletions(-) diff --git a/packages/simpleserialize.com/package.json b/packages/simpleserialize.com/package.json index e472df4b..ab66ad7c 100644 --- a/packages/simpleserialize.com/package.json +++ b/packages/simpleserialize.com/package.json @@ -25,6 +25,7 @@ "eyzy-tree": "^0.2.2", "file-saver": "^2.0.5", "js-yaml": "^4.1.0", + "null-loader": "^4.0.1", "react": "^17.0.2", "react-alert": "^7.0.1", "react-alert-template-basic": "^1.0.0", diff --git a/packages/simpleserialize.com/webpack.config.js b/packages/simpleserialize.com/webpack.config.js index 0f2ae396..f822c0b7 100644 --- a/packages/simpleserialize.com/webpack.config.js +++ b/packages/simpleserialize.com/webpack.config.js @@ -1,22 +1,22 @@ -const webpack = require('webpack'); -const { resolve } = require('path'); -const MiniCssExtractPlugin = require('mini-css-extract-plugin') -const HtmlWebpackPlugin = require('html-webpack-plugin'); +const webpack = require("webpack"); +const {resolve} = require("path"); +const MiniCssExtractPlugin = require("mini-css-extract-plugin"); +const HtmlWebpackPlugin = require("html-webpack-plugin"); -const isProd = process.env.NODE_ENV === 'production'; +const isProd = process.env.NODE_ENV === "production"; const config = { devtool: "source-map", - mode: isProd ? 'production' : 'development', + mode: isProd ? 
"production" : "development", entry: { - index: './src/index.tsx', + index: "./src/index.tsx", }, output: { - path: resolve(__dirname, 'dist'), - filename: '[name].js', + path: resolve(__dirname, "dist"), + filename: "[name].js", }, resolve: { - extensions: ['.js', '.jsx', '.ts', '.tsx'], + extensions: [".js", ".jsx", ".ts", ".tsx"], }, module: { rules: [ @@ -27,45 +27,49 @@ const config = { }, }, { - test: /\.scss$/, - use: [ + test: /\.scss$/, + use: [ MiniCssExtractPlugin.loader, { - loader: 'css-loader' + loader: "css-loader", }, { - loader: 'sass-loader', + loader: "sass-loader", options: { sourceMap: true, - } - } - ] - },{ + }, + }, + ], + }, + { test: /\.tsx?$/, - use: 'babel-loader', + use: "babel-loader", exclude: /node_modules/, - } + }, + { + use: "null-loader", + test: /@chainsafe\/hashtree/, + }, ], }, plugins: [ new webpack.ProvidePlugin({ - process: 'process/browser', - Buffer: ['buffer', 'Buffer'], + process: "process/browser", + Buffer: ["buffer", "Buffer"], }), new MiniCssExtractPlugin({ - filename: 'css/[name].bundle.css' + filename: "css/[name].bundle.css", }), new HtmlWebpackPlugin({ - title: 'Simple Serialize | Chainsafe Systems', - template: 'src/index.html', + title: "Simple Serialize | Chainsafe Systems", + template: "src/index.html", }), ], }; if (isProd) { config.optimization = { - minimizer: [ - ], + minimizer: [], }; } else { config.devServer = { @@ -73,7 +77,7 @@ if (isProd) { open: true, // https://webpack.js.org/configuration/dev-server/#devserveropen hot: true, // https://webpack.js.org/configuration/dev-server/#devserverhot compress: true, // https://webpack.js.org/configuration/dev-server/#devservercompress - stats: 'errors-only', // https://webpack.js.org/configuration/dev-server/#devserverstats- + stats: "errors-only", // https://webpack.js.org/configuration/dev-server/#devserverstats- overlay: true, // https://webpack.js.org/configuration/dev-server/#devserveroverlay }; } @@ -81,14 +85,14 @@ if (isProd) { const workerConfig = { name: "worker", resolve: { - extensions: ['.js', '.jsx', '.ts', '.tsx'], + extensions: [".js", ".jsx", ".ts", ".tsx"], }, entry: { - index: './src/components/worker/index.ts', + index: "./src/components/worker/index.ts", }, output: { - path: resolve(__dirname, 'dist'), - filename: 'worker.js', + path: resolve(__dirname, "dist"), + filename: "worker.js", }, module: { rules: [ @@ -100,21 +104,25 @@ const workerConfig = { }, { test: /worker?$/, - loader: 'threads-webpack-plugin', + loader: "threads-webpack-plugin", }, { test: /\.ts?$/, - use: 'babel-loader', + use: "babel-loader", exclude: /node_modules/, - } + }, + { + use: "null-loader", + test: /@chainsafe\/hashtree/, + }, ], }, plugins: [ new webpack.ProvidePlugin({ - process: 'process/browser', - Buffer: ['buffer', 'Buffer'], + process: "process/browser", + Buffer: ["buffer", "Buffer"], }), - ] -} + ], +}; module.exports = [config, workerConfig]; diff --git a/yarn.lock b/yarn.lock index 66d1b7ec..75b41a42 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8980,6 +8980,14 @@ nth-check@^2.0.1: dependencies: boolbase "^1.0.0" +null-loader@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-4.0.1.tgz#8e63bd3a2dd3c64236a4679428632edd0a6dbc6a" + integrity sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + nx@19.0.4, "nx@>=17.1.2 < 20": version "19.0.4" resolved 
"https://registry.yarnpkg.com/nx/-/nx-19.0.4.tgz#c39803f6186f6b009c39f5f30f902ce8e136dcde" From 578f706e32040ed759e91ccf8829cc476e4c53bf Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Tue, 11 Jun 2024 11:02:23 +0200 Subject: [PATCH 028/113] feat: add get hashComputations to BranchNode --- packages/persistent-merkle-tree/src/node.ts | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index e2d69c1b..42a4311e 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -84,9 +84,7 @@ export class BranchNode extends Node { } batchHash(): Uint8Array { - const hashComputations: HashComputation[][] = []; - getHashComputations(this, 0, hashComputations); - executeHashComputations(hashComputations); + executeHashComputations(this.hashComputations); if (this.h0 === null) { throw Error("Root is not computed by batch"); @@ -116,6 +114,12 @@ export class BranchNode extends Node { get right(): Node { return this._right; } + + get hashComputations(): HashComputation[][] { + const hashComputations: HashComputation[][] = []; + getHashComputations(this, 0, hashComputations); + return hashComputations; + } } /** From 39cb5e4fceaa7b1cb3b8ffe5b250d61e46f381fe Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Tue, 11 Jun 2024 11:02:50 +0200 Subject: [PATCH 029/113] test: add buildComparisonTrees to test utils --- .../persistent-merkle-tree/test/unit/hasher.test.ts | 12 +----------- packages/persistent-merkle-tree/test/utils/tree.ts | 11 +++++++++++ 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 3505ab0d..8e4482ee 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -4,20 +4,10 @@ import {hasher as nobleHasher} from "../../src/hasher/noble"; import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {linspace} from "../utils/misc"; -import {BranchNode, LeafNode} from "../../src/node"; -import {subtreeFillToContents} from "../../src"; +import {buildComparisonTrees} from "../utils/tree"; const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher]; -function buildComparisonTrees(depth: number): [BranchNode, BranchNode] { - const width = 2 ** (depth - 1); - const nodes = linspace(width).map((num) => LeafNode.fromRoot(Uint8Array.from(Buffer.alloc(32, num)))); - const copy = nodes.map((node) => node.clone()); - const branch1 = subtreeFillToContents(nodes, depth) as BranchNode; - const branch2 = subtreeFillToContents(copy, depth) as BranchNode; - return [branch1, branch2]; -} - describe("hashers", function () { describe("digest64 vs digest64HashObjects methods should be the same", () => { for (const hasher of hashers) { diff --git a/packages/persistent-merkle-tree/test/utils/tree.ts b/packages/persistent-merkle-tree/test/utils/tree.ts index 2ecec3e8..19c50bd8 100644 --- a/packages/persistent-merkle-tree/test/utils/tree.ts +++ b/packages/persistent-merkle-tree/test/utils/tree.ts @@ -1,4 +1,6 @@ +import {subtreeFillToContents} from "../../src"; import {BranchNode, LeafNode, Node} from "../../src/node"; +import {linspace} from "./misc"; export function createTree(depth: number, index = 0): Node { if (!depth) { @@ -6,3 +8,12 @@ export function 
createTree(depth: number, index = 0): Node { } return new BranchNode(createTree(depth - 1, 2 ** depth + index), createTree(depth - 1, 2 ** depth + index + 1)); } + +export function buildComparisonTrees(depth: number): [BranchNode, BranchNode] { + const width = 2 ** (depth - 1); + const nodes = linspace(width).map((num) => LeafNode.fromRoot(Uint8Array.from(Buffer.alloc(32, num)))); + const copy = nodes.map((node) => node.clone()); + const branch1 = subtreeFillToContents(nodes, depth) as BranchNode; + const branch2 = subtreeFillToContents(copy, depth) as BranchNode; + return [branch1, branch2]; +} From 3b8050eb5686a73709f08ece9fa51a117ba09d50 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Tue, 11 Jun 2024 11:03:43 +0200 Subject: [PATCH 030/113] fix: broken perf test --- .../test/perf/validators.test.ts | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/persistent-merkle-tree/test/perf/validators.test.ts b/packages/persistent-merkle-tree/test/perf/validators.test.ts index a9b316f8..452dd560 100644 --- a/packages/persistent-merkle-tree/test/perf/validators.test.ts +++ b/packages/persistent-merkle-tree/test/perf/validators.test.ts @@ -1,5 +1,13 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; -import {BranchNode, LeafNode, subtreeFillToContents, Node, countToDepth, zeroNode} from "../../src"; +import { + BranchNode, + LeafNode, + subtreeFillToContents, + Node, + countToDepth, + zeroNode, + getHashComputations, +} from "../../src"; import {MemoryTracker} from "../utils/memTracker"; /** @@ -68,8 +76,7 @@ describe("Track the performance of validators", () => { return node; }, fn: (node) => { - const hashComputationsByLevel = new Map(); - (node as BranchNode).getHashComputation(0, hashComputationsByLevel); + (node as BranchNode).hashComputations; }, }); }); From 3ebb4c2e1919780e5f15e29d128a4dcff1034268 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Tue, 11 Jun 2024 11:04:07 +0200 Subject: [PATCH 031/113] test: add perf comparison for hashers --- .../test/perf/hasher.test.ts | 48 +++++++++++++------ 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index b0dcc9f7..a75d7305 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -2,8 +2,11 @@ import {itBench} from "@dapplion/benchmark"; import {uint8ArrayToHashObject} from "../../src/hasher"; import {hasher as asShaHasher} from "../../src/hasher/as-sha256"; import {hasher as nobleHasher} from "../../src/hasher/noble"; +import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; +import {buildComparisonTrees} from "../utils/tree"; -describe("hasher", () => { +describe("hasher", function () { + this.timeout(0); const root1 = new Uint8Array(32); const root2 = new Uint8Array(32); for (let i = 0; i < root1.length; i++) { @@ -13,21 +16,38 @@ describe("hasher", () => { root2[i] = 2; } + const [tree] = buildComparisonTrees(16); + // total number of time running hash for 250_000 validators - const iterations = 2250026; - - for (const {hasher, name} of [ - {hasher: asShaHasher, name: "as-sha256"}, - {hasher: nobleHasher, name: "noble"}, - ]) { - itBench(`hash 2 Uint8Array ${iterations} times - ${name}`, () => { - for (let j = 0; j < iterations; j++) hasher.digest64(root1, root2); - }); + // const iterations = 2_250_026; + const iterations = 1_000_000; + + for (const hasher of [asShaHasher, 
nobleHasher, hashtreeHasher]) { + describe(hasher.name, () => { + itBench(`hash 2 Uint8Array ${iterations} times - ${hasher.name}`, () => { + for (let j = 0; j < iterations; j++) hasher.digest64(root1, root2); + }); + + itBench({ + id: `hashTwoObjects ${iterations} times - ${hasher.name}`, + before: () => ({ + obj1: uint8ArrayToHashObject(root1), + obj2: uint8ArrayToHashObject(root2), + }), + beforeEach: (params) => params, + fn: ({obj1, obj2}) => { + for (let j = 0; j < iterations; j++) hasher.digest64HashObjects(obj1, obj2); + }, + }); + + // itBench(`batchHash - ${hasher.name}`, () => {}); - const obj1 = uint8ArrayToHashObject(root1); - const obj2 = uint8ArrayToHashObject(root2); - itBench(`hashTwoObjects ${iterations} times - ${name}`, () => { - for (let j = 0; j < iterations; j++) hasher.digest64HashObjects(obj1, obj2); + itBench({ + id: `executeHashComputations - ${hasher.name}`, + fn: () => { + hasher.executeHashComputations(tree.hashComputations); + }, + }); }); } }); From bef3d5b3843f991285091c37e9f7424387cfd688 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Tue, 11 Jun 2024 11:32:18 +0200 Subject: [PATCH 032/113] fix: benchmark command in persistent-merkle-tree --- packages/persistent-merkle-tree/package.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index b35c9e92..437ec53e 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -20,7 +20,8 @@ "clean": "rm -rf lib", "build": "tsc", "lint": "eslint --color --ext .ts src/", - "benchmark": "node --max-old-space-size=4096 --expose-gc -r ts-node/register ./node_modules/.bin/benchmark 'test/perf/*.perf.ts'", + "benchmark:files": "node --max-old-space-size=4096 --expose-gc -r ts-node/register ../../node_modules/.bin/benchmark", + "benchmark": "yarn benchmark:files 'test/perf/*.test.ts'", "benchmark:local": "yarn benchmark --local", "test": "mocha -r ts-node/register 'test/unit/**/*.test.ts'" }, From d4293f07fd7a9fab9f8a9cdccfe55e750987336c Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Tue, 11 Jun 2024 11:32:42 +0200 Subject: [PATCH 033/113] feat: improve hashtree performance a bit --- packages/persistent-merkle-tree/src/hasher/hashtree.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index e3a2a0b2..c5cc8bc8 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -6,11 +6,11 @@ import {HashComputation, Node} from "../node"; export const hasher: Hasher = { name: "hashtree", digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array { - return hash(Uint8Array.from([...obj1, ...obj2])); + return hash(Buffer.concat([obj1, obj2], 64)); }, digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject { - const input1 = Uint8Array.from(new Array(32)); - const input2 = Uint8Array.from(new Array(32)); + const input1 = new Uint8Array(32); + const input2 = new Uint8Array(32); hashObjectToByteArray(obj1, input1, 0); hashObjectToByteArray(obj2, input2, 0); return byteArrayToHashObject(hasher.digest64(input1, input2)); @@ -21,9 +21,9 @@ export const hasher: Hasher = { return []; } // size input array to 2 HashObject per computation * 32 bytes per object - const input: Uint8Array = Uint8Array.from(new Array(inputs.length * 32)); + const input = new 
Uint8Array(inputs.length * 32);
     inputs.forEach((hashObject, i) => hashObjectToByteArray(hashObject, input, i * 32));
-    const result: Uint8Array = hash(input);
+    const result = hash(input);
     const outputs: HashObject[] = [];
     for (let i = 0; i < inputs.length / 2; i++) {
       const offset = i * 32;
From 483f85ba510ad7c8ac4e8a12690bd71298c11a2e Mon Sep 17 00:00:00 2001
From: matthewkeil
Date: Tue, 11 Jun 2024 11:33:04 +0200
Subject: [PATCH 034/113] test: add batchHash perf comparison
---
 .../test/perf/hasher.test.ts | 22 ++++++++++++++-----
 1 file changed, 17 insertions(+), 5 deletions(-)

diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
index a75d7305..35e76a69 100644
--- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
@@ -1,5 +1,5 @@
 import {itBench} from "@dapplion/benchmark";
-import {uint8ArrayToHashObject} from "../../src/hasher";
+import {HashObject, uint8ArrayToHashObject} from "../../src/hasher";
 import {hasher as asShaHasher} from "../../src/hasher/as-sha256";
 import {hasher as nobleHasher} from "../../src/hasher/noble";
 import {hasher as hashtreeHasher} from "../../src/hasher/hashtree";
@@ -7,6 +7,11 @@ import {buildComparisonTrees} from "../utils/tree";
 
 describe("hasher", function () {
   this.timeout(0);
+
+  // total number of time running hash for 250_000 validators
+  // const iterations = 2_250_026;
+  const iterations = 1_000_000;
+
   const root1 = new Uint8Array(32);
   const root2 = new Uint8Array(32);
   for (let i = 0; i < root1.length; i++) {
@@ -18,9 +23,11 @@ describe("hasher", function () {
 
   const [tree] = buildComparisonTrees(16);
 
-  // total number of time running hash for 250_000 validators
-  // const iterations = 2_250_026;
-  const iterations = 1_000_000;
+  const hashObjects: HashObject[] = [];
+  for (let i = 0; i < iterations; i++) {
+    hashObjects.push(uint8ArrayToHashObject(root1));
+    hashObjects.push(uint8ArrayToHashObject(root2));
+  }
 
   for (const hasher of [asShaHasher, nobleHasher, hashtreeHasher]) {
     describe(hasher.name, () => {
@@ -40,7 +47,12 @@ describe("hasher", function () {
         },
       });
 
-      // itBench(`batchHash - ${hasher.name}`, () => {});
+      itBench({
+        id: `batchHash - ${hasher.name}`,
+        fn: () => {
+          hasher.batchHashObjects(hashObjects);
+        },
+      });
 
       itBench({
         id: `executeHashComputations - ${hasher.name}`,
From 6fff41fbc67a536824cb7920efbfe8f98ccc140c Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Sun, 9 Jun 2024 14:32:35 +0700
Subject: [PATCH 035/113] chore: add partially modified tree benchmark
---
 packages/persistent-merkle-tree/src/node.ts |  12 +-
 packages/ssz/src/viewDU/container.ts        |   1 +
 .../ssz/test/perf/eth2/beaconState.test.ts  | 120 ++++++++++++++++++
 .../ssz/test/unit/eth2/validators.test.ts   |  86 +++++++++++--
 4 files changed, 199 insertions(+), 20 deletions(-)
 create mode 100644 packages/ssz/test/perf/eth2/beaconState.test.ts

diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts
index 42a4311e..1c7def21 100644
--- a/packages/persistent-merkle-tree/src/node.ts
+++ b/packages/persistent-merkle-tree/src/node.ts
@@ -402,13 +402,11 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void
 export function getHashComputations(node: Node, offset: number, hashCompsByLevel: Array<HashComputation[]>): void {
   if (node.h0 === null) {
     const hashComputations = arrayAtIndex(hashCompsByLevel, offset);
-    hashComputations.push({src0: node.left, src1: node.right, dest: node});
-    if (!node.left.isLeaf()) {
-      getHashComputations(node.left, offset + 1, hashCompsByLevel);
-    }
-    if (!node.right.isLeaf()) {
-      getHashComputations(node.right, offset + 1, hashCompsByLevel);
-    }
+    const {left, right} = node;
+    hashComputations.push({src0: left, src1: right, dest: node});
+    // leaf nodes should have h0 to stop the recursion
+    getHashComputations(left, offset + 1, hashCompsByLevel);
+    getHashComputations(right, offset + 1, hashCompsByLevel);
   }
 
   // else stop the recursion, LeafNode should have h0
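The simplified recursion above leans on an invariant worth spelling out: a LeafNode is always created with h0 set, and an already-hashed BranchNode keeps its cached h0, so descending unconditionally still stops at every clean subtree. A minimal sketch of that behavior, not part of this patch, assuming the same top-level exports used by the tests in this series:

import {BranchNode, LeafNode, getHashComputations, HashComputation} from "@chainsafe/persistent-merkle-tree";

const leaf = (): LeafNode => LeafNode.fromRoot(new Uint8Array(32)); // leaves are born with h0 set
const clean = new BranchNode(leaf(), leaf());
void clean.root; // force and cache h0 on this branch

const dirty = new BranchNode(clean, leaf()); // fresh branch, h0 === null
const byLevel: HashComputation[][] = [];
getHashComputations(dirty, 0, byLevel);
console.log(byLevel.map((hcs) => hcs.length)); // [1]: recursion into the clean subtree stopped at its cached h0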
diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts
index 2f168114..e0c16cdd 100644
--- a/packages/ssz/src/viewDU/container.ts
+++ b/packages/ssz/src/viewDU/container.ts
@@ -120,6 +120,7 @@ class ContainerTreeViewDU<Fields extends Record<string, Type<unknown>>> extends
       isOldRootHashed ? hashComps : null
     );
 
+    // old root is not hashed, need to traverse and put to HashComputationGroup
     if (!isOldRootHashed && hashComps !== null) {
       getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel);
     }
diff --git a/packages/ssz/test/perf/eth2/beaconState.test.ts b/packages/ssz/test/perf/eth2/beaconState.test.ts
new file mode 100644
index 00000000..b14e4c6c
--- /dev/null
+++ b/packages/ssz/test/perf/eth2/beaconState.test.ts
@@ -0,0 +1,120 @@
+import {itBench} from "@dapplion/benchmark";
+import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";
+import {BeaconState} from "../../lodestarTypes/altair/sszTypes";
+import {BitArray, CompositeViewDU} from "../../../src";
+
+const vc = 100_000;
+const numModified = vc / 2;
+
+/**
+ * The fresh tree batch hash benchmark is in packages/persistent-merkle-tree/test/perf/node.test.ts
+ */
+describe("BeaconState ViewDU partially modified tree", function () {
+  itBench({
+    id: `BeaconState ViewDU batchHash vc=${vc}`,
+    beforeEach: () => createPartiallyModifiedDenebState(),
+    fn: (state: CompositeViewDU<typeof BeaconState>) => {
+      // commit() step is inside hashTreeRoot()
+      state.hashTreeRoot();
+    },
+  });
+
+  itBench({
+    id: `BeaconState ViewDU batchHash - commit step vc=${vc}`,
+    beforeEach: () => createPartiallyModifiedDenebState(),
+    fn: (state: CompositeViewDU<typeof BeaconState>) => {
+      const hashComps: HashComputationGroup = {
+        byLevel: [],
+        offset: 0,
+      };
+      state.commit(hashComps);
+    },
+  });
+
+  itBench({
+    id: `BeaconState ViewDU batchHash - commit step each validator vc=${vc}`,
+    beforeEach: () => createPartiallyModifiedDenebState(),
+    fn: (state: CompositeViewDU<typeof BeaconState>) => {
+      const hashComps: HashComputationGroup = {
+        byLevel: [],
+        offset: 0,
+      };
+      for (let i = 0; i < vc / 2; i++) {
+        state.validators.get(i).commit(hashComps);
+      }
+    },
+  });
+
+  itBench({
+    id: `BeaconState ViewDU recursive hash vc=${vc}`,
+    beforeEach: () => createPartiallyModifiedDenebState(),
+    fn: (state: CompositeViewDU<typeof BeaconState>) => {
+      state.commit();
+      state.node.root;
+    },
+  });
+
+  itBench({
+    id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`,
+    beforeEach: () => createPartiallyModifiedDenebState(),
+    fn: (state: CompositeViewDU<typeof BeaconState>) => {
+      state.commit();
+    },
+  });
+});
+
+function createPartiallyModifiedDenebState(): CompositeViewDU<typeof BeaconState> {
+  const state = createDenebState(vc);
+  // cache all roots
+  state.hashTreeRoot();
+  // modify half of validators and balances
+  for (let i = 0; i < numModified; i++) {
+    state.validators.get(i).effectiveBalance += 1e9;
+    state.balances.set(i, state.balances.get(i) + 1e9);
+  }
+  return state;
+}
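// Illustrative aside (not a line of this patch): the helper above caches every node root via
// hashTreeRoot() before mutating, so only the numModified validator subtrees and the touched
// balances chunks are left dirty; the benchmarks measure re-hashing just that region, not the whole tree.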
Buffer.alloc(32, 1); + state.fork = BeaconState.fields.fork.toViewDU({ + epoch: 1000, + previousVersion: Buffer.alloc(4, 0x03), + currentVersion: Buffer.alloc(4, 0x04), + }); + state.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU({ + slot: 1000, + proposerIndex: 1, + parentRoot: Buffer.alloc(32, 0xac), + stateRoot: Buffer.alloc(32, 0xed), + bodyRoot: Buffer.alloc(32, 0x32), + }); + const validators = Array.from({length: vc}, () => { + return { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, + }; + }); + state.validators = BeaconState.fields.validators.toViewDU(validators); + state.balances = BeaconState.fields.balances.toViewDU(Array.from({length: vc}, () => 32e9)); + // randomMixes + // slashings + state.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toViewDU( + Array.from({length: vc}, () => 7) + ); + state.currentEpochParticipation = BeaconState.fields.previousEpochParticipation.toViewDU( + Array.from({length: vc}, () => 7) + ); + state.justificationBits = BeaconState.fields.justificationBits.toViewDU( + BitArray.fromBoolArray([true, false, true, true]) + ); + return state; +} diff --git a/packages/ssz/test/unit/eth2/validators.test.ts b/packages/ssz/test/unit/eth2/validators.test.ts index a0878f9c..8c6e1c18 100644 --- a/packages/ssz/test/unit/eth2/validators.test.ts +++ b/packages/ssz/test/unit/eth2/validators.test.ts @@ -2,23 +2,23 @@ import {expect} from "chai"; import {describe, it} from "mocha"; import {toHexString, ListCompositeType, ValueOf, CompositeViewDU} from "../../../src"; import {ValidatorContainer, ValidatorNodeStruct} from "../../lodestarTypes/phase0/sszTypes"; +import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; + +type Validator = ValueOf; +const validator: Validator = { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, +}; describe("Container with BranchNodeStruct", function () { this.timeout(0); - type Validator = ValueOf; - - const validator: Validator = { - pubkey: Buffer.alloc(48, 0xaa), - withdrawalCredentials: Buffer.alloc(32, 0xbb), - effectiveBalance: 32e9, - slashed: false, - activationEligibilityEpoch: 1_000_000, - activationEpoch: 2_000_000, - exitEpoch: 3_000_000, - withdrawableEpoch: 4_000_000, - }; - const validatorViewDU = ValidatorContainer.toViewDU(validator); const validatorNodeStructViewDU = ValidatorNodeStruct.toViewDU(validator); @@ -105,3 +105,63 @@ describe("Container with BranchNodeStruct", function () { }); }); }); + +/** + * modifying any fields should result in the whole tree being recomputed + * 0 root + * / \ + * 1 10 11 + * / \ / \ + * 2 20 21 22 23 + * / \ / \ / \ / \ + * 3 pub with eff sla act act exit with + * / \ + * 4 pub0 pub1 + **/ +describe("getHashComputations BranchNodeStruct", function () { + const testCases: {name: string; fn: (validator: ValueOf) => void}[] = [ + {name: "modify pubkey", fn: (validator) => (validator.pubkey = Buffer.alloc(48, 0x01))}, + { + name: "modify withdrawalCredentials", + fn: (validator) => (validator.withdrawalCredentials = Buffer.alloc(32, 0x01)), + }, + {name: "modify effectiveBalance", fn: (validator) => 
(validator.effectiveBalance += 1e9)}, + {name: "modify slashed", fn: (validator) => (validator.slashed = true)}, + {name: "modify activationEligibilityEpoch", fn: (validator) => (validator.activationEligibilityEpoch += 1e6)}, + {name: "modify activationEpoch", fn: (validator) => (validator.activationEpoch += 1e6)}, + {name: "modify exitEpoch", fn: (validator) => (validator.exitEpoch += 1e6)}, + {name: "modify withdrawableEpoch", fn: (validator) => (validator.withdrawableEpoch += 1e6)}, + { + name: "modify all", + fn: (validator) => { + validator.pubkey = Buffer.alloc(48, 0x01); + validator.withdrawalCredentials = Buffer.alloc(32, 0x01); + validator.effectiveBalance += 1e9; + validator.slashed = true; + validator.activationEligibilityEpoch += 1e6; + validator.activationEpoch += 1e6; + validator.exitEpoch += 1e6; + validator.withdrawableEpoch += 1e6; + }, + }, + ]; + + for (const {name, fn} of testCases) { + it(name, () => { + const hashComps: HashComputationGroup = { + byLevel: [], + offset: 0, + }; + const validatorViewDU = ValidatorNodeStruct.toViewDU(validator); + // cache all roots + validatorViewDU.hashTreeRoot(); + fn(validatorViewDU); + validatorViewDU.commit(hashComps); + expect(hashComps.byLevel.length).to.be.equal(4); + expect(hashComps.byLevel[0].length).to.be.equal(1); + expect(hashComps.byLevel[1].length).to.be.equal(2); + expect(hashComps.byLevel[2].length).to.be.equal(4); + expect(hashComps.byLevel[3].length).to.be.equal(1); + }); + } +}); From 5ba002c6b9ad1dd055d731bb1e841637c2fd047b Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 10 Jun 2024 15:53:06 +0700 Subject: [PATCH 036/113] feat: optimize ContainerNodeStruct.valueToTree() --- packages/ssz/src/type/byteArray.ts | 8 +++++++- packages/ssz/src/type/containerNodeStruct.ts | 10 ++++------ packages/ssz/src/type/optional.ts | 2 +- packages/ssz/src/type/uint.ts | 6 ++++++ packages/ssz/src/type/union.ts | 2 +- 5 files changed, 19 insertions(+), 9 deletions(-) diff --git a/packages/ssz/src/type/byteArray.ts b/packages/ssz/src/type/byteArray.ts index 42393c37..b6dbb128 100644 --- a/packages/ssz/src/type/byteArray.ts +++ b/packages/ssz/src/type/byteArray.ts @@ -37,7 +37,7 @@ export abstract class ByteArrayType extends CompositeType return new BranchNodeStruct(this.valueToTree.bind(this), value); } - // TODO: Optimize conversion private valueToTree(value: ValueOfFields): Node { - const uint8Array = new Uint8Array(this.value_serializedSize(value)); - const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); - this.value_serializeToBytes({uint8Array, dataView}, 0, value); - return super.tree_deserializeFromBytes({uint8Array, dataView}, 0, uint8Array.length); + // TODO - batch get hash computations while creating tree + const nodes = this.fieldsEntries.map(({fieldName, fieldType}) => fieldType.value_toTree(value[fieldName])); + return subtreeFillToContents(nodes, this.depth); } } diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index 7bec6e3d..d2d8fb0c 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -75,7 +75,7 @@ export class OptionalType> extends CompositeTy } // TODO add an OptionalViewDU - // TODO: batch + // TODO - batch commitViewDU(view: ValueOfType, hashComps: HashComputationGroup | null = null): Node { return this.value_toTree(view); } diff --git a/packages/ssz/src/type/uint.ts b/packages/ssz/src/type/uint.ts index 910310f4..81b47a18 100644 --- a/packages/ssz/src/type/uint.ts +++ 
b/packages/ssz/src/type/uint.ts @@ -133,6 +133,12 @@ export class UintNumberType extends BasicType { } } + value_toTree(value: number): Node { + const node = LeafNode.fromZero(); + node.setUint(this.byteLength, 0, value, this.clipInfinity); + return node; + } + tree_serializeToBytes(output: ByteViews, offset: number, node: Node): number { const value = (node as LeafNode).getUint(this.byteLength, 0, this.clipInfinity); this.value_serializeToBytes(output, offset, value); diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index 93cbed51..ac60e1b7 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -106,7 +106,7 @@ export class UnionType[]> extends CompositeType< return this.value_toTree(view); } - // TODO: batch + // TODO - batch commitViewDU(view: ValueOfTypes, hashComps: HashComputationGroup | null = null): Node { return this.value_toTree(view); } From a3a17a01fa804f51710334926f1080b4f84ab14f Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 10 Jun 2024 16:50:33 +0700 Subject: [PATCH 037/113] fix: add batchHash vs hashTreeRoot benchmark --- packages/ssz/test/perf/eth2/beaconState.test.ts | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/ssz/test/perf/eth2/beaconState.test.ts b/packages/ssz/test/perf/eth2/beaconState.test.ts index b14e4c6c..51a3d1b1 100644 --- a/packages/ssz/test/perf/eth2/beaconState.test.ts +++ b/packages/ssz/test/perf/eth2/beaconState.test.ts @@ -1,5 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; +import {BranchNode, HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; import {BeaconState} from "../../lodestarTypes/altair/sszTypes"; import {BitArray, CompositeViewDU} from "../../../src"; @@ -11,7 +11,7 @@ const numModified = vc / 2; */ describe("BeaconState ViewDU partially modified tree", function () { itBench({ - id: `BeaconState ViewDU batchHash vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { // commit() step is inside hashTreeRoot() @@ -45,6 +45,15 @@ describe("BeaconState ViewDU partially modified tree", function () { }, }); + itBench({ + id: `BeaconState ViewDU batchHash vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + state.commit(); + (state.node as BranchNode).batchHash(); + }, + }); + itBench({ id: `BeaconState ViewDU recursive hash vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), From a969608d2432d7905ad4c642856bc126b75f1404 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 11 Jun 2024 06:20:43 +0700 Subject: [PATCH 038/113] feat: benchmark hash step of each batch hash method --- packages/ssz/src/viewDU/abstract.ts | 3 +- .../ssz/test/perf/eth2/beaconState.test.ts | 68 +++++++++++++++++-- 2 files changed, 63 insertions(+), 8 deletions(-) diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts index e9cb8abb..69954660 100644 --- a/packages/ssz/src/viewDU/abstract.ts +++ b/packages/ssz/src/viewDU/abstract.ts @@ -1,4 +1,3 @@ -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index"; import {HashComputationGroup, executeHashComputations} from "@chainsafe/persistent-merkle-tree"; import {ByteViews, CompositeType} from "../type/composite"; import {TreeView} from "../view/abstract"; @@ -65,7 +64,7 @@ export abstract class TreeViewDU 
createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { // commit() step is inside hashTreeRoot() - state.hashTreeRoot(); + if (toHexString(state.hashTreeRoot()) !== expectedRoot) { + throw new Error("hashTreeRoot does not match expectedRoot"); + } }, }); itBench({ - id: `BeaconState ViewDU batchHash - commit step vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { const hashComps: HashComputationGroup = { @@ -32,7 +42,23 @@ describe("BeaconState ViewDU partially modified tree", function () { }); itBench({ - id: `BeaconState ViewDU batchHash - commit step each validator vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, + beforeEach: () => { + const state = createPartiallyModifiedDenebState(); + const hashComps: HashComputationGroup = { + byLevel: [], + offset: 0, + }; + state.commit(hashComps); + return hashComps; + }, + fn: (hashComps) => { + executeHashComputations(hashComps.byLevel); + }, + }); + + itBench.skip({ + id: `BeaconState ViewDU hashTreeRoot - commit step each validator vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { const hashComps: HashComputationGroup = { @@ -51,6 +77,32 @@ describe("BeaconState ViewDU partially modified tree", function () { fn: (state: CompositeViewDU) => { state.commit(); (state.node as BranchNode).batchHash(); + if (toHexString(state.node.root) !== expectedRoot) { + throw new Error("hashTreeRoot does not match expectedRoot"); + } + }, + }); + + itBench({ + id: `BeaconState ViewDU batchHash - getHashComputation vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + state.commit(); + getHashComputations(state.node, 0, []); + }, + }); + + itBench({ + id: `BeaconState ViewDU batchHash - hash step vc=${vc}`, + beforeEach: () => { + const state = createPartiallyModifiedDenebState(); + state.commit(); + const hashComputations: HashComputation[][] = []; + getHashComputations(state.node, 0, hashComputations); + return hashComputations; + }, + fn: (hashComputations: HashComputation[][]) => { + executeHashComputations(hashComputations); }, }); @@ -60,10 +112,14 @@ describe("BeaconState ViewDU partially modified tree", function () { fn: (state: CompositeViewDU) => { state.commit(); state.node.root; + if (toHexString(state.node.root) !== expectedRoot) { + throw new Error("hashTreeRoot does not match expectedRoot"); + } + // console.log("@@@@ root", toHexString(state.node.root)); }, }); - itBench({ + itBench.skip({ id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { From 396cebe05adca7da4abb10795dabdbdb79f7a02f Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 12 Jun 2024 13:35:31 +0700 Subject: [PATCH 039/113] feat: compute HashComputations when creating validator tree --- .../persistent-merkle-tree/src/subtree.ts | 74 +++++++++-- .../test/unit/subtree.test.ts | 65 +++++++++- packages/ssz/src/branchNodeStruct.ts | 32 ++++- packages/ssz/src/type/containerNodeStruct.ts | 12 +- .../ssz/src/viewDU/containerNodeStruct.ts | 6 +- .../ssz/test/perf/eth2/beaconState.test.ts | 115 +++++++++++------- 6 files changed, 234 insertions(+), 70 deletions(-) diff --git a/packages/persistent-merkle-tree/src/subtree.ts b/packages/persistent-merkle-tree/src/subtree.ts index 53e242ae..561f0b1d 100644 --- 
a/packages/persistent-merkle-tree/src/subtree.ts +++ b/packages/persistent-merkle-tree/src/subtree.ts @@ -1,4 +1,4 @@ -import {BranchNode, Node} from "./node"; +import {BranchNode, HashComputationGroup, Node, arrayAtIndex, getHashComputations} from "./node"; import {zeroNode} from "./zeroNode"; export function subtreeFillToDepth(bottom: Node, depth: number): Node { @@ -37,9 +37,16 @@ export function subtreeFillToLength(bottom: Node, depth: number, length: number) /** * WARNING: Mutates the provided nodes array. + * @param hashCompRootNode is a hacky way from ssz to set `dest` of HashComputation for BranchNodeStruct * TODO: Don't mutate the nodes array. + * TODO - batch: check consumers of this function, can we compute HashComputationGroup when deserializing ViewDU from Uint8Array? */ -export function subtreeFillToContents(nodes: Node[], depth: number): Node { +export function subtreeFillToContents( + nodes: Node[], + depth: number, + hashComps: HashComputationGroup | null = null, + hashCompRootNode: Node | null = null +): Node { const maxLength = 2 ** depth; if (nodes.length > maxLength) { throw new Error(`nodes.length ${nodes.length} over maxIndex at depth ${depth}`); @@ -50,15 +57,33 @@ export function subtreeFillToContents(nodes: Node[], depth: number): Node { } if (depth === 0) { - return nodes[0]; + const node = nodes[0]; + if (hashComps !== null) { + // only use hashCompRootNode for >=1 nodes where we have a rebind + getHashComputations(node, hashComps.offset, hashComps.byLevel); + } + return node; } if (depth === 1) { - return nodes.length > 1 - ? // All nodes at depth 1 available - new BranchNode(nodes[0], nodes[1]) - : // Pad with zero node - new BranchNode(nodes[0], zeroNode(0)); + // All nodes at depth 1 available + // If there is only one node, pad with zero node + const leftNode = nodes[0]; + const rightNode = nodes.length > 1 ? nodes[1] : zeroNode(0); + const rootNode = new BranchNode(leftNode, rightNode); + + if (hashComps !== null) { + const offset = hashComps.offset; + getHashComputations(leftNode, offset + 1, hashComps.byLevel); + getHashComputations(rightNode, offset + 1, hashComps.byLevel); + arrayAtIndex(hashComps.byLevel, offset).push({ + src0: leftNode, + src1: rightNode, + dest: hashCompRootNode ?? rootNode, + }); + } + + return rootNode; } let count = nodes.length; @@ -66,14 +91,43 @@ export function subtreeFillToContents(nodes: Node[], depth: number): Node { for (let d = depth; d > 0; d--) { const countRemainder = count % 2; const countEven = count - countRemainder; + const offset = hashComps ? hashComps.offset + d - 1 : null; // For each depth level compute the new BranchNodes and overwrite the nodes array for (let i = 0; i < countEven; i += 2) { - nodes[i / 2] = new BranchNode(nodes[i], nodes[i + 1]); + const left = nodes[i]; + const right = nodes[i + 1]; + const node = new BranchNode(left, right); + nodes[i / 2] = node; + if (offset !== null && hashComps !== null) { + arrayAtIndex(hashComps.byLevel, offset).push({ + src0: left, + src1: right, + // d = 1 means we are at root node, use hashCompRootNode if possible + dest: d === 1 ? hashCompRootNode ?? 
node : node,
+        });
+        if (d === depth) {
+          // bottom up strategy so we don't need to go down the tree except for the last level
+          getHashComputations(left, offset + 1, hashComps.byLevel);
+          getHashComputations(right, offset + 1, hashComps.byLevel);
+        }
+      }
     }

     if (countRemainder > 0) {
-      nodes[countEven / 2] = new BranchNode(nodes[countEven], zeroNode(depth - d));
+      const left = nodes[countEven];
+      const right = zeroNode(depth - d);
+      const node = new BranchNode(left, right);
+      nodes[countEven / 2] = node;
+      if (offset !== null && hashComps !== null) {
+        if (d === depth) {
+          // only go down on the last level
+          getHashComputations(left, offset + 1, hashComps.byLevel);
+        }
+        // no need to getHashComputations for zero node
+        // no need to set hashCompRootNode here
+        arrayAtIndex(hashComps.byLevel, offset).push({src0: left, src1: right, dest: node});
+      }
     }

     // If there was a remainder, 2 nodes are added to the count

diff --git a/packages/persistent-merkle-tree/test/unit/subtree.test.ts b/packages/persistent-merkle-tree/test/unit/subtree.test.ts
index b170aa16..880838d7 100644
--- a/packages/persistent-merkle-tree/test/unit/subtree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/subtree.test.ts
@@ -1,6 +1,10 @@
-import {subtreeFillToContents, LeafNode, getNodesAtDepth} from "../../src";
+import {expect} from "chai";
+import {subtreeFillToContents, LeafNode, getNodesAtDepth, executeHashComputations, BranchNode, Node, HashComputation} from "../../src";
+
+describe("subtreeFillToContents", function () {
+  // the hash computation takes time
+  this.timeout(5000);

-describe("subtreeFillToContents", () => {
   it("Simple case", () => {
     function nodeNum(num: number): LeafNode {
       return LeafNode.fromUint32(num);
@@ -35,7 +39,12 @@
       expectedNodes[i] = node;
     }

-    const node = subtreeFillToContents(nodes, depth);
+    const hashComps = {
+      offset: 0,
+      byLevel: [],
+    };
+
+    const node = subtreeFillToContents(nodes, depth, hashComps);
     const retrievedNodes = getNodesAtDepth(node, depth, 0, count);

     // Assert correct
@@ -44,7 +53,57 @@
         throw Error(`Wrong node at index ${i}`);
       }
     }
+    executeHashComputations(hashComps.byLevel);
+    if (node.h0 === null) {
+      throw Error("Root node h0 is null");
+    }
   });
     }
   }
});
+
+describe("subtreeFillToContents - validator nodes", function () {
+  /**
+   * 0                        root
+   *                       /        \
+   * 1                 10              11
+   *                 /    \          /    \
+   * 2             20      21      22      23
+   *              /  \    /  \    /  \    /  \
+   * 3          pub with eff sla act  act exit with
+   *           /   \
+   * 4       pub0  pub1
+   **/
+  it("should compute HashComputations for validator nodes", () => {
+    const numNodes = 8;
+    const nodesArr: Array<Node[]> = [];
+    for (let count = 0; count < 2; count++) {
+      const nodes = new Array(numNodes);
+      for (let i = 1; i < numNodes; i++) {
+        const node = LeafNode.fromUint32(i);
+        nodes[i] = node;
+      }
+      nodes[0] = new BranchNode(LeafNode.fromUint32(0), LeafNode.fromUint32(1));
+      nodesArr.push(nodes);
+    }
+
+    // maxChunksToDepth in ssz returns 3 for 8 nodes
+    const depth = 3;
+    const root0 = subtreeFillToContents(nodesArr[0], depth);
+    const hashComps = {
+      offset: 0,
+      byLevel: new Array<HashComputation[]>(),
+    };
+    const node = subtreeFillToContents(nodesArr[1], depth, hashComps);
+    expect(hashComps.byLevel.length).to.equal(4);
+    expect(hashComps.byLevel[0].length).to.equal(1);
+    expect(hashComps.byLevel[1].length).to.equal(2);
+    expect(hashComps.byLevel[2].length).to.equal(4);
+    expect(hashComps.byLevel[3].length).to.equal(1);
+    executeHashComputations(hashComps.byLevel);
+    if (node.h0 === null) {
+      throw Error("Root node h0 is null");
+    }
+    expect(node.root).to.deep.equal(root0.root);
+  });
+});
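Spelled out, the assertions above pin down the shape subtreeFillToContents produces for this 8-field validator tree (a sketch derived from the test expectations; not part of the patch):

    // depth 3; nodes[0] is itself a BranchNode over the two pubkey chunks
    // byLevel[0]: 1 computation  -> root = H(10, 11)
    // byLevel[1]: 2 computations -> 10 = H(20, 21), 11 = H(22, 23)
    // byLevel[2]: 4 computations -> 20 = H(pub, with), 21 = H(eff, sla), ...
    // byLevel[3]: 1 computation  -> pub = H(pub0, pub1), found by descending into the last level
    executeHashComputations(hashComps.byLevel); // runs levels 3 -> 0, so children are hashed before parents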
diff --git a/packages/ssz/src/branchNodeStruct.ts b/packages/ssz/src/branchNodeStruct.ts
index be38fd88..b3a45470 100644
--- a/packages/ssz/src/branchNodeStruct.ts
+++ b/packages/ssz/src/branchNodeStruct.ts
@@ -1,5 +1,16 @@
 import {HashObject} from "@chainsafe/as-sha256/lib/hashObject";
-import {hashObjectToUint8Array, Node} from "@chainsafe/persistent-merkle-tree";
+import {
+  hashObjectToUint8Array,
+  Node,
+  getHashComputations,
+  HashComputationGroup,
+} from "@chainsafe/persistent-merkle-tree";
+
+export type ValueToNodeFn<T> = (
+  value: T,
+  hashComps: HashComputationGroup | null,
+  hashCompRootNode: Node | null
+) => Node;

 /**
  * BranchNode whose children's data is represented as a struct, the backed tree is lazily computed from the struct.
@@ -13,14 +24,13 @@ export class BranchNodeStruct<T> extends Node {
    * this represents the backed tree which is lazily computed from the struct
    */
   private _rootNode: Node | null = null;
-  constructor(private readonly valueToNode: (value: T) => Node, readonly value: T) {
+  constructor(private readonly valueToNode: ValueToNodeFn<T>, readonly value: T) {
     // First null value is to save an extra variable to check if a node has a root or not
     super(null as unknown as number, 0, 0, 0, 0, 0, 0, 0);
     this._rootNode = null;
   }

   get rootHashObject(): HashObject {
-    // return this.rootNode.rootHashObject;
     if (this.h0 === null) {
       super.applyHash(this.rootNode.rootHashObject);
     }
@@ -43,13 +53,27 @@ export class BranchNodeStruct<T> extends Node {
     return this.rootNode.right;
   }

+  getHashComputations(hashComps: HashComputationGroup): void {
+    if (this.h0 !== null) {
+      return;
+    }
+
+    if (this._rootNode === null) {
+      // set dest of HashComputation to this node
+      this._rootNode = this.valueToNode(this.value, hashComps, this);
+    } else {
+      // not likely to hit this path if called from ViewDU, handle just in case
+      getHashComputations(this, hashComps.offset, hashComps.byLevel);
+    }
+  }
+
   /**
    * Singleton implementation to make sure there is a single backed tree for this node.
* This is important for batching HashComputations */ private get rootNode(): Node { if (this._rootNode === null) { - this._rootNode = this.valueToNode(this.value); + this._rootNode = this.valueToNode(this.value, null, null); } return this._rootNode; } diff --git a/packages/ssz/src/type/containerNodeStruct.ts b/packages/ssz/src/type/containerNodeStruct.ts index f72a7307..49a8ffd5 100644 --- a/packages/ssz/src/type/containerNodeStruct.ts +++ b/packages/ssz/src/type/containerNodeStruct.ts @@ -1,4 +1,4 @@ -import {Node, subtreeFillToContents} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, Node, subtreeFillToContents} from "@chainsafe/persistent-merkle-tree"; import {Type, ByteViews} from "./abstract"; import {isCompositeType} from "./composite"; import {ContainerType, ContainerOptions, renderContainerTypeName} from "./container"; @@ -106,9 +106,13 @@ export class ContainerNodeStructType return new BranchNodeStruct(this.valueToTree.bind(this), value); } - private valueToTree(value: ValueOfFields): Node { - // TODO - batch get hash computations while creating tree + private valueToTree( + value: ValueOfFields, + hashComps: HashComputationGroup | null = null, + hashCompRootNode: Node | null = null + ): Node { const nodes = this.fieldsEntries.map(({fieldName, fieldType}) => fieldType.value_toTree(value[fieldName])); - return subtreeFillToContents(nodes, this.depth); + const rootNode = subtreeFillToContents(nodes, this.depth, hashComps, hashCompRootNode); + return rootNode; } } diff --git a/packages/ssz/src/viewDU/containerNodeStruct.ts b/packages/ssz/src/viewDU/containerNodeStruct.ts index eb850faf..e9baef20 100644 --- a/packages/ssz/src/viewDU/containerNodeStruct.ts +++ b/packages/ssz/src/viewDU/containerNodeStruct.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, Node, getHashComputations} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, Node} from "@chainsafe/persistent-merkle-tree"; import {Type, ValueOf} from "../type/abstract"; import {isCompositeType} from "../type/composite"; import {BranchNodeStruct} from "../branchNodeStruct"; @@ -34,8 +34,8 @@ class ContainerTreeViewDU>> extends this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>; } - if (hashComps !== null && this._rootNode.h0 === null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + if (hashComps !== null) { + this._rootNode.getHashComputations(hashComps); } } diff --git a/packages/ssz/test/perf/eth2/beaconState.test.ts b/packages/ssz/test/perf/eth2/beaconState.test.ts index f3b77b21..2fecb3f0 100644 --- a/packages/ssz/test/perf/eth2/beaconState.test.ts +++ b/packages/ssz/test/perf/eth2/beaconState.test.ts @@ -9,64 +9,51 @@ import { import {BeaconState} from "../../lodestarTypes/altair/sszTypes"; import {BitArray, CompositeViewDU, toHexString} from "../../../src"; -const vc = 100_000; +const vc = 200_000; const numModified = vc / 2; -// TODO - batch: should confirm in unit test instead? 
-const expectedRoot = "0xda08e9e2ce3d77df6d6cb29d744871bff4975365841c3b574534f86be352652b";
+// every time we increase vc, we need to update this value from the "recursive hash" test below
+const expectedRoot = "0x0bd3c6caecdf5b04e8ac48e41732aa5908019e072aa4e61c5298cf31a643eb70";

 /**
+ * This simulates a BeaconState being modified after an epoch transition in lodestar
  * The fresh tree batch hash benchmark is in packages/persistent-merkle-tree/test/perf/node.test.ts
+ * Note that this benchmark is not very stable: we cannot apply runsFactor because once commit() has run
+ * we cannot compute the HashComputationGroup again.
+ * Increasing the number of validators could cause OOM since we have to create a BeaconState every time
  */
-describe("BeaconState ViewDU partially modified tree", function () {
+describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numModified}`, function () {
   itBench({
-    id: `BeaconState ViewDU hashTreeRoot vc=${vc}`,
+    id: `BeaconState ViewDU recursive hash vc=${vc}`,
     beforeEach: () => createPartiallyModifiedDenebState(),
     fn: (state: CompositeViewDU) => {
-      // commit() step is inside hashTreeRoot()
-      state.hashTreeRoot();
+      state.commit();
+      state.node.root;
+      // console.log("@@@@ root", toHexString(state.node.root));
+      if (toHexString(state.node.root) !== expectedRoot) {
+        throw new Error("hashTreeRoot does not match expectedRoot");
+      }
     },
   });

   itBench({
-    id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`,
+    id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`,
     beforeEach: () => createPartiallyModifiedDenebState(),
     fn: (state: CompositeViewDU) => {
-      const hashComps: HashComputationGroup = {
-        byLevel: [],
-        offset: 0,
-      };
-      state.commit(hashComps);
+      state.commit();
     },
   });

   itBench({
-    id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`,
+    id: `BeaconState ViewDU validator tree creation vc=${numModified}`,
     beforeEach: () => {
       const state = createPartiallyModifiedDenebState();
-      const hashComps: HashComputationGroup = {
-        byLevel: [],
-        offset: 0,
-      };
-      state.commit(hashComps);
-      return hashComps;
-    },
-    fn: (hashComps) => {
-      executeHashComputations(hashComps.byLevel);
+      state.commit();
+      return state;
     },
-  });
-
-  itBench.skip({
-    id: `BeaconState ViewDU hashTreeRoot - commit step each validator vc=${vc}`,
-    beforeEach: () => createPartiallyModifiedDenebState(),
     fn: (state: CompositeViewDU) => {
-      const hashComps: HashComputationGroup = {
-        byLevel: [],
-        offset: 0,
-      };
-      for (let i = 0; i < vc / 2; i++) {
-        state.validators.get(i).commit(hashComps);
+      const validators = state.validators;
+      for (let i = 0; i < numModified; i++) {
+        validators.getReadonly(i).node.left;
       }
     },
   });
@@ -84,7 +71,7 @@ describe("BeaconState ViewDU partially modified tree", function () {
   });

   itBench({
-    id: `BeaconState ViewDU batchHash - getHashComputation vc=${vc}`,
+    id: `BeaconState ViewDU batchHash - commit & getHashComputation vc=${vc}`,
     beforeEach: () => createPartiallyModifiedDenebState(),
     fn: (state: CompositeViewDU) => {
       state.commit();
@@ -107,32 +94,68 @@
   });

   itBench({
-    id: `BeaconState ViewDU recursive hash vc=${vc}`,
+    id: `BeaconState ViewDU hashTreeRoot vc=${vc}`,
     beforeEach: () => createPartiallyModifiedDenebState(),
     fn: (state: CompositeViewDU) => {
-      state.commit();
-      state.node.root;
-      if (toHexString(state.node.root) !== expectedRoot) {
+      // commit() step is inside hashTreeRoot()
+      if (toHexString(state.hashTreeRoot()) !== expectedRoot) {
        throw new
Error("hashTreeRoot does not match expectedRoot"); } - // console.log("@@@@ root", toHexString(state.node.root)); + }, + }); + + itBench({ + id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + const hashComps: HashComputationGroup = { + byLevel: [], + offset: 0, + }; + state.commit(hashComps); + }, + }); + + itBench({ + id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, + beforeEach: () => { + const state = createPartiallyModifiedDenebState(); + const hashComps: HashComputationGroup = { + byLevel: [], + offset: 0, + }; + state.commit(hashComps); + return hashComps; + }, + fn: (hashComps) => { + executeHashComputations(hashComps.byLevel); }, }); itBench.skip({ - id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot - commit step each validator vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.commit(); + const hashComps: HashComputationGroup = { + byLevel: [], + offset: 0, + }; + for (let i = 0; i < numModified; i++) { + state.validators.get(i).commit(hashComps); + } }, }); }); +let originalState: CompositeViewDU | null = null; function createPartiallyModifiedDenebState(): CompositeViewDU { - const state = createDenebState(vc); - // cache all roots - state.hashTreeRoot(); - // modify half of validators and balances + if (originalState === null) { + originalState = createDenebState(vc); + // cache all roots + originalState.hashTreeRoot(); + } + + const state = originalState.clone(); for (let i = 0; i < numModified; i++) { state.validators.get(i).effectiveBalance += 1e9; state.balances.set(i, state.balances.get(i) + 1e9); From ec89ce9a0657b2867eac6aa60a44d7e2db86aaaf Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 12 Jun 2024 14:40:53 +0700 Subject: [PATCH 040/113] fix: use hashtree for ssz benchmark --- packages/persistent-merkle-tree/src/hasher/hashtree.ts | 2 +- setHasher.mjs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index c5cc8bc8..a36fb861 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -53,7 +53,7 @@ export const hasher: Hasher = { for (const [i, out] of output.entries()) { const offset = i * 32; - out.applyHash(byteArrayToHashObject(result.slice(offset, offset + 32))); + out.applyHash(byteArrayToHashObject(result.subarray(offset, offset + 32))); } } }, diff --git a/setHasher.mjs b/setHasher.mjs index 4643f73e..c210cdba 100644 --- a/setHasher.mjs +++ b/setHasher.mjs @@ -1,5 +1,5 @@ -// Set the hasher to as-sha256 -// Used to run benchmarks with with visibility into as-sha256 performance, useful for Lodestar +// Set the hasher to hashtree +// Used to run benchmarks with with visibility into hashtree performance, useful for Lodestar import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js"; -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js"; +import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js"; setHasher(hasher); From 07e4d3c03c563e806174dab9657e11cf1a2e329f Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 12 Jun 2024 16:51:00 +0700 Subject: [PATCH 041/113] fix: hasher executeHashComputations() benchmark --- 
From 07e4d3c03c563e806174dab9657e11cf1a2e329f Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Wed, 12 Jun 2024 16:51:00 +0700
Subject: [PATCH 041/113] fix: hasher executeHashComputations() benchmark

---
 .../persistent-merkle-tree/test/perf/hasher.test.ts | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
index 35e76a69..4dbc3412 100644
--- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
@@ -8,9 +8,7 @@ describe("hasher", function () {
   this.timeout(0);

-  // total number of time running hash for 250_000 validators
-  // const iterations = 2_250_026;
-  const iterations = 1_000_000;
+  const iterations = 500_000;

   const root1 = new Uint8Array(32);
   const root2 = new Uint8Array(32);
@@ -21,8 +19,6 @@
     root2[i] = 2;
   }

-  const [tree] = buildComparisonTrees(16);
-
   const hashObjects: HashObject[] = [];
   for (let i = 0; i < iterations; i++) {
     hashObjects.push(uint8ArrayToHashObject(root1));
@@ -56,7 +52,11 @@
       itBench({
         id: `executeHashComputations - ${hasher.name}`,
-        fn: () => {
+        beforeEach: () => {
+          const [tree] = buildComparisonTrees(16);
+          return tree;
+        },
+        fn: (tree) => {
           hasher.executeHashComputations(tree.hashComputations);
         },
       });

From 31cec22baa0006d5ae84380e9d22abcd2cc027aa Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 13 Jun 2024 10:27:00 +0700
Subject: [PATCH 042/113] fix: allocate memory once for hashtree

---
 .../src/hasher/hashtree.ts   | 70 ++++++++++++++-----
 .../test/unit/hasher.test.ts |  4 +-
 2 files changed, 57 insertions(+), 17 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
index a36fb861..23cc4e90 100644
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -1,22 +1,43 @@
-import {hash} from "@chainsafe/hashtree";
+import {hash, hashInto} from "@chainsafe/hashtree";
 import {byteArrayToHashObject, hashObjectToByteArray} from "@chainsafe/as-sha256";
 import {Hasher, HashObject} from "./types";
 import {HashComputation, Node} from "../node";

+/**
+ * The best SIMD implementations operate on 512 bits = 64 bytes at a time
+ * If that is unavailable, hashtree falls back to an internal loop
+ * Since sha256 works on 4-byte words, 512-bit SIMD can hash 16 inputs at once
+ * Each input is 64 bytes
+ */
+const PARALLEL_FACTOR = 16;
+const input = new Uint8Array(PARALLEL_FACTOR * 64);
+const output = new Uint8Array(PARALLEL_FACTOR * 32);
+
 export const hasher: Hasher = {
   name: "hashtree",
   digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array {
-    return hash(Buffer.concat([obj1, obj2], 64));
+    // return hash(Buffer.concat([obj1, obj2], 64));
+    if (obj1.length !== 32 || obj2.length !== 32) {
+      throw new Error("Invalid input length");
+    }
+    input.set(obj1, 0);
+    input.set(obj2, 32);
+    const hashInput = input.subarray(0, 64);
+    const hashOutput = output.subarray(0, 32);
+    hashInto(hashInput, hashOutput);
+    return hashOutput.slice();
   },
   digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject {
-    const input1 = new Uint8Array(32);
-    const input2 = new Uint8Array(32);
-    hashObjectToByteArray(obj1, input1, 0);
-    hashObjectToByteArray(obj2, input2, 0);
-    return byteArrayToHashObject(hasher.digest64(input1, input2));
+    hashObjectToByteArray(obj1, input, 0);
+    hashObjectToByteArray(obj2, input, 32);
+    const hashInput = input.subarray(0, 64);
+    const hashOutput = output.subarray(0, 32);
+    hashInto(hashInput, hashOutput);
+    return
byteArrayToHashObject(hashOutput);
   },
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
   batchHashObjects(inputs: HashObject[]): HashObject[] {
+    // TODO - batch: remove
     if (inputs.length === 0) {
       return [];
     }
@@ -40,20 +61,37 @@ export const hasher: Hasher = {
     }

     // size input array to 2 HashObject per computation * 32 bytes per object
-    const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32));
-    const output: Node[] = [];
+    let destNodes: Node[] = [];
+
+    // hash every 16 inputs at once to avoid memory allocation
     for (const [i, {src0, src1, dest}] of hcArr.entries()) {
-      const offset = i * 64; // zero index * 2 leafs * 32 bytes
+      const indexInBatch = i % PARALLEL_FACTOR;
+      const offset = indexInBatch * 64;
       hashObjectToByteArray(src0, input, offset);
       hashObjectToByteArray(src1, input, offset + 32);
-      output.push(dest);
+      destNodes.push(dest);
+      if (indexInBatch === PARALLEL_FACTOR - 1) {
+        hashInto(input, output);
+        for (const [j, destNode] of destNodes.entries()) {
+          const outputOffset = j * 32;
+          destNode.applyHash(byteArrayToHashObject(output.subarray(outputOffset, outputOffset + 32)));
+        }
+        destNodes = [];
+      }
     }

-    const result: Uint8Array = hash(input);
-
-    for (const [i, out] of output.entries()) {
-      const offset = i * 32;
-      out.applyHash(byteArrayToHashObject(result.subarray(offset, offset + 32)));
+    const remaining = hcArr.length % PARALLEL_FACTOR;
+    // we prepared data in input, now hash the remaining
+    if (remaining > 0) {
+      const remainingInput = input.subarray(0, remaining * 64);
+      const remainingOutput = output.subarray(0, remaining * 32);
+      hashInto(remainingInput, remainingOutput);
+      // destNodes was prepared above
+      for (const [i, destNode] of destNodes.entries()) {
+        const offset = i * 32;
+        destNode.applyHash(byteArrayToHashObject(remainingOutput.subarray(offset, offset + 32)));
+      }
     }
   }
 },
diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts
index 8e4482ee..a2edf1d2 100644
--- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts
@@ -64,7 +64,9 @@ describe("hashers", function () {
   for (const hasher of hashers) {
     it(hasher.name, () => {
       const [tree1, tree2] = buildComparisonTrees(8);
-      expectEqualHex(tree1.root, tree2.batchHash());
+      const hashComputations = tree2.hashComputations;
+      hasher.executeHashComputations(hashComputations);
+      expectEqualHex(tree1.root, tree2.root);
     });
   }
 });

From fbf59eaf224abab15111457f3e676a47b1d8f73a Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 13 Jun 2024 12:22:28 +0700
Subject: [PATCH 043/113] fix: improve hashtree.batchHashObjects()

---
 .../src/hasher/hashtree.ts   | 40 ++++++++++++++-----
 .../test/perf/hasher.test.ts |  2 +-
 2 files changed, 30 insertions(+), 12 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
index 23cc4e90..618ac2dd 100644
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -1,4 +1,4 @@
-import {hash, hashInto} from "@chainsafe/hashtree";
+import {hashInto} from "@chainsafe/hashtree";
 import {byteArrayToHashObject, hashObjectToByteArray} from "@chainsafe/as-sha256";
 import {Hasher, HashObject} from "./types";
 import {HashComputation, Node} from "../node";
@@ -37,20 +37,38 @@ export const hasher: Hasher = {
 },
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
   batchHashObjects(inputs: HashObject[]): HashObject[] {
     if (inputs.length === 0) {
       return [];
     }
-    // size input array to 2 HashObject per computation * 32 bytes per object
-    const input = new Uint8Array(inputs.length * 32);
-    inputs.forEach((hashObject, i) => hashObjectToByteArray(hashObject, input, i * 32));
-    const result = hash(input);
-    const outputs: HashObject[] = [];
-    for (let i = 0; i < inputs.length / 2; i++) {
-      const offset = i * 32;
+    if (inputs.length % 2 !== 0) {
+      throw new Error("inputs length must be even");
     }
-    return outputs;
+
+    const batch = PARALLEL_FACTOR * 2;
+    const outHashObjects: HashObject[] = [];
+    for (const [i, hashInput] of inputs.entries()) {
+      const indexInBatch = i % batch;
+      hashObjectToByteArray(hashInput, input, indexInBatch * 32);
+      if (indexInBatch === batch - 1) {
+        hashInto(input, output);
+        for (let j = 0; j < batch / 2; j++) {
+          outHashObjects.push(byteArrayToHashObject(output.subarray(j * 32, j * 32 + 32)));
+        }
+      }
+    }
+
+    // hash remaining
+    const remaining = inputs.length % batch;
+    if (remaining > 0) {
+      const remainingInput = input.subarray(0, remaining * 32);
+      const remainingOutput = output.subarray(0, remaining * 16);
+      hashInto(remainingInput, remainingOutput);
+      for (let i = 0; i < remaining / 2; i++) {
+        outHashObjects.push(byteArrayToHashObject(remainingOutput.subarray(i * 32, i * 32 + 32)));
+      }
+    }
+
+    return outHashObjects;
   },
   executeHashComputations(hashComputations: Array<HashComputation[]>): void {
     for (let level = hashComputations.length - 1; level >= 0; level--) {
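The batching arithmetic above, spelled out (a sketch using PARALLEL_FACTOR = 16 as defined earlier in this file; not part of the patch):

    const batch = PARALLEL_FACTOR * 2;       // 32 HashObjects buffered per hashInto() call
    // one full batch: 32 inputs * 32 bytes = 1024 input bytes -> 16 outputs * 32 bytes = 512 output bytes
    const remaining = inputs.length % batch; // leftover HashObjects after the main loop
    // the tail hashes remaining / 2 pairs:
    //   input  = remaining * 32 bytes
    //   output = remaining * 16 bytes (half as many bytes out as in)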
diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
@@ -44,7 +44,7 @@
       itBench({
-        id: `batchHash - ${hasher.name}`,
+        id: `batchHashObjects - ${hasher.name}`,
         fn: () => {
           hasher.batchHashObjects(hashObjects);
         },

From c22f36883b6da682ffff24064af118cad3aa1a64 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 13 Jun 2024 16:57:03 +0700
Subject: [PATCH 044/113] fix: stabilize hasher.test.ts benchmark

---
 .../test/perf/hasher.test.ts | 41 +++++++++++++------
 1 file changed, 29 insertions(+), 12 deletions(-)

diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
index 953fd7d1..9fd535fd 100644
--- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
@@ -19,16 +19,17 @@
     root2[i] = 2;
   }

-  const hashObjects: HashObject[] = [];
-  for (let i = 0; i < iterations; i++) {
-    hashObjects.push(uint8ArrayToHashObject(root1));
-    hashObjects.push(uint8ArrayToHashObject(root2));
-  }
-
+  const runsFactor = 10;
   for (const hasher of [asShaHasher, nobleHasher, hashtreeHasher]) {
     describe(hasher.name, () => {
-      itBench(`hash 2 Uint8Array ${iterations} times - ${hasher.name}`, () => {
-        for (let j = 0; j < iterations; j++) hasher.digest64(root1, root2);
+      itBench({
+        id: `hash 2 Uint8Array ${iterations} times - ${hasher.name}`,
+        fn: () => {
+          for (let i = 0; i < runsFactor; i++) {
+            for (let j = 0; j < iterations; j++) hasher.digest64(root1, root2);
+          }
+        },
+        runsFactor,
       });

       itBench({
@@ -39,15 +40,31
@@ describe("hasher", function () { }), beforeEach: (params) => params, fn: ({obj1, obj2}) => { - for (let j = 0; j < iterations; j++) hasher.digest64HashObjects(obj1, obj2); + for (let i = 0; i < runsFactor; i++) { + for (let j = 0; j < iterations; j++) hasher.digest64HashObjects(obj1, obj2); + } }, + runsFactor, }); - itBench({ + // TODO: benchmark for this test is not stable, if it runs alone it's 20% - 30% faster + itBench.skip({ id: `batchHashObjects - ${hasher.name}`, - fn: () => { - hasher.batchHashObjects(hashObjects); + before: () => { + const hashObjects: HashObject[] = []; + for (let i = 0; i < iterations; i++) { + hashObjects.push(uint8ArrayToHashObject(root1)); + hashObjects.push(uint8ArrayToHashObject(root2)); + } + return hashObjects; + }, + beforeEach: (hashObjects) => hashObjects, + fn: (hashObjects: HashObject[]) => { + for (let i = 0; i < runsFactor; i++) { + hasher.batchHashObjects(hashObjects); + } }, + runsFactor: 10, }); itBench({ From 46ddb4c67454739c6e612781821bf03b01c10ebb Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 13 Jun 2024 14:58:40 +0700 Subject: [PATCH 045/113] feat: use Uint32Array --- .../src/hasher/hashtree.ts | 85 ++++++++++++------- 1 file changed, 56 insertions(+), 29 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 618ac2dd..e0b3a052 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,7 +1,7 @@ import {hashInto} from "@chainsafe/hashtree"; -import {byteArrayToHashObject, hashObjectToByteArray} from "@chainsafe/as-sha256"; import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; +import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -10,30 +10,32 @@ import {HashComputation, Node} from "../node"; * Each input is 64 bytes */ const PARALLEL_FACTOR = 16; -const input = new Uint8Array(PARALLEL_FACTOR * 64); -const output = new Uint8Array(PARALLEL_FACTOR * 32); +const uint8Input = new Uint8Array(PARALLEL_FACTOR * 64); +const uint32Input = new Uint32Array(uint8Input.buffer); +const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32); +const uint32Output = new Uint32Array(uint8Output.buffer); + export const hasher: Hasher = { name: "hashtree", digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array { - // return hash(Buffer.concat([obj1, obj2], 64)); if (obj1.length !== 32 || obj2.length !== 32) { throw new Error("Invalid input length"); } - input.set(obj1, 0); - input.set(obj2, 32); - const hashInput = input.subarray(0, 64); - const hashOutput = output.subarray(0, 32); + uint8Input.set(obj1, 0); + uint8Input.set(obj2, 32); + const hashInput = uint8Input.subarray(0, 64); + const hashOutput = uint8Output.subarray(0, 32); hashInto(hashInput, hashOutput); return hashOutput.slice(); }, digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject { - hashObjectToByteArray(obj1, input, 0); - hashObjectToByteArray(obj2, input, 32); - const hashInput = input.subarray(0, 64); - const hashOutput = output.subarray(0, 32); + hashObjectToUint32Array(obj1, uint32Input, 0); + hashObjectToUint32Array(obj2, uint32Input, 8); + const hashInput = uint8Input.subarray(0, 64); + const hashOutput = uint8Output.subarray(0, 32); hashInto(hashInput, hashOutput); - return byteArrayToHashObject(hashOutput); + return uint32ArrayToHashObject(uint32Output, 0); }, // 
eslint-disable-next-line @typescript-eslint/no-unused-vars batchHashObjects(inputs: HashObject[]): HashObject[] { @@ -48,11 +50,11 @@ export const hasher: Hasher = { const outHashObjects: HashObject[] = []; for (const [i, hashInput] of inputs.entries()) { const indexInBatch = i % batch; - hashObjectToByteArray(hashInput, input, indexInBatch * 32); + hashObjectToUint32Array(hashInput, uint32Input, indexInBatch * 8); if (indexInBatch === batch - 1) { - hashInto(input, output); + hashInto(uint8Input, uint8Output); for (let j = 0; j < batch / 2; j++) { - outHashObjects.push(byteArrayToHashObject(output.subarray(j * 32, j * 32 + 32))); + outHashObjects.push(uint32ArrayToHashObject(uint32Output, j * 8)); } } } @@ -60,11 +62,11 @@ export const hasher: Hasher = { // hash remaining const remaining = inputs.length % batch; if (remaining > 0) { - const remainingInput = input.subarray(0, remaining * 32); - const remainingOutput = output.subarray(0, remaining * 16); + const remainingInput = uint8Input.subarray(0, remaining * 32); + const remainingOutput = uint8Output.subarray(0, remaining * 16); hashInto(remainingInput, remainingOutput); for (let i = 0; i < remaining / 2; i++) { - outHashObjects.push(byteArrayToHashObject(remainingOutput.subarray(i * 32, i * 32 + 32))); + outHashObjects.push(uint32ArrayToHashObject(uint32Output, i * 8)); } } @@ -85,15 +87,16 @@ export const hasher: Hasher = { // hash every 16 inputs at once to avoid memory allocation for (const [i, {src0, src1, dest}] of hcArr.entries()) { const indexInBatch = i % PARALLEL_FACTOR; - const offset = indexInBatch * 64; - hashObjectToByteArray(src0, input, offset); - hashObjectToByteArray(src1, input, offset + 32); + const offset = indexInBatch * 16; + + hashObjectToUint32Array(src0, uint32Input, offset); + hashObjectToUint32Array(src1, uint32Input, offset + 8); destNodes.push(dest); if (indexInBatch === PARALLEL_FACTOR - 1) { - hashInto(input, output); + hashInto(uint8Input, uint8Output); for (const [j, destNode] of destNodes.entries()) { - const outputOffset = j * 32; - destNode.applyHash(byteArrayToHashObject(output.subarray(outputOffset, outputOffset + 32))); + const outputOffset = j * 8; + destNode.applyHash(uint32ArrayToHashObject(uint32Output, outputOffset)); } destNodes = []; } @@ -102,15 +105,39 @@ export const hasher: Hasher = { const remaining = hcArr.length % PARALLEL_FACTOR; // we prepared data in input, now hash the remaining if (remaining > 0) { - const remainingInput = input.subarray(0, remaining * 64); - const remainingOutput = output.subarray(0, remaining * 32); + const remainingInput = uint8Input.subarray(0, remaining * 64); + const remainingOutput = uint8Output.subarray(0, remaining * 32); hashInto(remainingInput, remainingOutput); // destNodes was prepared above for (const [i, destNode] of destNodes.entries()) { - const offset = i * 32; - destNode.applyHash(byteArrayToHashObject(remainingOutput.subarray(offset, offset + 32))); + const offset = i * 8; + destNode.applyHash(uint32ArrayToHashObject(uint32Output, offset)); } } } }, }; + +function hashObjectToUint32Array(obj: HashObject, arr: Uint32Array, offset: number): void { + arr[offset] = obj.h0; + arr[offset + 1] = obj.h1; + arr[offset + 2] = obj.h2; + arr[offset + 3] = obj.h3; + arr[offset + 4] = obj.h4; + arr[offset + 5] = obj.h5; + arr[offset + 6] = obj.h6; + arr[offset + 7] = obj.h7; +} + +function uint32ArrayToHashObject(arr: Uint32Array, offset: number): HashObject { + return { + h0: arr[offset], + h1: arr[offset + 1], + h2: arr[offset + 2], + h3: arr[offset + 
3], + h4: arr[offset + 4], + h5: arr[offset + 5], + h6: arr[offset + 6], + h7: arr[offset + 7], + }; +} From a4d8677a6d3bf0911ea3f13a27990455260ac688 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Fri, 14 Jun 2024 18:46:24 +0200 Subject: [PATCH 046/113] fix: remove binding devDependency --- packages/persistent-merkle-tree/package.json | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index 437ec53e..f47fb9ab 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -49,9 +49,6 @@ "@chainsafe/hashtree": "1.0.0", "@noble/hashes": "^1.3.0" }, - "devDependencies": { - "@chainsafe/hashtree-darwin-arm64": "1.0.0" - }, "peerDependencies": { "@chainsafe/hashtree-linux-x64-gnu": "1.0.0", "@chainsafe/hashtree-linux-arm64-gnu": "1.0.0", From 1a1271e9cfbd47f2a8b6fd03b611bb7970f19df9 Mon Sep 17 00:00:00 2001 From: matthewkeil Date: Fri, 14 Jun 2024 18:53:12 +0200 Subject: [PATCH 047/113] chore: update yarn.lock --- yarn.lock | 5 ----- 1 file changed, 5 deletions(-) diff --git a/yarn.lock b/yarn.lock index 75b41a42..1c14c626 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1252,11 +1252,6 @@ core-js "2.6.10" require-resolve "0.0.2" -"@chainsafe/hashtree-darwin-arm64@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-darwin-arm64/-/hashtree-darwin-arm64-1.0.0.tgz#a9fb6b70eaf1f715c14caff22a64152a1903258e" - integrity sha512-duJfn57lUXkSedvEisEhXNJcUZAZLKY3D3t5Jx2EUfNS1PpVLM9k5oBG2cjolyNso2n94LJGlyYKFMrPoPig1w== - "@chainsafe/hashtree-linux-arm64-gnu@1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-arm64-gnu/-/hashtree-linux-arm64-gnu-1.0.0.tgz#168db259636261d9f3612354cad9f730a4be7110" From 5cc208bc4a66427beaa523b4080ca86b8c782869 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 19 Jun 2024 10:15:55 +0700 Subject: [PATCH 048/113] fix: release backed tree after use in BranchNodeStruct --- packages/ssz/src/branchNodeStruct.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/ssz/src/branchNodeStruct.ts b/packages/ssz/src/branchNodeStruct.ts index b3a45470..de0357be 100644 --- a/packages/ssz/src/branchNodeStruct.ts +++ b/packages/ssz/src/branchNodeStruct.ts @@ -33,6 +33,8 @@ export class BranchNodeStruct extends Node { get rootHashObject(): HashObject { if (this.h0 === null) { super.applyHash(this.rootNode.rootHashObject); + // this node has been hashed, we can clear the backed tree to release a lot of memory + this._rootNode = null; } return this; } From c2f4dc09dc59443be26d17abface3b9de12bdf23 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 19 Jun 2024 14:55:05 +0700 Subject: [PATCH 049/113] feat: sync ValidatorNodeStructType from lodestar --- packages/ssz/test/lodestarTypes/phase0/sszTypes.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts index c236be7e..34298087 100644 --- a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts +++ b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts @@ -2,7 +2,6 @@ import { BitListType, BitVectorType, ContainerType, - ContainerNodeStructType, ListBasicType, ListCompositeType, VectorBasicType, @@ -17,6 +16,9 @@ import { ATTESTATION_SUBNET_COUNT, } from "../params"; import * as primitiveSsz from "../primitive/sszTypes"; +import {ValidatorNodeStruct} from "./validator.js"; + +export {ValidatorNodeStruct}; const 
{
  EPOCHS_PER_ETH1_VOTING_PERIOD,
@@ -245,7 +247,6 @@ export const ValidatorContainer = new ContainerType(
   {typeName: "Validator", jsonCase: "eth2"}
 );

-export const ValidatorNodeStruct = new ContainerNodeStructType(ValidatorContainer.fields, ValidatorContainer.opts);
 // The main Validator type is the 'ContainerNodeStructType' version
 export const Validator = ValidatorNodeStruct;

From b518321307ebe75a616879ff4bfb5ff9ccef63ca Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 20 Jun 2024 10:49:43 +0700
Subject: [PATCH 050/113] feat: implement ValidatorViewDU class

---
 .../ssz/test/lodestarTypes/phase0/sszTypes.ts |   2 +-
 .../test/lodestarTypes/phase0/validator.ts    |  84 ++++++
 .../phase0/viewDU/validatorNodeStruct.ts      | 251 ++++++++++++++++++
 .../lodestarTypes/phase0/validator.test.ts    |  59 ++++
 .../phase0/viewDU/validatorNodeStruct.test.ts |  54 ++++
 5 files changed, 449 insertions(+), 1 deletion(-)
 create mode 100644 packages/ssz/test/lodestarTypes/phase0/validator.ts
 create mode 100644 packages/ssz/test/lodestarTypes/phase0/viewDU/validatorNodeStruct.ts
 create mode 100644 packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
 create mode 100644 packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts

diff --git a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts
index 34298087..5a9c84df 100644
--- a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts
@@ -16,7 +16,7 @@ import {
   ATTESTATION_SUBNET_COUNT,
 } from "../params";
 import * as primitiveSsz from "../primitive/sszTypes";
-import {ValidatorNodeStruct} from "./validator.js";
+import {ValidatorNodeStruct} from "./validator";

 export {ValidatorNodeStruct};

diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts b/packages/ssz/test/lodestarTypes/phase0/validator.ts
new file mode 100644
index 00000000..3acf24b5
--- /dev/null
+++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts
@@ -0,0 +1,84 @@
+import {ByteViews} from "../../../src/type/abstract";
+import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct";
+import {ValueOfFields} from "../../../src/view/container";
+import * as primitiveSsz from "../primitive/sszTypes";
+import {ValidatorTreeViewDU} from "./viewDU/validatorNodeStruct";
+import {Node} from "@chainsafe/persistent-merkle-tree";
+
+const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz;
+
+// this is to work with uint32, see https://github.com/ChainSafe/ssz/blob/ssz-v0.15.1/packages/ssz/src/type/uint.ts
+const NUMBER_2_POW_32 = 2 ** 32;
+
+/*
+ * The constants below correspond to their respective ssz types in `ValidatorType`.
+ */
+const UINT32_SIZE = 4;
+const PUBKEY_SIZE = 48;
+const WITHDRAWAL_CREDENTIALS_SIZE = 32;
+const SLASHED_SIZE = 1;
+
+export const ValidatorType = {
+  pubkey: BLSPubkey,
+  withdrawalCredentials: Bytes32,
+  effectiveBalance: UintNum64,
+  slashed: Boolean,
+  activationEligibilityEpoch: EpochInf,
+  activationEpoch: EpochInf,
+  exitEpoch: EpochInf,
+  withdrawableEpoch: EpochInf,
+};
+
+/**
+ * Improve serialization performance for state.validators.serialize();
+ */
+export class ValidatorNodeStructType extends ContainerNodeStructType<typeof ValidatorType> {
+  constructor() {
+    super(ValidatorType, {typeName: "Validator", jsonCase: "eth2"});
+  }
+
+  getViewDU(node: Node): ValidatorTreeViewDU {
+    return new ValidatorTreeViewDU(this, node);
+  }
+
+  value_serializeToBytes(
+    {uint8Array: output, dataView}: ByteViews,
+    offset: number,
+    validator: ValueOfFields<typeof ValidatorType>
+  ): number {
+    output.set(validator.pubkey, offset);
+    offset += PUBKEY_SIZE;
+    output.set(validator.withdrawalCredentials, offset);
+    offset += WITHDRAWAL_CREDENTIALS_SIZE;
+    const {effectiveBalance, activationEligibilityEpoch, activationEpoch, exitEpoch, withdrawableEpoch} = validator;
+    // effectiveBalance is UintNum64
+    dataView.setUint32(offset, effectiveBalance & 0xffffffff, true);
+    offset += UINT32_SIZE;
+    dataView.setUint32(offset, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true);
+    offset += UINT32_SIZE;
+    output[offset] = validator.slashed ? 1 : 0;
+    offset += SLASHED_SIZE;
+    offset = writeEpochInf(dataView, offset, activationEligibilityEpoch);
+    offset = writeEpochInf(dataView, offset, activationEpoch);
+    offset = writeEpochInf(dataView, offset, exitEpoch);
+    offset = writeEpochInf(dataView, offset, withdrawableEpoch);
+
+    return offset;
+  }
+}
+
+function writeEpochInf(dataView: DataView, offset: number, value: number): number {
+  if (value === Infinity) {
+    dataView.setUint32(offset, 0xffffffff, true);
+    offset += UINT32_SIZE;
+    dataView.setUint32(offset, 0xffffffff, true);
+    offset += UINT32_SIZE;
+  } else {
+    dataView.setUint32(offset, value & 0xffffffff, true);
+    offset += UINT32_SIZE;
+    dataView.setUint32(offset, (value / NUMBER_2_POW_32) & 0xffffffff, true);
+    offset += UINT32_SIZE;
+  }
+  return offset;
+}
+export const ValidatorNodeStruct = new ValidatorNodeStructType();
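For reference, the fixed-size layout written by value_serializeToBytes above totals 121 bytes per validator (a worked sum from the constants in this file):

    PUBKEY_SIZE (48) + WITHDRAWAL_CREDENTIALS_SIZE (32)
    + effectiveBalance (2 * UINT32_SIZE = 8) + SLASHED_SIZE (1)
    + 4 epochs * 8 bytes (32)
    = 121 bytes

writeEpochInf encodes Infinity as the maximum uint64 (two little-endian 0xffffffff words), matching the spec's FAR_FUTURE_EPOCH.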
+ */ +const UINT32_SIZE = 4; +const PUBKEY_SIZE = 48; +const WITHDRAWAL_CREDENTIALS_SIZE = 32; +const SLASHED_SIZE = 1; + +export const ValidatorType = { + pubkey: BLSPubkey, + withdrawalCredentials: Bytes32, + effectiveBalance: UintNum64, + slashed: Boolean, + activationEligibilityEpoch: EpochInf, + activationEpoch: EpochInf, + exitEpoch: EpochInf, + withdrawableEpoch: EpochInf, +}; + +/** + * Improve serialization performance for state.validators.serialize(); + */ +export class ValidatorNodeStructType extends ContainerNodeStructType { + constructor() { + super(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); + } + + getViewDU(node: Node): ValidatorTreeViewDU { + return new ValidatorTreeViewDU(this, node); + } + + value_serializeToBytes( + {uint8Array: output, dataView}: ByteViews, + offset: number, + validator: ValueOfFields + ): number { + output.set(validator.pubkey, offset); + offset += PUBKEY_SIZE; + output.set(validator.withdrawalCredentials, offset); + offset += WITHDRAWAL_CREDENTIALS_SIZE; + const {effectiveBalance, activationEligibilityEpoch, activationEpoch, exitEpoch, withdrawableEpoch} = validator; + // effectiveBalance is UintNum64 + dataView.setUint32(offset, effectiveBalance & 0xffffffff, true); + offset += UINT32_SIZE; + dataView.setUint32(offset, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true); + offset += UINT32_SIZE; + output[offset] = validator.slashed ? 1 : 0; + offset += SLASHED_SIZE; + offset = writeEpochInf(dataView, offset, activationEligibilityEpoch); + offset = writeEpochInf(dataView, offset, activationEpoch); + offset = writeEpochInf(dataView, offset, exitEpoch); + offset = writeEpochInf(dataView, offset, withdrawableEpoch); + + return offset; + } +} + +function writeEpochInf(dataView: DataView, offset: number, value: number): number { + if (value === Infinity) { + dataView.setUint32(offset, 0xffffffff, true); + offset += UINT32_SIZE; + dataView.setUint32(offset, 0xffffffff, true); + offset += UINT32_SIZE; + } else { + dataView.setUint32(offset, value & 0xffffffff, true); + offset += UINT32_SIZE; + dataView.setUint32(offset, (value / NUMBER_2_POW_32) & 0xffffffff, true); + offset += UINT32_SIZE; + } + return offset; +} +export const ValidatorNodeStruct = new ValidatorNodeStructType(); diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validatorNodeStruct.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validatorNodeStruct.ts new file mode 100644 index 00000000..44d9ad44 --- /dev/null +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validatorNodeStruct.ts @@ -0,0 +1,251 @@ +import { HashObject, byteArrayToHashObject } from "@chainsafe/as-sha256"; +import { BranchNodeStruct } from "../../../../src/branchNodeStruct"; +import { ContainerTypeGeneric } from "../../../../src/view/container"; +import { TreeViewDU } from "../../../../src/viewDU/abstract"; +import { ValidatorType } from "../validator"; +import { + Node, + BranchNode, + HashComputationGroup, +} from "@chainsafe/persistent-merkle-tree"; +type Validator = { + pubkey: Uint8Array; + withdrawalCredentials: Uint8Array; + effectiveBalance: number; + slashed: boolean; + activationEligibilityEpoch: number; + activationEpoch: number; + exitEpoch: number; + withdrawableEpoch: number; +}; + +const numFields = 8; +const NUMBER_2_POW_32 = 2 ** 32; + +/** + * A specific ViewDU for validator designed to be efficient to batch hash and efficient to create tree + * because it uses prepopulated nodes to do that. 
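+ *
+ * The intended flow (see valueToTree() and commitToHashObject() below) is that the
+ * parent list view writes this view's pending value into preallocated nodes, hashes
+ * those nodes in batch, then hands the resulting root HashObject back to this view,
+ * so no per-validator tree is rebuilt from scratch.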
+ */ +export class ValidatorTreeViewDU extends TreeViewDU> { + protected valueChanged: Validator | null = null; + protected _rootNode: BranchNodeStruct; + + constructor(readonly type: ContainerTypeGeneric, node: Node) { + super(); + this._rootNode = node as BranchNodeStruct; + } + + get node(): Node { + return this._rootNode; + } + + get cache(): void { + return; + } + + commit(hashComps: HashComputationGroup | null = null): void { + if (this.valueChanged !== null) { + // TODO - batch: throw error when testing, should be committed by parent + const value = this.valueChanged; + this.valueChanged = null; + this._rootNode = this.type.value_toTree(value) as BranchNodeStruct; + } + + if (hashComps !== null) { + this._rootNode.getHashComputations(hashComps); + } + } + + get pubkey(): Uint8Array { + return (this.valueChanged || this._rootNode.value).pubkey; + } + + set pubkey(value: Uint8Array) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.pubkey = value; + } + + get withdrawalCredentials(): Uint8Array { + return (this.valueChanged || this._rootNode.value).withdrawalCredentials; + } + + set withdrawalCredentials(value: Uint8Array) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.withdrawalCredentials = value; + } + + get effectiveBalance(): number { + return (this.valueChanged || this._rootNode.value).effectiveBalance; + } + + set effectiveBalance(value: number) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.effectiveBalance = value; + } + + get slashed(): boolean { + return (this.valueChanged || this._rootNode.value).slashed; + } + + set slashed(value: boolean) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.slashed = value; + } + + get activationEligibilityEpoch(): number { + return (this.valueChanged || this._rootNode.value).activationEligibilityEpoch; + } + + set activationEligibilityEpoch(value: number) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.activationEligibilityEpoch = value; + } + + get activationEpoch(): number { + return (this.valueChanged || this._rootNode.value).activationEpoch; + } + + set activationEpoch(value: number) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.activationEpoch = value; + } + + get exitEpoch(): number { + return (this.valueChanged || this._rootNode.value).exitEpoch; + } + + set exitEpoch(value: number) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.exitEpoch = value; + } + + get withdrawableEpoch(): number { + return (this.valueChanged || this._rootNode.value).withdrawableEpoch; + } + + set withdrawableEpoch(value: number) { + if (this.valueChanged === null) { + this.valueChanged = this.type.clone(this._rootNode.value); + } + + this.valueChanged.withdrawableEpoch = value; + } + + /** + * This is called by parent to populate nodes with valueChanged. + * Parent will then hash the nodes to get the root hash, then call commitToHashObject to update the root hash. + * Note that node[0] should be a branch node due to pubkey of 48 bytes. 
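+ * For example, the 48-byte pubkey spans two 32-byte chunks, so nodes[0] is a
+ * BranchNode whose left child holds pubkey bytes 0..32 and whose right child holds
+ * bytes 32..48 (zero-padded to a full chunk, per SSZ merkleization).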
+ */ + valueToTree(nodes: Node[]): void { + if (this.valueChanged === null) { + return; + } + + if (nodes.length !== numFields) { + throw new Error(`Expected ${numFields} fields, got ${nodes.length}`); + } + + validatorToTree(nodes, this.valueChanged); + } + + /** + * The HashObject is computed by parent so that we don't need to create a tree from scratch. + */ + commitToHashObject(ho: HashObject): void { + if (this.valueChanged === null) { + return; + } + + const oldRoot = this._rootNode; + const value = this.valueChanged; + this._rootNode = new BranchNodeStruct(oldRoot["valueToNode"], value); + this._rootNode.applyHash(ho); + this.valueChanged = null; + } + + protected clearCache(): void { + this.valueChanged = null; + } + + get name(): string { + return this.type.typeName; + } +} + +/** + * Fast way to write value to tree. Input nodes are at level 3 as below: + * level + * 0 validator root + * / \ + * 1 10 11 + * / \ / \ + * 2 20 21 22 23 + * / \ / \ / \ / \ + * 3 pub with eff sla act act exit with + * / \ + * 4 pub0 pub1 + * + * After this function all nodes at level 4 and level 3 (except for pubkey) are populated + * We can then use HashComputation to compute the root hash in batch at state.validators ViewDU + * // TODO - batch: is it more performant to convert to Uint8Array and hash in batch? + */ +export function validatorToTree(nodes: Node[], value: Validator): void { + const { pubkey, withdrawalCredentials, effectiveBalance, slashed, activationEligibilityEpoch, activationEpoch, exitEpoch, withdrawableEpoch } = value; + + // pubkey 48 bytes = pub0 + pub1 + const node0 = nodes[0]; + if (node0.isLeaf()) { + throw Error("Expected pubkeyNode to be a BranchNode"); + } + const pubkeyNode = node0 as BranchNode; + pubkeyNode.left.applyHash(byteArrayToHashObject(pubkey.subarray(0, 32))); + pubkeyNode.right.applyHash(byteArrayToHashObject(pubkey.subarray(32, 48))); + // withdrawalCredentials + nodes[1].applyHash(byteArrayToHashObject(withdrawalCredentials)); + // effectiveBalance, 8 bytes = h0 + h1 + writeEpochInfToNode(effectiveBalance, nodes[2]); + // slashed + nodes[3].h0 = slashed ? 1 : 0; + // activationEligibilityEpoch, 8 bytes = h0 + h1 + writeEpochInfToNode(activationEligibilityEpoch, nodes[4]); + // activationEpoch, 8 bytes = h0 + h1 + writeEpochInfToNode(activationEpoch, nodes[5]); + // exitEpoch, 8 bytes = h0 + h1 + writeEpochInfToNode(exitEpoch, nodes[6]); + // withdrawableEpoch, 8 bytes = h0 + h1 + writeEpochInfToNode(withdrawableEpoch, nodes[7]); +} + +/** + * An epoch is a 64-bit number, split into two 32-bit numbers and populate to h0 and h1. 
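+ * e.g. epoch = 2 ** 32 + 5 gives h0 = 5 and h1 = 1, while epoch = Infinity is
+ * stored as 0xffffffff in both halves, the all-ones uint64 used for FAR_FUTURE_EPOCH.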
+ */ +function writeEpochInfToNode(epoch: number, node: Node): void { + if (epoch === Infinity) { + node.h0 = 0xffffffff; + node.h1 = 0xffffffff; + } else { + node.h0 = epoch & 0xffffffff; + node.h1 = (epoch / NUMBER_2_POW_32) & 0xffffffff; + } +} diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts new file mode 100644 index 00000000..84806166 --- /dev/null +++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts @@ -0,0 +1,59 @@ +import { BranchNode, LeafNode, Node, subtreeFillToContents } from "@chainsafe/persistent-merkle-tree"; +import {ContainerType} from "../../../../../ssz/src/type/container"; +import {ssz} from "../../../lodestarTypes"; +import {ValidatorType} from "../../../lodestarTypes/phase0/validator"; +import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validatorNodeStruct"; +import { expect } from "chai"; + +const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); + +describe("Validator ssz types", function () { + const seedValidator = { + activationEligibilityEpoch: 10, + activationEpoch: 11, + exitEpoch: Infinity, + slashed: false, + withdrawableEpoch: 13, + pubkey: Buffer.alloc(48, 100), + withdrawalCredentials: Buffer.alloc(32, 100), + }; + + const validators = [ + {...seedValidator, effectiveBalance: 31000000000, slashed: false}, + {...seedValidator, effectiveBalance: 32000000000, slashed: true}, + ]; + + it("should serialize and hash to the same value", () => { + for (const validator of validators) { + const serialized = ValidatorContainer.serialize(validator); + const serialized2 = ssz.phase0.Validator.serialize(validator); + const serialized3 = ssz.phase0.Validator.toViewDU(validator).serialize(); + expect(serialized2).to.be.deep.equal(serialized); + expect(serialized3).to.be.deep.equal(serialized); + + const root = ValidatorContainer.hashTreeRoot(validator); + const root2 = ssz.phase0.Validator.hashTreeRoot(validator); + const root3 = ssz.phase0.Validator.toViewDU(validator).hashTreeRoot(); + expect(root2).to.be.deep.equal(root); + expect(root3).to.be.deep.equal(root); + } + }); + + it("ViewDU.commitToHashObject()", () => { + // transform validator from 0 to 1 + // TODO - batch: avoid this type casting + const viewDU = ssz.phase0.Validator.toViewDU(validators[0]) as ValidatorTreeViewDU; + viewDU.effectiveBalance = validators[1].effectiveBalance; + viewDU.slashed = validators[1].slashed; + const nodes: Node[] = Array.from({length: 8}, () => LeafNode.fromZero()); + nodes[0] = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); + viewDU.valueToTree(nodes); + const depth = 3; + const rootNode = subtreeFillToContents([...nodes], depth); + rootNode.root; + viewDU.commitToHashObject(rootNode); + const expectedRoot = ValidatorContainer.hashTreeRoot(validators[1]); + expect(viewDU.node.root).to.be.deep.equal(expectedRoot); + expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts new file mode 100644 index 00000000..eff98eae --- /dev/null +++ b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts @@ -0,0 +1,54 @@ +import { Node, BranchNode, LeafNode, subtreeFillToContents, getNodesAtDepth } from "@chainsafe/persistent-merkle-tree"; +import { validatorToTree } from 
"../../../../lodestarTypes/phase0/viewDU/validatorNodeStruct"; +import { HashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; +import { ValidatorNodeStruct } from "../../../../lodestarTypes/phase0/validator"; +import { expect } from "chai"; +import { Validator } from "../../../../lodestarTypes/phase0/sszTypes"; + +describe("validatorNodeStruct", () => { + it("should populate validator value to tree", () => { + const seedValidator = { + activationEligibilityEpoch: 10, + activationEpoch: 11, + exitEpoch: Infinity, + slashed: false, + withdrawableEpoch: 13, + pubkey: Buffer.alloc(48, 100), + withdrawalCredentials: Buffer.alloc(32, 100), + }; + + const validators = [ + {...seedValidator, effectiveBalance: 31000000000, slashed: false}, + {...seedValidator, effectiveBalance: 32000000000, slashed: true}, + ]; + + const nodes: Node[] = Array.from({length: 8}, () => LeafNode.fromZero()); + nodes[0] = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); + for (const validator of validators) { + validatorToTree(nodes, validator); + const depth = 3; + const rootNode = subtreeFillToContents([...nodes], depth); + rootNode.root; + const root = new Uint8Array(32); + hashObjectToByteArray(rootNode, root, 0); + const expectedRootNode = Validator.value_toTree(validator); + const expectedNodes = getNodesAtDepth(expectedRootNode, depth, 0, 8); + expect(expectedNodes.length).to.be.equal(8); + for (let i = 0; i < 8; i++) { + expectEqualNode(nodes[i].rootHashObject, expectedNodes[i].rootHashObject, `node ${i}`); + } + expect(root).to.be.deep.equals(ValidatorNodeStruct.hashTreeRoot(validator)); + } + }); +}); + +function expectEqualNode(node1: HashObject, node2: HashObject, message: string) { + expect(node1.h0 >>> 0).to.be.equal(node2.h0 >>> 0, `${message} h0`); + expect(node1.h1 >>> 0).to.be.equal(node2.h1 >>> 0, `${message} h1`); + expect(node1.h2 >>> 0).to.be.equal(node2.h2 >>> 0, `${message} h2`); + expect(node1.h3 >>> 0).to.be.equal(node2.h3 >>> 0, `${message} h3`); + expect(node1.h4 >>> 0).to.be.equal(node2.h4 >>> 0, `${message} h4`); + expect(node1.h5 >>> 0).to.be.equal(node2.h5 >>> 0, `${message} h5`); + expect(node1.h6 >>> 0).to.be.equal(node2.h6 >>> 0, `${message} h6`); + expect(node1.h7 >>> 0).to.be.equal(node2.h7 >>> 0, `${message} h7`); +} From f991f52f1eaa42d9aa099271fe16deef53cb5978 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 20 Jun 2024 15:39:45 +0700 Subject: [PATCH 051/113] feat: implement ListCompositeTreeViewDU --- .../lodestarTypes/phase0/listValidator.ts | 15 ++ .../ssz/test/lodestarTypes/phase0/sszTypes.ts | 4 +- .../test/lodestarTypes/phase0/validator.ts | 2 +- .../phase0/viewDU/listValidator.ts | 191 ++++++++++++++++++ .../{validatorNodeStruct.ts => validator.ts} | 0 .../phase0/listValidator.test.ts | 38 ++++ .../lodestarTypes/phase0/validator.test.ts | 3 +- .../phase0/viewDU/validatorNodeStruct.test.ts | 2 +- 8 files changed, 251 insertions(+), 4 deletions(-) create mode 100644 packages/ssz/test/lodestarTypes/phase0/listValidator.ts create mode 100644 packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts rename packages/ssz/test/lodestarTypes/phase0/viewDU/{validatorNodeStruct.ts => validator.ts} (100%) create mode 100644 packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts diff --git a/packages/ssz/test/lodestarTypes/phase0/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/listValidator.ts new file mode 100644 index 00000000..ef189c85 --- /dev/null +++ b/packages/ssz/test/lodestarTypes/phase0/listValidator.ts @@ -0,0 +1,15 @@ 
+import { ListCompositeType } from "../../../src/type/listComposite"; +import { Node } from "@chainsafe/persistent-merkle-tree"; +import { ListCompositeTreeViewDU } from "../../../src/viewDU/listComposite"; +import { ValidatorNodeStructType } from "./validator"; +import { ListValidatorTreeViewDU } from "./viewDU/listValidator"; + +export class ListValidatorType extends ListCompositeType { + constructor(limit: number) { + super(new ValidatorNodeStructType(), limit); + } + + getViewDU(node: Node, cache?: unknown): ListCompositeTreeViewDU { + return new ListValidatorTreeViewDU(this, node, cache as any); + } +} \ No newline at end of file diff --git a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts index 5a9c84df..a2e7e8ab 100644 --- a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts +++ b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts @@ -16,6 +16,7 @@ import { ATTESTATION_SUBNET_COUNT, } from "../params"; import * as primitiveSsz from "../primitive/sszTypes"; +import {ListValidatorType} from "./listValidator"; import {ValidatorNodeStruct} from "./validator"; export {ValidatorNodeStruct}; @@ -251,7 +252,8 @@ export const ValidatorContainer = new ContainerType( export const Validator = ValidatorNodeStruct; // Export as stand-alone for direct tree optimizations -export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); +// export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); +export const Validators = new ListValidatorType(VALIDATOR_REGISTRY_LIMIT); export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT); export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICAL_VECTOR); export const Slashings = new VectorBasicType(Gwei, EPOCHS_PER_SLASHINGS_VECTOR); diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts b/packages/ssz/test/lodestarTypes/phase0/validator.ts index 3acf24b5..33d4cc3d 100644 --- a/packages/ssz/test/lodestarTypes/phase0/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts @@ -2,7 +2,7 @@ import {ByteViews} from "../../../src/type/abstract"; import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct"; import {ValueOfFields} from "../../../src/view/container"; import * as primitiveSsz from "../primitive/sszTypes"; -import { ValidatorTreeViewDU } from "./viewDU/validatorNodeStruct"; +import { ValidatorTreeViewDU } from "./viewDU/validator"; import {Node} from "@chainsafe/persistent-merkle-tree"; const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts new file mode 100644 index 00000000..15d3fb8c --- /dev/null +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -0,0 +1,191 @@ +import {BranchNode, HashComputation, HashComputationGroup, LeafNode, Node, arrayAtIndex, executeHashComputations, getHashComputations, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import { ListCompositeType } from "../../../../src/type/listComposite"; +import { ArrayCompositeTreeViewDUCache } from "../../../../src/viewDU/arrayComposite"; +import { ListCompositeTreeViewDU } from "../../../../src/viewDU/listComposite"; +import { ValidatorNodeStructType } from "../validator"; +import { ValidatorTreeViewDU } from "./validator"; + +/** + * Best SIMD implementation is in 512 bits = 64 bytes + * If not, 
hashtree will make a loop inside + * Given sha256 operates on a block of 4 bytes, we can hash 16 inputs at once + * Each input is 64 bytes + * TODO - batch: is 8 better? + */ +const PARALLEL_FACTOR = 16; + +export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { + private batchHashComputations: Array; + private singleHashComputations: Array; + private batchHashRootNodes: Array; + private singleHashRootNode: Node; + private batchLevel3Nodes: Array; + private singleLevel3Nodes: Node[]; + + constructor( + readonly type: ListCompositeType, + protected _rootNode: Node, + cache?: ArrayCompositeTreeViewDUCache + ) { + super(type, _rootNode, cache); + + this.batchHashComputations = []; + this.singleHashComputations = []; + this.batchHashRootNodes = []; + this.batchLevel3Nodes = []; + this.singleLevel3Nodes = []; + for (let i = 0; i < PARALLEL_FACTOR; i++) { + // level 3, validator.pubkey + const pubkey0 = LeafNode.fromZero(); + const pubkey1 = LeafNode.fromZero(); + const pubkey = new BranchNode(pubkey0, pubkey1); + let hc: HashComputation = {src0: pubkey0, src1: pubkey1, dest: pubkey}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 3).push(hc); + this.singleLevel3Nodes.push(pubkey); + } + arrayAtIndex(this.batchHashComputations, 3).push(hc); + arrayAtIndex(this.batchLevel3Nodes, i).push(pubkey); + + // level 2 + const withdrawalCredential = LeafNode.fromZero(); + const node20 = new BranchNode(pubkey, withdrawalCredential); + hc = {src0: pubkey, src1: withdrawalCredential, dest: node20}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 2).push(hc); + this.singleLevel3Nodes.push(withdrawalCredential); + } + arrayAtIndex(this.batchHashComputations, 2).push(hc); + arrayAtIndex(this.batchLevel3Nodes, i).push(withdrawalCredential); + // effectiveBalance, slashed + const effectiveBalance = LeafNode.fromZero(); + const slashed = LeafNode.fromZero(); + const node21 = new BranchNode(effectiveBalance, slashed); + hc = {src0: effectiveBalance, src1: slashed, dest: node21}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 2).push(hc); + this.singleLevel3Nodes.push(effectiveBalance); + this.singleLevel3Nodes.push(slashed); + } + arrayAtIndex(this.batchHashComputations, 2).push(hc); + arrayAtIndex(this.batchLevel3Nodes, i).push(effectiveBalance); + arrayAtIndex(this.batchLevel3Nodes, i).push(slashed); + // activationEligibilityEpoch, activationEpoch + const activationEligibilityEpoch = LeafNode.fromZero(); + const activationEpoch = LeafNode.fromZero(); + const node22 = new BranchNode(activationEligibilityEpoch, activationEpoch); + hc = {src0: activationEligibilityEpoch, src1: activationEpoch, dest: node22}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 2).push(hc); + this.singleLevel3Nodes.push(activationEligibilityEpoch); + this.singleLevel3Nodes.push(activationEpoch); + } + arrayAtIndex(this.batchHashComputations, 2).push(hc); + arrayAtIndex(this.batchLevel3Nodes, i).push(activationEligibilityEpoch); + arrayAtIndex(this.batchLevel3Nodes, i).push(activationEpoch); + // exitEpoch, withdrawableEpoch + const exitEpoch = LeafNode.fromZero(); + const withdrawableEpoch = LeafNode.fromZero(); + const node23 = new BranchNode(exitEpoch, withdrawableEpoch); + hc = {src0: exitEpoch, src1: withdrawableEpoch, dest: node23}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 2).push(hc); + this.singleLevel3Nodes.push(exitEpoch); + this.singleLevel3Nodes.push(withdrawableEpoch); + } + arrayAtIndex(this.batchHashComputations, 2).push(hc); + 
arrayAtIndex(this.batchLevel3Nodes, i).push(exitEpoch); + arrayAtIndex(this.batchLevel3Nodes, i).push(withdrawableEpoch); + + // level 1 + const node10 = new BranchNode(node20, node21); + hc = {src0: node20, src1: node21, dest: node10}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 1).push(hc); + } + arrayAtIndex(this.batchHashComputations, 1).push(hc); + const node11 = new BranchNode(node22, node23); + hc = {src0: node22, src1: node23, dest: node11}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 1).push(hc); + } + arrayAtIndex(this.batchHashComputations, 1).push(hc); + + // level 0 + const node00 = new BranchNode(node10, node11); + hc = {src0: node10, src1: node11, dest: node00}; + if (i === 0) { + arrayAtIndex(this.singleHashComputations, 0).push(hc); + // this.singleHashRootNode = node00; + } + arrayAtIndex(this.batchHashComputations, 0).push(hc); + this.batchHashRootNodes.push(node00); + } + + this.singleHashRootNode = this.batchHashRootNodes[0]; + } + + commit(hashComps: HashComputationGroup | null = null): void { + const isOldRootHashed = this._rootNode.h0 !== null; + if (this.viewsChanged.size === 0) { + if (!isOldRootHashed && hashComps !== null) { + getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + } + return; + } + + // TODO - batch: remove this type cast + const viewsChanged = Array.from(this.viewsChanged.values()) as ValidatorTreeViewDU[]; + const endBatch = viewsChanged.length - (viewsChanged.length % PARALLEL_FACTOR); + // nodesChanged is sorted by index + const nodesChanged: {index: number; node: Node}[] = []; + // commit every 16 validators in batch + for (let i = 0; i < endBatch; i++) { + const indexInBatch = i % PARALLEL_FACTOR; + viewsChanged[i].valueToTree(this.batchLevel3Nodes[indexInBatch]); + if (indexInBatch === PARALLEL_FACTOR - 1) { + executeHashComputations(this.batchHashComputations); + // commit all validators in this batch + for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) { + viewsChanged[i - j].commitToHashObject(this.batchHashRootNodes[PARALLEL_FACTOR - 1 - j]); + nodesChanged.push({index: i - j, node: viewsChanged[i - j].node}); + } + } + } + + // commit the remaining validators one by one + for (let i = endBatch; i < viewsChanged.length; i++) { + viewsChanged[i].valueToTree(this.singleLevel3Nodes); + executeHashComputations(this.singleHashComputations); + viewsChanged[i].commitToHashObject(this.singleHashRootNode); + nodesChanged.push({index: i, node: viewsChanged[i].node}); + } + + // do the remaining commit step the same to parent (ArrayCompositeTreeViewDU) + const indexes = nodesChanged.map((entry) => entry.index); + const nodes = nodesChanged.map((entry) => entry.node); + const chunksNode = this.type.tree_getChunksNode(this._rootNode); + const hashCompsThis = + hashComps != null && isOldRootHashed + ? { + byLevel: hashComps.byLevel, + offset: hashComps.offset + this.type.tree_chunksNodeOffset(), + } + : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, hashCompsThis); + + this._rootNode = this.type.tree_setChunksNode( + this._rootNode, + newChunksNode, + this.dirtyLength ? 
this._length : null, + hashComps + ); + + if (!isOldRootHashed && hashComps !== null) { + getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + } + + this.viewsChanged.clear(); + this.dirtyLength = false; + } +} diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validatorNodeStruct.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts similarity index 100% rename from packages/ssz/test/lodestarTypes/phase0/viewDU/validatorNodeStruct.ts rename to packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts new file mode 100644 index 00000000..c3e306df --- /dev/null +++ b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts @@ -0,0 +1,38 @@ +import { ListCompositeType } from "../../../../src/type/listComposite"; +import { ValidatorNodeStruct } from "../../../lodestarTypes/phase0/validator"; +import { + preset, +} from "../../../lodestarTypes/params"; +import { ssz } from "../../../lodestarTypes"; +import { expect } from "chai"; +const {VALIDATOR_REGISTRY_LIMIT} = preset; + +describe("ListValidator ssz type", function () { + const seedValidator = { + activationEligibilityEpoch: 10, + activationEpoch: 11, + exitEpoch: Infinity, + slashed: false, + withdrawableEpoch: 13, + pubkey: Buffer.alloc(48, 100), + withdrawalCredentials: Buffer.alloc(32, 100), + effectiveBalance: 32000000000, + }; + + const testCases = [32, 33, 34, 35]; + const oldValidatorsType = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); + for (const numValidators of testCases) { + it (`should commit ${numValidators} validators`, () => { + const validators = Array.from({length: numValidators}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); + const oldViewDU = oldValidatorsType.toViewDU(validators); + const newViewDU = ssz.phase0.Validators.toViewDU(validators); + // modify all validators + for (let i = 0; i < numValidators; i++) { + oldViewDU.get(i).activationEpoch = 2024; + newViewDU.get(i).activationEpoch = 2024; + } + expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot()); + expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize()); + }); + } +}); diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts index 84806166..891ca31c 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts @@ -2,7 +2,7 @@ import { BranchNode, LeafNode, Node, subtreeFillToContents } from "@chainsafe/pe import {ContainerType} from "../../../../../ssz/src/type/container"; import {ssz} from "../../../lodestarTypes"; import {ValidatorType} from "../../../lodestarTypes/phase0/validator"; -import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validatorNodeStruct"; +import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validator"; import { expect } from "chai"; const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); @@ -16,6 +16,7 @@ describe("Validator ssz types", function () { withdrawableEpoch: 13, pubkey: Buffer.alloc(48, 100), withdrawalCredentials: Buffer.alloc(32, 100), + effectiveBalance: 32000000000, }; const validators = [ diff --git 
a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts index eff98eae..b11a5af8 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts @@ -1,5 +1,5 @@ import { Node, BranchNode, LeafNode, subtreeFillToContents, getNodesAtDepth } from "@chainsafe/persistent-merkle-tree"; -import { validatorToTree } from "../../../../lodestarTypes/phase0/viewDU/validatorNodeStruct"; +import { validatorToTree } from "../../../../lodestarTypes/phase0/viewDU/validator"; import { HashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; import { ValidatorNodeStruct } from "../../../../lodestarTypes/phase0/validator"; import { expect } from "chai"; From 0c8ac397e90b68bae43b9d0cb84f60ed4767d760 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 21 Jun 2024 17:30:50 +0700 Subject: [PATCH 052/113] feat: implement and use hashtree.digestNLevelUnsafe() --- .../src/hasher/as-sha256.ts | 3 + .../src/hasher/hashtree.ts | 39 +++- .../src/hasher/index.ts | 8 + .../src/hasher/noble.ts | 3 + .../src/hasher/types.ts | 6 + packages/persistent-merkle-tree/src/node.ts | 1 + .../phase0/viewDU/listValidator.ts | 177 +++++++----------- .../lodestarTypes/phase0/viewDU/validator.ts | 95 +++++++++- .../phase0/viewDU/validatorNodeStruct.test.ts | 51 +++-- 9 files changed, 245 insertions(+), 138 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 4409d034..670f096b 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -6,6 +6,9 @@ export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects, + digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { + throw new Error("Not implemented"); + }, batchHashObjects: (inputs: HashObject[]) => { // as-sha256 uses SIMD for batch hash if (inputs.length === 0) { diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index e0b3a052..22570759 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,4 +1,4 @@ -import {hashInto} from "@chainsafe/hashtree"; +import {hash, hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; @@ -10,7 +10,8 @@ import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha2 * Each input is 64 bytes */ const PARALLEL_FACTOR = 16; -const uint8Input = new Uint8Array(PARALLEL_FACTOR * 64); +const MAX_INPUT_SIZE = PARALLEL_FACTOR * 64; +const uint8Input = new Uint8Array(MAX_INPUT_SIZE); const uint32Input = new Uint32Array(uint8Input.buffer); const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32); const uint32Output = new Uint32Array(uint8Output.buffer); @@ -37,6 +38,40 @@ export const hasher: Hasher = { hashInto(hashInput, hashOutput); return uint32ArrayToHashObject(uint32Output, 0); }, + // given nLevel = 3 + // digest multiple of 8 chunks = 256 bytes + // the result is multiple of 1 chunk = 32 bytes + // this is the same to hashTreeRoot() of multiple validators + digestNLevelUnsafe(data: Uint8Array, 
nLevel: number): Uint8Array { + let inputLength = data.length; + const bytesInBatch = Math.pow(2, nLevel) * 32; + if (nLevel < 1) { + throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`); + } + if (inputLength % bytesInBatch !== 0) { + throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`); + } + if (inputLength > MAX_INPUT_SIZE) { + throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); + } + + let outputLength = Math.floor(inputLength / 2); + let hashOutput: Uint8Array | null = null; + for (let i = nLevel; i > 0; i--) { + uint8Input.set(hashOutput ?? data, 0); + const hashInput = uint8Input.subarray(0, inputLength); + hashOutput = uint8Output.subarray(0, outputLength); + hashInto(hashInput, hashOutput); + inputLength = outputLength; + outputLength = Math.floor(inputLength / 2); + } + + if (hashOutput === null) { + throw new Error("hashOutput is null"); + } + // the result is unsafe as it will be modified later, consumer should save the result if needed + return hashOutput; + }, // eslint-disable-next-line @typescript-eslint/no-unused-vars batchHashObjects(inputs: HashObject[]): HashObject[] { if (inputs.length === 0) { diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 2c5170e2..13fb6a7c 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -26,3 +26,11 @@ export let hasher: Hasher = hashtreeHasher; export function setHasher(newHasher: Hasher): void { hasher = newHasher; } + +export function digest64(a: Uint8Array, b: Uint8Array): Uint8Array { + return hasher.digest64(a, b); +} + +export function digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { + return hasher.digestNLevelUnsafe(data, nLevel); +} diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 43c32f10..9766a8d5 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -9,6 +9,9 @@ export const hasher: Hasher = { name: "noble", digest64, digest64HashObjects: (a, b) => uint8ArrayToHashObject(digest64(hashObjectToUint8Array(a), hashObjectToUint8Array(b))), + digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { + throw new Error("Not implemented"); + }, batchHashObjects: (inputs: HashObject[]) => { // noble does not support batch hash if (inputs.length === 0) { diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 74812aee..1f2aa810 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -14,6 +14,12 @@ export type Hasher = { * Hash two 32-byte HashObjects */ digest64HashObjects(a: HashObject, b: HashObject): HashObject; + /** + * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels + * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes. + * The result is unsafe as it will be overwritten by the next call. 
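+ * e.g. digestNLevelUnsafe(input, 3) hashes a multiple of 256 bytes (8 chunks) down
+ * to one 32-byte root per group, the hashTreeRoot of each validator; consumers
+ * should copy the result (e.g. with slice()) before triggering another hash.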
+ */ + digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array /** * Batch hash 2 * n HashObjects, return n HashObjects output */ diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 1c7def21..bbda88e9 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -412,6 +412,7 @@ export function getHashComputations(node: Node, offset: number, hashCompsByLevel // else stop the recursion, LeafNode should have h0 } +// TODO - batch: move to hasher/index.ts export function executeHashComputations(hashComputations: Array): void { hasher.executeHashComputations(hashComputations); } diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index 15d3fb8c..fa4e288b 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -1,26 +1,27 @@ -import {BranchNode, HashComputation, HashComputationGroup, LeafNode, Node, arrayAtIndex, executeHashComputations, getHashComputations, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, Node, digestNLevelUnsafe, getHashComputations, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; import { ListCompositeType } from "../../../../src/type/listComposite"; import { ArrayCompositeTreeViewDUCache } from "../../../../src/viewDU/arrayComposite"; import { ListCompositeTreeViewDU } from "../../../../src/viewDU/listComposite"; import { ValidatorNodeStructType } from "../validator"; import { ValidatorTreeViewDU } from "./validator"; +import { ByteViews } from "../../../../src"; +import { byteArrayToHashObject } from "@chainsafe/as-sha256"; /** - * Best SIMD implementation is in 512 bits = 64 bytes - * If not, hashtree will make a loop inside - * Given sha256 operates on a block of 4 bytes, we can hash 16 inputs at once - * Each input is 64 bytes - * TODO - batch: is 8 better? 
+ * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks + * Given a level3 of validators have 8 chunks, we can hash 4 validators at a time */ -const PARALLEL_FACTOR = 16; +const PARALLEL_FACTOR = 4; export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { - private batchHashComputations: Array; - private singleHashComputations: Array; - private batchHashRootNodes: Array; - private singleHashRootNode: Node; - private batchLevel3Nodes: Array; - private singleLevel3Nodes: Node[]; + private batchLevel3Bytes: Uint8Array; + private batchLevel4Bytes: Uint8Array; + // 32 * 8 = 256 bytes each + private level3ByteViewsArr: ByteViews[]; + // 64 bytes each + private level4BytesArr: Uint8Array[]; + private singleLevel3ByteView: ByteViews; + private singleLevel4Bytes: Uint8Array; constructor( readonly type: ListCompositeType, @@ -28,101 +29,22 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { - viewsChanged[i - j].commitToHashObject(this.batchHashRootNodes[PARALLEL_FACTOR - 1 - j]); - nodesChanged.push({index: i - j, node: viewsChanged[i - j].node}); + const viewIndex = i - j; + const indexInBatch = viewIndex % PARALLEL_FACTOR; + const hashObject = byteArrayToHashObject(validatorRoots.subarray(indexInBatch * 32, (indexInBatch + 1) * 32)); + viewsChanged[viewIndex].commitToHashObject(hashObject); + nodesChanged.push({index: viewIndex, node: viewsChanged[viewIndex].node}); } } } - // commit the remaining validators one by one + // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views + // it's not much different to commit one by one for (let i = endBatch; i < viewsChanged.length; i++) { - viewsChanged[i].valueToTree(this.singleLevel3Nodes); - executeHashComputations(this.singleHashComputations); - viewsChanged[i].commitToHashObject(this.singleHashRootNode); + viewsChanged[i].valueToMerkleBytes(this.singleLevel3ByteView, this.singleLevel4Bytes); + // level 4 hash + const pubkeyRoot = digestNLevelUnsafe(this.singleLevel4Bytes, 1); + if (pubkeyRoot.length !== 32) { + throw new Error(`Invalid pubkeyRoot length, expect 32, got ${pubkeyRoot.length}`); + } + this.singleLevel3ByteView.uint8Array.set(pubkeyRoot, 0); + // level 3 hash + const validatorRoot = digestNLevelUnsafe(this.singleLevel3ByteView.uint8Array, 3); + if (validatorRoot.length !== 32) { + throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`); + } + const hashObject = byteArrayToHashObject(validatorRoot); + viewsChanged[i].commitToHashObject(hashObject); nodesChanged.push({index: i, node: viewsChanged[i].node}); } diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts index 44d9ad44..10a549e0 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts @@ -1,4 +1,4 @@ -import { HashObject, byteArrayToHashObject } from "@chainsafe/as-sha256"; +import { HashObject, byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; import { BranchNodeStruct } from "../../../../src/branchNodeStruct"; import { ContainerTypeGeneric } from "../../../../src/view/container"; import { TreeViewDU } from "../../../../src/viewDU/abstract"; @@ -8,6 +8,7 @@ import { BranchNode, HashComputationGroup, } from "@chainsafe/persistent-merkle-tree"; +import { ByteViews } from "../../../../src/type/abstract"; type Validator = { pubkey: Uint8Array; withdrawalCredentials: Uint8Array; @@ 
-21,6 +22,11 @@ type Validator = { const numFields = 8; const NUMBER_2_POW_32 = 2 ** 32; +/* + * Below constants are respective to their ssz type in `ValidatorType`. + */ +const UINT32_SIZE = 4; +const CHUNK_SIZE = 32; /** * A specific ViewDU for validator designed to be efficient to batch hash and efficient to create tree @@ -158,15 +164,37 @@ export class ValidatorTreeViewDU extends TreeViewDU { - it("should populate validator value to tree", () => { - const seedValidator = { - activationEligibilityEpoch: 10, - activationEpoch: 11, - exitEpoch: Infinity, - slashed: false, - withdrawableEpoch: 13, - pubkey: Buffer.alloc(48, 100), - withdrawalCredentials: Buffer.alloc(32, 100), - }; + const seedValidator = { + activationEligibilityEpoch: 10, + activationEpoch: 11, + exitEpoch: Infinity, + slashed: false, + withdrawableEpoch: 13, + pubkey: Buffer.alloc(48, 100), + withdrawalCredentials: Buffer.alloc(32, 100), + }; - const validators = [ - {...seedValidator, effectiveBalance: 31000000000, slashed: false}, - {...seedValidator, effectiveBalance: 32000000000, slashed: true}, - ]; + const validators = [ + {...seedValidator, effectiveBalance: 31000000000, slashed: false}, + {...seedValidator, effectiveBalance: 32000000000, slashed: true}, + ]; + it("should populate validator value to tree", () => { const nodes: Node[] = Array.from({length: 8}, () => LeafNode.fromZero()); nodes[0] = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); for (const validator of validators) { @@ -40,6 +40,25 @@ describe("validatorNodeStruct", () => { expect(root).to.be.deep.equals(ValidatorNodeStruct.hashTreeRoot(validator)); } }); + + it("should populate validator value to merkle bytes", () => { + for (const validator of validators) { + const expectedRoot0 = ValidatorNodeStruct.hashTreeRoot(validator); + // validator has 8 fields + const level3 = new Uint8Array(32 * 8); + const dataView = new DataView(level3.buffer, level3.byteOffset, level3.byteLength); + // pubkey takes 2 chunks, has to go to another level + const level4 = new Uint8Array(32 * 2); + validatorToMerkleBytes({uint8Array: level3, dataView}, level4, validator); + // additional slice() call make it easier to debug + const pubkeyRoot = digestNLevelUnsafe(level4, 1).slice(); + level3.set(pubkeyRoot, 0); + const root = digestNLevelUnsafe(level3, 3).slice(); + const expectedRootNode2 = Validator.value_toTree(validator); + expect(root).to.be.deep.equals(expectedRoot0); + expect(root).to.be.deep.equals(expectedRootNode2.root); + } + }) }); function expectEqualNode(node1: HashObject, node2: HashObject, message: string) { From 724c057ca413b1e39cd0b147c9573a5c265c3e1b Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 22 Jun 2024 16:37:45 +0700 Subject: [PATCH 053/113] chore: export more classes --- packages/ssz/src/index.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/ssz/src/index.ts b/packages/ssz/src/index.ts index b0c6d077..5c427f14 100644 --- a/packages/ssz/src/index.ts +++ b/packages/ssz/src/index.ts @@ -26,8 +26,10 @@ export {Type, ValueOf, JsonPath, ByteViews} from "./type/abstract"; export {BasicType, isBasicType} from "./type/basic"; export {CompositeType, CompositeTypeAny, CompositeView, CompositeViewDU, isCompositeType} from "./type/composite"; export {TreeView} from "./view/abstract"; -export {ValueOfFields} from "./view/container"; +export {ValueOfFields, ContainerTypeGeneric} from "./view/container"; export {TreeViewDU} from "./viewDU/abstract"; +export {ListCompositeTreeViewDU} from 
"./viewDU/listComposite"; +export {ArrayCompositeTreeViewDUCache} from "./viewDU/arrayComposite"; // Values export {BitArray, getUint8ByteToBitBooleanArray} from "./value/bitArray"; @@ -36,3 +38,6 @@ export {BitArray, getUint8ByteToBitBooleanArray} from "./value/bitArray"; export {fromHexString, toHexString, byteArrayEquals} from "./util/byteArray"; export {hash64, symbolCachedPermanentRoot} from "./util/merkleize"; + +// others +export {BranchNodeStruct} from "./branchNodeStruct"; From 7b20306b0306c51e3ffae14339cad98d8c6d044d Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sun, 23 Jun 2024 14:46:27 +0700 Subject: [PATCH 054/113] fix: spec tests --- .../phase0/viewDU/listValidator.ts | 31 +++++++----- .../lodestarTypes/phase0/viewDU/validator.ts | 7 +-- .../phase0/listValidator.test.ts | 48 ++++++++++++++++++- 3 files changed, 68 insertions(+), 18 deletions(-) diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index fa4e288b..462cb94c 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -57,14 +57,16 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU a - b); + const endBatch = indicesChanged.length - (indicesChanged.length % PARALLEL_FACTOR); // nodesChanged is sorted by index const nodesChanged: {index: number; node: Node}[] = []; // commit every 16 validators in batch for (let i = 0; i < endBatch; i++) { const indexInBatch = i % PARALLEL_FACTOR; - viewsChanged[i].valueToMerkleBytes(this.level3ByteViewsArr[indexInBatch], this.level4BytesArr[indexInBatch]); + const viewIndex = indicesChanged[i]; + const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; + viewChanged.valueToMerkleBytes(this.level3ByteViewsArr[indexInBatch], this.level4BytesArr[indexInBatch]); if (indexInBatch === PARALLEL_FACTOR - 1) { // hash level 4 @@ -81,19 +83,24 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { - const viewIndex = i - j; - const indexInBatch = viewIndex % PARALLEL_FACTOR; + const viewIndex = indicesChanged[i - j]; + const indexInBatch = (i - j) % PARALLEL_FACTOR; const hashObject = byteArrayToHashObject(validatorRoots.subarray(indexInBatch * 32, (indexInBatch + 1) * 32)); - viewsChanged[viewIndex].commitToHashObject(hashObject); - nodesChanged.push({index: viewIndex, node: viewsChanged[viewIndex].node}); + const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; + viewChanged.commitToHashObject(hashObject); + nodesChanged.push({index: viewIndex, node: viewChanged.node}); + // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal + this.nodes[viewIndex] = viewChanged.node; } } } // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views // it's not much different to commit one by one - for (let i = endBatch; i < viewsChanged.length; i++) { - viewsChanged[i].valueToMerkleBytes(this.singleLevel3ByteView, this.singleLevel4Bytes); + for (let i = endBatch; i < indicesChanged.length; i++) { + const viewIndex = indicesChanged[i]; + const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; + viewChanged.valueToMerkleBytes(this.singleLevel3ByteView, this.singleLevel4Bytes); // level 4 hash const pubkeyRoot = digestNLevelUnsafe(this.singleLevel4Bytes, 1); if (pubkeyRoot.length !== 32) { @@ -106,8 +113,10 @@ export class 
ListValidatorTreeViewDU extends ListCompositeTreeViewDU { const validators = Array.from({length: numValidators}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); @@ -35,4 +38,45 @@ describe("ListValidator ssz type", function () { expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize()); }); } + + const testCases2 = [[1], [3, 5], [1,9, 7]]; + const numValidator = 33; + for (const modifiedIndices of testCases2) { + it(`should modify ${modifiedIndices.length} validators`, () => { + const validators = Array.from({length: numValidator}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); + const oldViewDU = oldValidatorsType.toViewDU(validators); + const newViewDU = ssz.phase0.Validators.toViewDU(validators); + for (const index of modifiedIndices) { + oldViewDU.get(index).activationEpoch = 2024; + newViewDU.get(index).activationEpoch = 2024; + } + expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot()); + expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize()); + }) + } + + const testCases3 = [1, 3, 5, 7]; + for (const numPush of testCases3) { + it(`should push ${numPush} validators`, () => { + const validators = Array.from({length: numValidator}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); + const oldViewDU = oldValidatorsType.toViewDU(validators); + const newViewDU = ssz.phase0.Validators.toViewDU(validators); + const newValidators: Validator[] = []; + // this ensure the commit() should update nodes array + newViewDU.getAllReadonlyValues(); + for (let i = 0; i < numPush; i++) { + const validator = {...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + numValidator + i}; + newValidators.push(validator); + oldViewDU.push(ValidatorContainer.toViewDU(validator)); + newViewDU.push(ssz.phase0.Validator.toViewDU(validator)); + } + oldViewDU.commit(); + expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.node.root); + expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize()); + const allValidators = newViewDU.getAllReadonlyValues(); + for (let i = 0; i < numPush; i++) { + expect(allValidators[numValidator + i]).to.be.deep.equal(newValidators[i]); + } + }) + } }); From ac3a930631f19d5da643c4eb2aca6f7e548eccc3 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 24 Jun 2024 11:26:39 +0700 Subject: [PATCH 055/113] feat: ContainerNodeStruct does not support batch hash --- .../src/hasher/hashtree.ts | 1 + packages/ssz/src/branchNodeStruct.ts | 56 +++---------------- .../ssz/src/viewDU/containerNodeStruct.ts | 21 ++++--- .../lodestarTypes/phase0/viewDU/validator.ts | 20 ++++--- .../ssz/test/unit/eth2/validators.test.ts | 3 +- 5 files changed, 33 insertions(+), 68 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 22570759..39b9dcd6 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -42,6 +42,7 @@ export const hasher: Hasher = { // digest multiple of 8 chunks = 256 bytes // the result is multiple of 1 chunk = 32 bytes // this is the same to hashTreeRoot() of multiple validators + // TODO - batch: data, offset, length to avoid subarray call digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { let inputLength = data.length; const bytesInBatch = Math.pow(2, nLevel) * 32; diff --git a/packages/ssz/src/branchNodeStruct.ts 
b/packages/ssz/src/branchNodeStruct.ts
index de0357be..471716c4 100644
--- a/packages/ssz/src/branchNodeStruct.ts
+++ b/packages/ssz/src/branchNodeStruct.ts
@@ -1,40 +1,23 @@
 import {HashObject} from "@chainsafe/as-sha256/lib/hashObject";
-import {
-  hashObjectToUint8Array,
-  Node,
-  getHashComputations,
-  HashComputationGroup,
-} from "@chainsafe/persistent-merkle-tree";
-
-export type ValueToNodeFn = (
-  value: T,
-  hashComps: HashComputationGroup | null,
-  hashCompRootNode: Node | null
-) => Node;
+import {hashObjectToUint8Array, Node} from "@chainsafe/persistent-merkle-tree";
 
 /**
- * BranchNode whose children's data is represented as a struct, the backed tree is lazily computed from the struct.
+ * BranchNode whose children's data is represented as a struct, not a tree.
 *
 * This approach is useful for memory efficiency of data that is not modified often, for example the validators
 * registry in Ethereum consensus `state.validators`. The tradeoff is that getting the hash or proofs is more
 * expensive because the tree has to be recreated every time.
 */
 export class BranchNodeStruct extends Node {
-  /**
-   * this represents the backed tree which is lazily computed from value
-   */
-  private _rootNode: Node | null = null;
-  constructor(private readonly valueToNode: ValueToNodeFn, readonly value: T) {
+  constructor(private readonly valueToNode: (value: T) => Node, readonly value: T) {
     // First null value is to save an extra variable to check if a node has a root or not
     super(null as unknown as number, 0, 0, 0, 0, 0, 0, 0);
-    this._rootNode = null;
   }
 
   get rootHashObject(): HashObject {
     if (this.h0 === null) {
-      super.applyHash(this.rootNode.rootHashObject);
-      // this node has been hashed, we can clear the backed tree to release a lot of memory
-      this._rootNode = null;
+      const node = this.valueToNode(this.value);
+      super.applyHash(node.rootHashObject);
     }
     return this;
   }
@@ -48,35 +31,10 @@ export class BranchNodeStruct extends Node {
   }
 
   get left(): Node {
-    return this.rootNode.left;
+    return this.valueToNode(this.value).left;
   }
 
   get right(): Node {
-    return this.rootNode.right;
-  }
-
-  getHashComputations(hashComps: HashComputationGroup): void {
-    if (this.h0 !== null) {
-      return;
-    }
-
-    if (this._rootNode === null) {
-      // set dest of HashComputation to this node
-      this._rootNode = this.valueToNode(this.value, hashComps, this);
-    } else {
-      // not likely to hit this path if called from ViewDU, handle just in case
-      getHashComputations(this, hashComps.offset, hashComps.byLevel);
-    }
-  }
-
-  /**
-   * Singleton implementation to make sure there is single backed tree for this node.
-   * This is important for batching HashComputations
-   */
-  private get rootNode(): Node {
-    if (this._rootNode === null) {
-      this._rootNode = this.valueToNode(this.value, null, null);
-    }
-    return this._rootNode;
+    return this.valueToNode(this.value).right;
   }
 }
diff --git a/packages/ssz/src/viewDU/containerNodeStruct.ts b/packages/ssz/src/viewDU/containerNodeStruct.ts
index e9baef20..87993801 100644
--- a/packages/ssz/src/viewDU/containerNodeStruct.ts
+++ b/packages/ssz/src/viewDU/containerNodeStruct.ts
@@ -1,4 +1,4 @@
-import {HashComputationGroup, Node} from "@chainsafe/persistent-merkle-tree";
+import {Node} from "@chainsafe/persistent-merkle-tree";
 import {Type, ValueOf} from "../type/abstract";
 import {isCompositeType} from "../type/composite";
 import {BranchNodeStruct} from "../branchNodeStruct";
@@ -27,16 +27,19 @@ class ContainerTreeViewDU>> extends
     return;
   }
 
-  commit(hashComps: HashComputationGroup | null = null): void {
-    if (this.valueChanged !== null) {
-      const value = this.valueChanged;
-      this.valueChanged = null;
-      this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>;
+  commit(): void {
+    if (this.valueChanged === null) {
+      // this does not support batch hash
+      this._rootNode.root;
+      return;
     }
 
-    if (hashComps !== null) {
-      this._rootNode.getHashComputations(hashComps);
-    }
+    const value = this.valueChanged;
+    this.valueChanged = null;
+
+    this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>;
+    // this does not support batch hash
+    this._rootNode.root;
   }
 
   protected clearCache(): void {
diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
index 8e692739..bf6df02b 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
@@ -49,17 +49,19 @@ export class ValidatorTreeViewDU extends TreeViewDU;
+  commit(): void {
+    if (this.valueChanged === null) {
+      // this does not support batch hash
+      this._rootNode.root;
+      return;
     }
 
-    if (hashComps !== null) {
-      this._rootNode.getHashComputations(hashComps);
-    }
+    const value = this.valueChanged;
+    this.valueChanged = null;
+
+    this._rootNode = this.type.value_toTree(value) as BranchNodeStruct;
+    // this does not support batch hash
+    this._rootNode.root;
   }
 
   get pubkey(): Uint8Array {
diff --git a/packages/ssz/test/unit/eth2/validators.test.ts b/packages/ssz/test/unit/eth2/validators.test.ts
index 8c6e1c18..286286e5 100644
--- a/packages/ssz/test/unit/eth2/validators.test.ts
+++ b/packages/ssz/test/unit/eth2/validators.test.ts
@@ -117,8 +117,9 @@ describe("Container with BranchNodeStruct", function () {
 * 3       pub with eff sla act act exit with
 *        / \
 * 4   pub0 pub1
+ * This does not support batch hash
 **/
-describe("getHashComputations BranchNodeStruct", function () {
+describe.skip("getHashComputations BranchNodeStruct", function () {
   const testCases: {name: string; fn: (validator: ValueOf) => void}[] = [
     {name: "modify pubkey", fn: (validator) => (validator.pubkey = Buffer.alloc(48, 0x01))},
     {

From 34d822f3ed1528702ee72721d2328f210c33bd9f Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Mon, 24 Jun 2024 13:26:26 +0700
Subject: [PATCH 056/113] fix: allocate memory once for ListValidatorTreeViewDU

---
 .../phase0/viewDU/listValidator.ts | 60 +++++++++----------
 1 file changed, 27 insertions(+), 33 deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
index 
From 34d822f3ed1528702ee72721d2328f210c33bd9f Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Mon, 24 Jun 2024 13:26:26 +0700
Subject: [PATCH 056/113] fix: allocate memory once for ListValidatorTreeViewDU

---
 .../phase0/viewDU/listValidator.ts | 60 +++++++++----------
 1 file changed, 27 insertions(+), 33 deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
index 462cb94c..9e06a46a 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
@@ -12,39 +12,33 @@ import { byteArrayToHashObject } from "@chainsafe/as-sha256";
  * Given that level 3 of a validator has 8 chunks, we can hash 4 validators at a time
  */
 const PARALLEL_FACTOR = 4;
+/**
+ * Allocate memory once for batch hashing validators.
+ */
+// each validator has 8 chunks at level 3, each chunk has 32 bytes
+const batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32);
+const level3ByteViewsArr: ByteViews[] = [];
+for (let i = 0; i < PARALLEL_FACTOR; i++) {
+  const uint8Array = batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32);
+  const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength);
+  level3ByteViewsArr.push({uint8Array, dataView});
+}
+const singleLevel3ByteView = level3ByteViewsArr[0];
+// each validator has 2 chunks for pubkey at level 4, each chunk has 32 bytes
+const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32);
+const level4BytesArr: Uint8Array[] = [];
+for (let i = 0; i < PARALLEL_FACTOR; i++) {
+  level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32));
+}
+const singleLevel4Bytes = level4BytesArr[0];
 
 export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU<ValidatorNodeStructType> {
-  private batchLevel3Bytes: Uint8Array;
-  private batchLevel4Bytes: Uint8Array;
-  // 32 * 8 = 256 bytes each
-  private level3ByteViewsArr: ByteViews[];
-  // 64 bytes each
-  private level4BytesArr: Uint8Array[];
-  private singleLevel3ByteView: ByteViews;
-  private singleLevel4Bytes: Uint8Array;
-
   constructor(
     readonly type: ListCompositeType<ValidatorNodeStructType>,
     protected _rootNode: Node,
     cache?: ArrayCompositeTreeViewDUCache
   ) {
     super(type, _rootNode, cache);
-    // each level 3 of validator has 8 chunks, each chunk has 32 bytes
-    this.batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32);
-    this.level3ByteViewsArr = [];
-    for (let i = 0; i < PARALLEL_FACTOR; i++) {
-      const uint8Array = this.batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32);
-      const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength);
-      this.level3ByteViewsArr.push({uint8Array, dataView});
-    }
-    this.singleLevel3ByteView = this.level3ByteViewsArr[0];
-    // each level 4 of validator has 2 chunks for pubkey, each chunk has 32 bytes
-    this.batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32);
-    this.level4BytesArr = [];
-    for (let i = 0; i < PARALLEL_FACTOR; i++) {
-      this.level4BytesArr.push(this.batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32));
-    }
-    this.singleLevel4Bytes = this.level4BytesArr[0];
   }
 
   commit(hashComps: HashComputationGroup | null = null): void {
@@ -66,18 +60,18 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU
Date: Mon, 24 Jun 2024 15:28:43 +0700
Subject: [PATCH 057/113] fix: implement ValidatorTreeViewDU.commit()

---
 .../phase0/viewDU/listValidator.ts | 17 +-------
 .../lodestarTypes/phase0/viewDU/validator.ts  | 41 +++++++++++++------
 2 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
index 9e06a46a..2f018851 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
@@ -23,14 +23,12 @@ for (let i = 0; i < PARALLEL_FACTOR; i++) {
   const dataView = new DataView(uint8Array.buffer,
uint8Array.byteOffset, uint8Array.byteLength); level3ByteViewsArr.push({uint8Array, dataView}); } -const singleLevel3ByteView = level3ByteViewsArr[0]; // each level 4 of validator has 2 chunks for pubkey, each chunk has 32 bytes const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32); const level4BytesArr: Uint8Array[] = []; for (let i = 0; i < PARALLEL_FACTOR; i++) { level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32)); } -const singleLevel4Bytes = level4BytesArr[0]; export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { constructor( @@ -94,20 +92,7 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU; } - const value = this.valueChanged; + if (this._rootNode.h0 === null) { + this.valueToMerkleBytes(singleLevel3ByteView, singleLevel4Bytes); + // level 4 hash + const pubkeyRoot = digestNLevelUnsafe(singleLevel4Bytes, 1); + if (pubkeyRoot.length !== 32) { + throw new Error(`Invalid pubkeyRoot length, expect 32, got ${pubkeyRoot.length}`); + } + singleLevel3ByteView.uint8Array.set(pubkeyRoot, 0); + // level 3 hash + const validatorRoot = digestNLevelUnsafe(singleLevel3ByteView.uint8Array, 3); + if (validatorRoot.length !== 32) { + throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`); + } + const hashObject = byteArrayToHashObject(validatorRoot); + this._rootNode.applyHash(hashObject); + } this.valueChanged = null; - - this._rootNode = this.type.value_toTree(value) as BranchNodeStruct; - // this does not suppor batch hash - this._rootNode.root; } get pubkey(): Uint8Array { @@ -203,10 +219,9 @@ export class ValidatorTreeViewDU extends TreeViewDU; this._rootNode.applyHash(ho); this.valueChanged = null; } @@ -330,4 +345,4 @@ function writeEpochInf(dataView: DataView, offset: number, value: number): void dataView.setUint32(offset, (value / NUMBER_2_POW_32) & 0xffffffff, true); offset += UINT32_SIZE; } -} \ No newline at end of file +} From f1a37d1f1bf2e19027cbd81d038e823d49f849d6 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 24 Jun 2024 15:30:50 +0700 Subject: [PATCH 058/113] fix: revert valueToTree() --- packages/ssz/src/type/containerNodeStruct.ts | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/ssz/src/type/containerNodeStruct.ts b/packages/ssz/src/type/containerNodeStruct.ts index 49a8ffd5..2b069036 100644 --- a/packages/ssz/src/type/containerNodeStruct.ts +++ b/packages/ssz/src/type/containerNodeStruct.ts @@ -108,11 +108,10 @@ export class ContainerNodeStructType private valueToTree( value: ValueOfFields, - hashComps: HashComputationGroup | null = null, - hashCompRootNode: Node | null = null ): Node { - const nodes = this.fieldsEntries.map(({fieldName, fieldType}) => fieldType.value_toTree(value[fieldName])); - const rootNode = subtreeFillToContents(nodes, this.depth, hashComps, hashCompRootNode); - return rootNode; + const uint8Array = new Uint8Array(this.value_serializedSize(value)); + const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); + this.value_serializeToBytes({uint8Array, dataView}, 0, value); + return super.tree_deserializeFromBytes({uint8Array, dataView}, 0, uint8Array.length); } } From dc3f9f2d24b966be10c15b3e6626360d7af15030 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 24 Jun 2024 18:18:40 +0700 Subject: [PATCH 059/113] fix: composite type setters in validator ViewDU --- packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
index 93de762c..6cd089fb 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
@@ -89,7 +89,7 @@ export class ValidatorTreeViewDU extends TreeViewDU
Date: Tue, 25 Jun 2024 09:06:31 +0700
Subject: [PATCH 060/113] chore: remove unused code in Validator ViewDU

---
 .../lodestarTypes/phase0/viewDU/validator.ts  | 75 +------------------
 .../lodestarTypes/phase0/validator.test.ts    | 30 ++++++--
 .../phase0/viewDU/validatorNodeStruct.test.ts | 26 +------
 3 files changed, 26 insertions(+), 105 deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
index 6cd089fb..142ced3f 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
@@ -1,11 +1,10 @@
-import { HashObject, byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256";
+import { HashObject, byteArrayToHashObject } from "@chainsafe/as-sha256";
 import { BranchNodeStruct } from "../../../../src/branchNodeStruct";
 import { ContainerTypeGeneric } from "../../../../src/view/container";
 import { TreeViewDU } from "../../../../src/viewDU/abstract";
 import { ValidatorType } from "../validator";
 import {
   Node,
-  BranchNode,
   digestNLevelUnsafe,
 } from "@chainsafe/persistent-merkle-tree";
 import { ByteViews } from "../../../../src/type/abstract";
@@ -176,21 +175,6 @@ export class ValidatorTreeViewDU extends TreeViewDU LeafNode.fromZero());
-    nodes[0] = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero());
-    viewDU.valueToTree(nodes);
-    const depth = 3;
-    const rootNode = subtreeFillToContents([...nodes], depth);
-    rootNode.root;
-    viewDU.commitToHashObject(rootNode);
+    // same logic as viewDU.commit();
+    // validator has 8 nodes at level 3
+    const singleLevel3Bytes = new Uint8Array(8 * 32);
+    const singleLevel3ByteView = {uint8Array: singleLevel3Bytes, dataView: new DataView(singleLevel3Bytes.buffer)};
+    // validator has 2 nodes at level 4 (pubkey has 48 bytes = 2 nodes)
+    const singleLevel4Bytes = new Uint8Array(2 * 32);
+    viewDU.valueToMerkleBytes(singleLevel3ByteView, singleLevel4Bytes);
+    // level 4 hash
+    const pubkeyRoot = digestNLevelUnsafe(singleLevel4Bytes, 1);
+    if (pubkeyRoot.length !== 32) {
+      throw new Error(`Invalid pubkeyRoot length, expect 32, got ${pubkeyRoot.length}`);
+    }
+    singleLevel3ByteView.uint8Array.set(pubkeyRoot, 0);
+    // level 3 hash
+    const validatorRoot = digestNLevelUnsafe(singleLevel3ByteView.uint8Array, 3);
+    if (validatorRoot.length !== 32) {
+      throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`);
+    }
+    const hashObject = byteArrayToHashObject(validatorRoot);
+    viewDU.commitToHashObject(hashObject);
     const expectedRoot = ValidatorContainer.hashTreeRoot(validators[1]);
     expect(viewDU.node.root).to.be.deep.equal(expectedRoot);
     expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot);
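The test above mirrors the two-step commit flow: the 48-byte pubkey occupies 2 chunks at level 4 and is folded one level into a single root, that root is spliced into chunk 0 of the 8 level-3 chunks, and those 8 chunks are folded three levels into the validator root. The same steps in isolation, using `digestNLevelUnsafe` exactly as these tests import it (buffer sizes as in the diff; the helper name is illustrative):

    import {digestNLevelUnsafe} from "@chainsafe/persistent-merkle-tree";

    // level3: 8 chunks (256 bytes) of one validator; level4: 2 pubkey chunks (64 bytes)
    function hashValidatorChunks(level3: Uint8Array, level4: Uint8Array): Uint8Array {
      // 2 chunks -> 1 chunk: pubkey root
      const pubkeyRoot = digestNLevelUnsafe(level4, 1);
      // the pubkey root becomes chunk 0 of the validator's 8 chunks
      level3.set(pubkeyRoot, 0);
      // 8 chunks -> 1 chunk: validator root; slice() because the output buffer is reused
      return digestNLevelUnsafe(level3, 3).slice();
    }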
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts
index 14d7ed85..96085875 100644
--- a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts
+++ b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts
@@ -1,6 +1,6 @@
-import { Node, BranchNode, LeafNode, subtreeFillToContents, getNodesAtDepth, digest64, digestNLevelUnsafe } from "@chainsafe/persistent-merkle-tree";
-import { validatorToMerkleBytes, validatorToTree } from "../../../../lodestarTypes/phase0/viewDU/validator";
-import { HashObject, hashObjectToByteArray } from "@chainsafe/as-sha256";
+import { digestNLevelUnsafe } from "@chainsafe/persistent-merkle-tree";
+import { validatorToMerkleBytes } from "../../../../lodestarTypes/phase0/viewDU/validator";
+import { HashObject } from "@chainsafe/as-sha256";
 import { ValidatorNodeStruct } from "../../../../lodestarTypes/phase0/validator";
 import { expect } from "chai";
 import { Validator } from "../../../../lodestarTypes/phase0/sszTypes";
@@ -21,26 +21,6 @@ describe("validatorNodeStruct", () => {
     {...seedValidator, effectiveBalance: 32000000000, slashed: true},
   ];
 
-  it("should populate validator value to tree", () => {
-    const nodes: Node[] = Array.from({length: 8}, () => LeafNode.fromZero());
-    nodes[0] = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero());
-    for (const validator of validators) {
-      validatorToTree(nodes, validator);
-      const depth = 3;
-      const rootNode = subtreeFillToContents([...nodes], depth);
-      rootNode.root;
-      const root = new Uint8Array(32);
-      hashObjectToByteArray(rootNode, root, 0);
-      const expectedRootNode = Validator.value_toTree(validator);
-      const expectedNodes = getNodesAtDepth(expectedRootNode, depth, 0, 8);
-      expect(expectedNodes.length).to.be.equal(8);
-      for (let i = 0; i < 8; i++) {
-        expectEqualNode(nodes[i].rootHashObject, expectedNodes[i].rootHashObject, `node ${i}`);
-      }
-      expect(root).to.be.deep.equals(ValidatorNodeStruct.hashTreeRoot(validator));
-    }
-  });
-
   it("should populate validator value to merkle bytes", () => {
     for (const validator of validators) {
       const expectedRoot0 = ValidatorNodeStruct.hashTreeRoot(validator);

From ef6101f03eb7bffa6d0bbf4dc9d20f16f5a553a1 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Tue, 25 Jun 2024 09:09:30 +0700
Subject: [PATCH 061/113] fix: no need to create new validator node in
 commitToHashObject()

---
 .../test/lodestarTypes/phase0/viewDU/validator.ts | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
index 142ced3f..63086944 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
@@ -56,8 +56,7 @@ export class ValidatorTreeViewDU extends TreeViewDU
+      this._rootNode = this.type.value_toTree(this.valueChanged) as BranchNodeStruct<Validator>;
     }
 
     if (this._rootNode.h0 === null) {
@@ -200,12 +199,13 @@ export class ValidatorTreeViewDU extends TreeViewDU
+    // this.valueChanged === null means this viewDU is new
+    if (this.valueChanged !== null) {
+      this._rootNode = this.type.value_toTree(this.valueChanged) as BranchNodeStruct<Validator>;
+    }
     this._rootNode.applyHash(ho);
     this.valueChanged = null;
   }

From 829b6448eccfeca8dba0c6432184a29c17da8fe7 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Tue, 25 Jun 2024 09:39:59 +0700
Subject: [PATCH 062/113] fix: remove getHashComputations() in ListValidator
 ViewDU

---
 .../test/lodestarTypes/phase0/viewDU/listValidator.ts | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
index 2f018851..8a793433 100644
---
a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, Node, digestNLevelUnsafe, getHashComputations, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, Node, digestNLevelUnsafe, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; import { ListCompositeType } from "../../../../src/type/listComposite"; import { ArrayCompositeTreeViewDUCache } from "../../../../src/viewDU/arrayComposite"; import { ListCompositeTreeViewDU } from "../../../../src/viewDU/listComposite"; @@ -43,7 +43,8 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU Date: Sat, 29 Jun 2024 13:31:20 +0700 Subject: [PATCH 063/113] fix: avoid memory allocation in hashtree --- packages/persistent-merkle-tree/package.json | 2 +- .../src/hasher/hashtree.ts | 88 ++++++++++--------- 2 files changed, 46 insertions(+), 44 deletions(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index f47fb9ab..67ae38f8 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -46,7 +46,7 @@ "homepage": "https://github.com/ChainSafe/persistent-merkle-tree#readme", "dependencies": { "@chainsafe/as-sha256": "0.4.2", - "@chainsafe/hashtree": "1.0.0", + "@chainsafe/hashtree": "1.0.1", "@noble/hashes": "^1.3.0" }, "peerDependencies": { diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 39b9dcd6..ce6cfd0b 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,7 +1,7 @@ -import {hash, hashInto} from "@chainsafe/hashtree"; +import {hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; -import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256"; +import { byteArrayToHashObject } from "@chainsafe/as-sha256"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -14,8 +14,11 @@ const MAX_INPUT_SIZE = PARALLEL_FACTOR * 64; const uint8Input = new Uint8Array(MAX_INPUT_SIZE); const uint32Input = new Uint32Array(uint8Input.buffer); const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32); -const uint32Output = new Uint32Array(uint8Output.buffer); - +// having this will cause more memory to extract uint32 +// const uint32Output = new Uint32Array(uint8Output.buffer); +// convenient reusable Uint8Array for hash64 +const hash64Input = uint8Input.subarray(0, 64); +const hash64Output = uint8Output.subarray(0, 32); export const hasher: Hasher = { name: "hashtree", @@ -23,20 +26,15 @@ export const hasher: Hasher = { if (obj1.length !== 32 || obj2.length !== 32) { throw new Error("Invalid input length"); } - uint8Input.set(obj1, 0); - uint8Input.set(obj2, 32); - const hashInput = uint8Input.subarray(0, 64); - const hashOutput = uint8Output.subarray(0, 32); - hashInto(hashInput, hashOutput); - return hashOutput.slice(); + hash64Input.set(obj1, 0); + hash64Input.set(obj2, 32); + hashInto(hash64Input, hash64Output); + return hash64Output.slice(); }, digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject { - hashObjectToUint32Array(obj1, uint32Input, 0); - hashObjectToUint32Array(obj2, uint32Input, 8); - const hashInput = uint8Input.subarray(0, 64); - const hashOutput = uint8Output.subarray(0, 32); - 
hashInto(hashInput, hashOutput); - return uint32ArrayToHashObject(uint32Output, 0); + hashObjectsToUint32Array(obj1, obj2, uint32Input); + hashInto(hash64Input, hash64Output); + return byteArrayToHashObject(hash64Output); }, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes @@ -57,21 +55,20 @@ export const hasher: Hasher = { } let outputLength = Math.floor(inputLength / 2); - let hashOutput: Uint8Array | null = null; + + uint8Input.set(data, 0); + // hash into same buffer + let bufferIn = uint8Input.subarray(0, inputLength); for (let i = nLevel; i > 0; i--) { - uint8Input.set(hashOutput ?? data, 0); - const hashInput = uint8Input.subarray(0, inputLength); - hashOutput = uint8Output.subarray(0, outputLength); - hashInto(hashInput, hashOutput); + const bufferOut = bufferIn.subarray(0, outputLength); + hashInto(bufferIn, bufferOut); + bufferIn = bufferOut; inputLength = outputLength; outputLength = Math.floor(inputLength / 2); } - if (hashOutput === null) { - throw new Error("hashOutput is null"); - } // the result is unsafe as it will be modified later, consumer should save the result if needed - return hashOutput; + return bufferIn; }, // eslint-disable-next-line @typescript-eslint/no-unused-vars batchHashObjects(inputs: HashObject[]): HashObject[] { @@ -90,7 +87,7 @@ export const hasher: Hasher = { if (indexInBatch === batch - 1) { hashInto(uint8Input, uint8Output); for (let j = 0; j < batch / 2; j++) { - outHashObjects.push(uint32ArrayToHashObject(uint32Output, j * 8)); + outHashObjects.push(byteArrayToHashObject(uint8Output.subarray(j * 32, (j + 1) * 32))); } } } @@ -102,7 +99,7 @@ export const hasher: Hasher = { const remainingOutput = uint8Output.subarray(0, remaining * 16); hashInto(remainingInput, remainingOutput); for (let i = 0; i < remaining / 2; i++) { - outHashObjects.push(uint32ArrayToHashObject(uint32Output, i * 8)); + outHashObjects.push(byteArrayToHashObject(remainingOutput.subarray(i * 32, (i + 1) * 32))); } } @@ -131,8 +128,7 @@ export const hasher: Hasher = { if (indexInBatch === PARALLEL_FACTOR - 1) { hashInto(uint8Input, uint8Output); for (const [j, destNode] of destNodes.entries()) { - const outputOffset = j * 8; - destNode.applyHash(uint32ArrayToHashObject(uint32Output, outputOffset)); + destNode.applyHash(byteArrayToHashObject(uint8Output.subarray(j * 32, (j + 1) * 32))); } destNodes = []; } @@ -146,8 +142,7 @@ export const hasher: Hasher = { hashInto(remainingInput, remainingOutput); // destNodes was prepared above for (const [i, destNode] of destNodes.entries()) { - const offset = i * 8; - destNode.applyHash(uint32ArrayToHashObject(uint32Output, offset)); + destNode.applyHash(byteArrayToHashObject(remainingOutput.subarray(i * 32, (i + 1) * 32))); } } } @@ -165,15 +160,22 @@ function hashObjectToUint32Array(obj: HashObject, arr: Uint32Array, offset: numb arr[offset + 7] = obj.h7; } -function uint32ArrayToHashObject(arr: Uint32Array, offset: number): HashObject { - return { - h0: arr[offset], - h1: arr[offset + 1], - h2: arr[offset + 2], - h3: arr[offset + 3], - h4: arr[offset + 4], - h5: arr[offset + 5], - h6: arr[offset + 6], - h7: arr[offset + 7], - }; -} +// note that uint32ArrayToHashObject will cause more memory +function hashObjectsToUint32Array(obj1: HashObject, obj2: HashObject, arr: Uint32Array): void { + arr[0] = obj1.h0; + arr[1] = obj1.h1; + arr[2] = obj1.h2; + arr[3] = obj1.h3; + arr[4] = obj1.h4; + arr[5] = obj1.h5; + arr[6] = obj1.h6; + arr[7] = obj1.h7; + arr[8] = obj2.h0; + arr[9] = obj2.h1; + arr[10] = obj2.h2; + arr[11] = obj2.h3; + 
arr[12] = obj2.h4; + arr[13] = obj2.h5; + arr[14] = obj2.h6; + arr[15] = obj2.h7; +} \ No newline at end of file From e181ded196657afa836edb0ba5e57e19a3d65587 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 26 Jun 2024 16:50:35 +0700 Subject: [PATCH 064/113] feat: support digestNLevelUnsafe using as-sha256 --- packages/persistent-merkle-tree/package.json | 6 +-- .../src/hasher/as-sha256.ts | 36 +++++++++++++++- .../test/unit/hasher.test.ts | 20 +++++++++ setHasher.mjs | 2 +- yarn.lock | 43 ++++++++++++------- 5 files changed, 86 insertions(+), 21 deletions(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index 67ae38f8..f44d77af 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -50,8 +50,8 @@ "@noble/hashes": "^1.3.0" }, "peerDependencies": { - "@chainsafe/hashtree-linux-x64-gnu": "1.0.0", - "@chainsafe/hashtree-linux-arm64-gnu": "1.0.0", - "@chainsafe/hashtree-darwin-arm64": "1.0.0" + "@chainsafe/hashtree-linux-x64-gnu": "1.0.1", + "@chainsafe/hashtree-linux-arm64-gnu": "1.0.1", + "@chainsafe/hashtree-darwin-arm64": "1.0.1" } } diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 670f096b..bb589b86 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,13 +1,45 @@ -import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs} from "@chainsafe/as-sha256"; +import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs, hashInto} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; +// each validator needs to digest 8 chunks of 32 bytes = 4 hashes +// support up to 4 validators +const MAX_HASH = 16; +const MAX_INPUT_SIZE = MAX_HASH * 64; +const buffer = new Uint8Array(MAX_INPUT_SIZE); + export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects, + // given nLevel = 3 + // digest multiple of 8 chunks = 256 bytes + // the result is multiple of 1 chunk = 32 bytes + // this is the same to hashTreeRoot() of multiple validators digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { - throw new Error("Not implemented"); + let inputLength = data.length; + const bytesInBatch = Math.pow(2, nLevel) * 32; + if (nLevel < 1) { + throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`); + } + if (inputLength % bytesInBatch !== 0) { + throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`); + } + if (inputLength > MAX_INPUT_SIZE) { + throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); + } + + buffer.set(data, 0); + for (let i = nLevel; i > 0; i--) { + const outputLength = Math.floor(inputLength / 2); + const hashInput = buffer.subarray(0, inputLength); + const hashOutput = buffer.subarray(0, outputLength); + hashInto(hashInput, hashOutput); + inputLength = outputLength + } + + // the result is unsafe as it will be modified later, consumer should save the result if needed + return buffer.subarray(0, inputLength); }, batchHashObjects: (inputs: HashObject[]) => { // as-sha256 uses SIMD for batch hash diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 
a2edf1d2..37dfe4d7 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -5,6 +5,7 @@ import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {linspace} from "../utils/misc"; import {buildComparisonTrees} from "../utils/tree"; +import {LeafNode, subtreeFillToContents} from "../../src"; const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher]; @@ -72,4 +73,23 @@ describe("hashers", function () { }); }); +describe("hasher.digestNLevelUnsafe", function () { + const hashers = [hashtreeHasher, asSha256Hasher]; + for (const hasher of hashers) { + const numValidators = [1, 2, 3, 4]; + for (const numValidator of numValidators) { + it (`${hasher.name} digestNLevelUnsafe ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => { + const nodes = Array.from({length: 8 * numValidator}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i + numValidator))); + const hashInput = Buffer.concat(nodes.map((node) => node.root)); + // slice() because output is unsafe + const hashOutput = hasher.digestNLevelUnsafe(hashInput, 3).slice(); + for (let i = 0; i < numValidator; i++) { + const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root; + expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root); + } + }); + } + } +}); + // TODO - batch: test more methods diff --git a/setHasher.mjs b/setHasher.mjs index c210cdba..821986ea 100644 --- a/setHasher.mjs +++ b/setHasher.mjs @@ -1,5 +1,5 @@ // Set the hasher to hashtree // Used to run benchmarks with with visibility into hashtree performance, useful for Lodestar import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js"; -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js"; +import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js"; setHasher(hasher); diff --git a/yarn.lock b/yarn.lock index 1c14c626..b74bf846 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1252,18 +1252,6 @@ core-js "2.6.10" require-resolve "0.0.2" -"@chainsafe/hashtree-linux-arm64-gnu@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-arm64-gnu/-/hashtree-linux-arm64-gnu-1.0.0.tgz#168db259636261d9f3612354cad9f730a4be7110" - integrity sha512-XdYEV6z503Oxa7+mPtUEq9KoKfBAs0BcxGaRiDttCbZK2/J7CcTlobBGd7KMxJ/dQ4IUonaXsob0BnXBcrlwuw== - -"@chainsafe/hashtree@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@chainsafe/hashtree/-/hashtree-1.0.0.tgz#529439fb07299758ca5bbe69a00d1dc4ad83a949" - integrity sha512-qft0MZiLl5jbe8omZaSp1vQ2YCO9qCb262+5qD1vsgN6l1ga3ZFKLyNI6xvwbhC7ZnzZd46vr+p+KvdUIgruOw== - optionalDependencies: - "@chainsafe/hashtree-linux-arm64-gnu" "1.0.0" - "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" @@ -11303,7 +11291,7 @@ streamroller@^3.1.5: debug "^4.3.4" fs-extra "^8.1.0" -"string-width-cjs@npm:string-width@^4.2.0", "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -11320,6 +11308,15 @@ streamroller@^3.1.5: 
is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" +"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -11398,7 +11395,7 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -11426,6 +11423,13 @@ strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-ansi@^7.0.1: version "7.1.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" @@ -12620,7 +12624,7 @@ workerpool@6.1.0: resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.1.0.tgz#a8e038b4c94569596852de7a8ea4228eefdeb37b" integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg== -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== @@ -12647,6 +12651,15 @@ wrap-ansi@^6.0.1, wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrap-ansi@^8.1.0: version "8.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" From 158601ab77f19466014106969374024600aacc2b Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 29 Jun 2024 20:44:59 +0700 Subject: [PATCH 065/113] fix: use hashtree in setHasher.mjs for test --- setHasher.mjs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setHasher.mjs b/setHasher.mjs index 821986ea..c210cdba 100644 --- a/setHasher.mjs +++ b/setHasher.mjs @@ -1,5 +1,5 @@ // Set the hasher to hashtree // Used to run benchmarks with with visibility into hashtree performance, useful for Lodestar import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js"; -import {hasher} from 
"@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js"; +import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js"; setHasher(hasher); From 4037ffea9b1a5cc9bfffa12172dd1b3787aa5a49 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sun, 30 Jun 2024 10:56:13 +0700 Subject: [PATCH 066/113] feat: do not allocate temp HashObjects --- .../src/hasher/as-sha256.ts | 5 +++-- .../src/hasher/hashtree.ts | 11 ++++++----- .../persistent-merkle-tree/src/hasher/noble.ts | 8 +++++--- .../persistent-merkle-tree/src/hasher/types.ts | 2 +- packages/persistent-merkle-tree/src/node.ts | 2 +- .../test/perf/hasher.test.ts | 3 ++- .../test/unit/hasher.test.ts | 17 ++++++++++++----- .../phase0/viewDU/listValidator.ts | 5 ++--- .../lodestarTypes/phase0/viewDU/validator.ts | 11 +++++------ .../ssz/test/perf/eth2/hashTreeRoot.test.ts | 4 +++- .../unit/lodestarTypes/phase0/validator.test.ts | 6 ++---- 11 files changed, 42 insertions(+), 32 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index bb589b86..3c91aa4d 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,4 +1,4 @@ -import {digest2Bytes32, digest64HashObjects, HashObject, batchHash4HashObjectInputs, hashInto} from "@chainsafe/as-sha256"; +import {digest2Bytes32, digest64HashObjectsInto, digest64HashObjects, HashObject, batchHash4HashObjectInputs, hashInto} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; @@ -11,7 +11,7 @@ const buffer = new Uint8Array(MAX_INPUT_SIZE); export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, - digest64HashObjects, + digest64HashObjects: digest64HashObjectsInto, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes // the result is multiple of 1 chunk = 32 bytes @@ -123,6 +123,7 @@ export const hasher: Hasher = { src1_3 !== null && dest3 !== null ) { + // TODO - batch: find a way not allocate here const [o0, o1, o2, o3] = batchHash4HashObjectInputs([ src0_0, src1_0, diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index ce6cfd0b..625ca6a8 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -2,6 +2,7 @@ import {hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; import { byteArrayToHashObject } from "@chainsafe/as-sha256"; +import { byteArrayIntoHashObject } from "@chainsafe/as-sha256/lib/hashObject"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -31,10 +32,10 @@ export const hasher: Hasher = { hashInto(hash64Input, hash64Output); return hash64Output.slice(); }, - digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject { - hashObjectsToUint32Array(obj1, obj2, uint32Input); + digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void { + hashObjectsToUint32Array(left, right, uint32Input); hashInto(hash64Input, hash64Output); - return byteArrayToHashObject(hash64Output); + byteArrayIntoHashObject(hash64Output, parent); }, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes @@ -128,7 +129,7 @@ export const hasher: Hasher = { if (indexInBatch === PARALLEL_FACTOR - 1) { hashInto(uint8Input, uint8Output); for (const [j, destNode] of destNodes.entries()) { - 
destNode.applyHash(byteArrayToHashObject(uint8Output.subarray(j * 32, (j + 1) * 32))); + byteArrayIntoHashObject(uint8Output.subarray(j * 32, (j + 1) * 32), destNode); } destNodes = []; } @@ -142,7 +143,7 @@ export const hasher: Hasher = { hashInto(remainingInput, remainingOutput); // destNodes was prepared above for (const [i, destNode] of destNodes.entries()) { - destNode.applyHash(byteArrayToHashObject(remainingOutput.subarray(i * 32, (i + 1) * 32))); + byteArrayIntoHashObject(remainingOutput.subarray(i * 32, (i + 1) * 32), destNode); } } } diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 9766a8d5..d8f115b0 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -1,14 +1,16 @@ import {sha256} from "@noble/hashes/sha256"; -import {digest64HashObjects, HashObject} from "@chainsafe/as-sha256"; +import {digest64HashObjects, HashObject, byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {hashObjectToUint8Array, uint8ArrayToHashObject} from "./util"; +import {hashObjectToUint8Array} from "./util"; const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().update(a).update(b).digest(); export const hasher: Hasher = { name: "noble", digest64, - digest64HashObjects: (a, b) => uint8ArrayToHashObject(digest64(hashObjectToUint8Array(a), hashObjectToUint8Array(b))), + digest64HashObjects: (left, right, parent) => { + byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), parent); + }, digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { throw new Error("Not implemented"); }, diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 1f2aa810..64dd7993 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -13,7 +13,7 @@ export type Hasher = { /** * Hash two 32-byte HashObjects */ - digest64HashObjects(a: HashObject, b: HashObject): HashObject; + digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void; /** * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes. 
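With the new signature above, `digest64HashObjects` writes into a caller-provided destination instead of returning a fresh `HashObject`, so hot paths can keep a single scratch object alive across iterations. A typical call shape (the scratch literal and `parentOf` helper are illustrative; `HashObject` is the 8-field uint32 struct this package re-exports):

    import {hasher, HashObject} from "@chainsafe/persistent-merkle-tree";

    // one reusable destination; digest64HashObjects fills h0..h7 in place
    const scratch: HashObject = {h0: 0, h1: 0, h2: 0, h3: 0, h4: 0, h5: 0, h6: 0, h7: 0};

    function parentOf(left: HashObject, right: HashObject): HashObject {
      hasher.digest64HashObjects(left, right, scratch);
      return scratch; // copy the fields out if the result must outlive the next call
    }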
diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index bbda88e9..ac9ebb7d 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -94,7 +94,7 @@ export class BranchNode extends Node { get rootHashObject(): HashObject { if (this.h0 === null) { - super.applyHash(hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject)); + hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject, this); } return this; } diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index 9fd535fd..b30b7dab 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -40,8 +40,9 @@ describe("hasher", function () { }), beforeEach: (params) => params, fn: ({obj1, obj2}) => { + const result = {} as HashObject; for (let i = 0; i < runsFactor; i++) { - for (let j = 0; j < iterations; j++) hasher.digest64HashObjects(obj1, obj2); + for (let j = 0; j < iterations; j++) hasher.digest64HashObjects(obj1, obj2, result); } }, runsFactor, diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 37dfe4d7..afc75186 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -5,7 +5,7 @@ import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {linspace} from "../utils/misc"; import {buildComparisonTrees} from "../utils/tree"; -import {LeafNode, subtreeFillToContents} from "../../src"; +import {HashObject, LeafNode, subtreeFillToContents} from "../../src"; const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher]; @@ -19,7 +19,8 @@ describe("hashers", function () { const obj1 = uint8ArrayToHashObject(root1); const obj2 = uint8ArrayToHashObject(root2); - const obj = hasher.digest64HashObjects(obj1, obj2); + const obj = {} as HashObject; + hasher.digest64HashObjects(obj1, obj2, obj); const newRoot = hashObjectToUint8Array(obj); expectEqualHex(root, newRoot); }); @@ -41,9 +42,15 @@ describe("hashers", function () { const hashObject1 = uint8ArrayToHashObject(root1); const root2 = Buffer.alloc(32, 0xff); const hashObject2 = uint8ArrayToHashObject(root2); - const hash1 = hashObjectToUint8Array(nobleHasher.digest64HashObjects(hashObject1, hashObject2)); - const hash2 = hashObjectToUint8Array(asSha256Hasher.digest64HashObjects(hashObject1, hashObject2)); - const hash3 = hashObjectToUint8Array(hashtreeHasher.digest64HashObjects(hashObject1, hashObject2)); + const ho1 = {} as HashObject; + nobleHasher.digest64HashObjects(hashObject1, hashObject2, ho1) + const hash1 = hashObjectToUint8Array(ho1); + const ho2 = {} as HashObject; + asSha256Hasher.digest64HashObjects(hashObject1, hashObject2, ho2) + const hash2 = hashObjectToUint8Array(ho2); + const ho3 = {} as HashObject; + hashtreeHasher.digest64HashObjects(hashObject1, hashObject2, ho3); + const hash3 = hashObjectToUint8Array(ho3); expectEqualHex(hash1, hash2); expectEqualHex(hash1, hash3); }); diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index 8a793433..941fe5de 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ 
b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -5,7 +5,6 @@ import { ListCompositeTreeViewDU } from "../../../../src/viewDU/listComposite"; import { ValidatorNodeStructType } from "../validator"; import { ValidatorTreeViewDU } from "./validator"; import { ByteViews } from "../../../../src"; -import { byteArrayToHashObject } from "@chainsafe/as-sha256"; /** * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks @@ -78,9 +77,9 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { const viewIndex = indicesChanged[i - j]; const indexInBatch = (i - j) % PARALLEL_FACTOR; - const hashObject = byteArrayToHashObject(validatorRoots.subarray(indexInBatch * 32, (indexInBatch + 1) * 32)); + const validatorRoot = validatorRoots.subarray(indexInBatch * 32, (indexInBatch + 1) * 32); const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; - viewChanged.commitToHashObject(hashObject); + viewChanged.commitToRoot(validatorRoot); nodesChanged.push({index: viewIndex, node: viewChanged.node}); // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal this.nodes[viewIndex] = viewChanged.node; diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts index 63086944..31b67f99 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts @@ -1,4 +1,4 @@ -import { HashObject, byteArrayToHashObject } from "@chainsafe/as-sha256"; +import { byteArrayIntoHashObject } from "@chainsafe/as-sha256"; import { BranchNodeStruct } from "../../../../src/branchNodeStruct"; import { ContainerTypeGeneric } from "../../../../src/view/container"; import { TreeViewDU } from "../../../../src/viewDU/abstract"; @@ -72,8 +72,7 @@ export class ValidatorTreeViewDU extends TreeViewDU; } - this._rootNode.applyHash(ho); + byteArrayIntoHashObject(root, this._rootNode); this.valueChanged = null; } diff --git a/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts b/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts index 00923197..7723adc1 100644 --- a/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts +++ b/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts @@ -23,6 +23,7 @@ import { hash64, } from "../../../src"; import {CompositeTypeAny} from "../../../src/type/composite"; +import { HashObject } from "@chainsafe/as-sha256"; describe("HashTreeRoot frequent eth2 objects", () => { itBenchHashTreeRoot(sszPhase0.Attestation, getAttestation(0)); @@ -129,8 +130,9 @@ describe("HashTreeRoot individual components", () => { for (let i = 0; i < count; i++) hash64(buf, buf); }); + const hashResult = {} as HashObject; itBench(`hashTwoObjects x${count}`, () => { - for (let i = 0; i < count; i++) hasher.digest64HashObjects(ho, ho); + for (let i = 0; i < count; i++) hasher.digest64HashObjects(ho, ho, hashResult); }); } diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts index b6564d43..04beee42 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts @@ -1,10 +1,9 @@ -import { BranchNode, LeafNode, Node, digestNLevelUnsafe, subtreeFillToContents } from "@chainsafe/persistent-merkle-tree"; +import {digestNLevelUnsafe} from "@chainsafe/persistent-merkle-tree"; import {ContainerType} from "../../../../../ssz/src/type/container"; import {ssz} from 
"../../../lodestarTypes"; import {ValidatorType} from "../../../lodestarTypes/phase0/validator"; import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validator"; import { expect } from "chai"; -import { byteArrayToHashObject } from "@chainsafe/as-sha256"; const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); @@ -65,8 +64,7 @@ describe("Validator ssz types", function () { if (validatorRoot.length !== 32) { throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`); } - const hashObject = byteArrayToHashObject(validatorRoot); - viewDU.commitToHashObject(hashObject); + viewDU.commitToRoot(validatorRoot); const expectedRoot = ValidatorContainer.hashTreeRoot(validators[1]); expect(viewDU.node.root).to.be.deep.equal(expectedRoot); expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); From b41b28ec1d45bdd0e4b23134ecd86cd17aa65dd5 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 1 Jul 2024 11:22:28 +0700 Subject: [PATCH 067/113] chore: benchmark validator hashTreeRoot time --- .../ssz/test/perf/eth2/validators.test.ts | 45 +++++++++++++++++-- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/packages/ssz/test/perf/eth2/validators.test.ts b/packages/ssz/test/perf/eth2/validators.test.ts index 2cd92faf..81820e74 100644 --- a/packages/ssz/test/perf/eth2/validators.test.ts +++ b/packages/ssz/test/perf/eth2/validators.test.ts @@ -1,7 +1,7 @@ -import {itBench} from "@dapplion/benchmark"; +import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {Validator} from "../../lodestarTypes/phase0/types"; -import {ValidatorContainer, ValidatorNodeStruct} from "../../lodestarTypes/phase0/sszTypes"; -import {CompositeViewDU} from "../../../src"; +import {ValidatorContainer, ValidatorNodeStruct, Validators} from "../../lodestarTypes/phase0/sszTypes"; +import {BranchNodeStruct, CompositeViewDU} from "../../../src"; const validatorStruct: Validator = { pubkey: Buffer.alloc(48, 0xdd), @@ -49,3 +49,42 @@ describe("Validator vs ValidatorLeafNodeStruct", () => { } } }); + +describe("ContainerNodeStructViewDU vs ValidatorViewDU hashtreeroot", () => { + // ListValidatorTreeViewDU commits every 4 validators in batch + const listValidator = Validators.toViewDU(Array.from({length: 4}, () => validatorStruct)); + const nodes: BranchNodeStruct[] = []; + for (let i = 0; i < listValidator.length; i++) { + nodes.push(listValidator.get(i).node as BranchNodeStruct); + } + + // this does not create validator tree every time, and it compute roots in batch + itBench({ + id: "ValidatorViewDU hashTreeRoot", + beforeEach: () => { + for (let i = 0; i < listValidator.length; i++) { + listValidator.get(i).exitEpoch = 20242024; + } + }, + fn: () => { + listValidator.commit(); + }, + }) + + + // this needs to create validator tree every time + itBench({ + id: "ContainerNodeStructViewDU hashTreeRoot", + beforeEach: () => { + for (const node of nodes) { + node.value.exitEpoch = 20242024; + node.h0 = null as unknown as number; + } + }, + fn: () => { + for (const node of nodes) { + node.root; + } + }, + }); +}); From b68f729dc066da370108e7a530dd70efe0177faa Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 1 Jul 2024 15:18:12 +0700 Subject: [PATCH 068/113] fix: lint --- packages/persistent-merkle-tree/package.json | 1 + .../src/hasher/as-sha256.ts | 15 ++++- .../src/hasher/hashtree.ts | 10 ++-- .../src/hasher/noble.ts | 2 +- .../src/hasher/types.ts | 2 +- packages/persistent-merkle-tree/src/tree.ts | 3 +- 
packages/ssz/src/type/byteArray.ts | 4 +- packages/ssz/src/type/containerNodeStruct.ts | 6 +- packages/ssz/src/type/optional.ts | 4 +- packages/ssz/src/type/union.ts | 4 +- packages/ssz/src/viewDU/arrayBasic.ts | 10 +++- packages/ssz/src/viewDU/container.ts | 10 +++- .../lodestarTypes/phase0/listValidator.ts | 12 ++-- .../test/lodestarTypes/phase0/validator.ts | 2 +- .../phase0/viewDU/listValidator.ts | 16 +++--- .../lodestarTypes/phase0/viewDU/validator.ts | 55 ++++++++++--------- .../ssz/test/perf/eth2/hashTreeRoot.test.ts | 2 +- .../ssz/test/perf/eth2/validators.test.ts | 7 +-- .../phase0/listValidator.test.ts | 39 +++++++------ .../lodestarTypes/phase0/validator.test.ts | 2 +- .../phase0/viewDU/validatorNodeStruct.test.ts | 24 ++------ 21 files changed, 127 insertions(+), 103 deletions(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index f44d77af..5702c9d2 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -20,6 +20,7 @@ "clean": "rm -rf lib", "build": "tsc", "lint": "eslint --color --ext .ts src/", + "lint:fix": "yarn run lint --fix", "benchmark:files": "node --max-old-space-size=4096 --expose-gc -r ts-node/register ../../node_modules/.bin/benchmark", "benchmark": "yarn benchmark:files 'test/perf/*.test.ts'", "benchmark:local": "yarn benchmark --local", diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 3c91aa4d..5e356ada 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -1,4 +1,11 @@ -import {digest2Bytes32, digest64HashObjectsInto, digest64HashObjects, HashObject, batchHash4HashObjectInputs, hashInto} from "@chainsafe/as-sha256"; +import { + digest2Bytes32, + digest64HashObjectsInto, + digest64HashObjects, + HashObject, + batchHash4HashObjectInputs, + hashInto, +} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; @@ -23,7 +30,9 @@ export const hasher: Hasher = { throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`); } if (inputLength % bytesInBatch !== 0) { - throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`); + throw new Error( + `Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}` + ); } if (inputLength > MAX_INPUT_SIZE) { throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); @@ -35,7 +44,7 @@ export const hasher: Hasher = { const hashInput = buffer.subarray(0, inputLength); const hashOutput = buffer.subarray(0, outputLength); hashInto(hashInput, hashOutput); - inputLength = outputLength + inputLength = outputLength; } // the result is unsafe as it will be modified later, consumer should save the result if needed diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 625ca6a8..24bfdd6e 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,8 +1,8 @@ import {hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; -import { byteArrayToHashObject } from "@chainsafe/as-sha256"; -import { byteArrayIntoHashObject } from 
"@chainsafe/as-sha256/lib/hashObject"; +import {byteArrayToHashObject} from "@chainsafe/as-sha256"; +import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -49,7 +49,9 @@ export const hasher: Hasher = { throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`); } if (inputLength % bytesInBatch !== 0) { - throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`); + throw new Error( + `Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}` + ); } if (inputLength > MAX_INPUT_SIZE) { throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); @@ -179,4 +181,4 @@ function hashObjectsToUint32Array(obj1: HashObject, obj2: HashObject, arr: Uint3 arr[13] = obj2.h5; arr[14] = obj2.h6; arr[15] = obj2.h7; -} \ No newline at end of file +} diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index d8f115b0..78999e7d 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -11,7 +11,7 @@ export const hasher: Hasher = { digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), parent); }, - digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { + digestNLevelUnsafe(): Uint8Array { throw new Error("Not implemented"); }, batchHashObjects: (inputs: HashObject[]) => { diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 64dd7993..b6591cdb 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -19,7 +19,7 @@ export type Hasher = { * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes. * The result is unsafe as it will be overwritten by the next call. */ - digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array + digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array; /** * Batch hash 2 * n HashObjects, return n HashObjects output */ diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts index 1f6b8ac3..1ade6129 100644 --- a/packages/persistent-merkle-tree/src/tree.ts +++ b/packages/persistent-merkle-tree/src/tree.ts @@ -1,6 +1,6 @@ import {zeroNode} from "./zeroNode"; import {Gindex, GindexBitstring, convertGindexToBitstring} from "./gindex"; -import {Node, LeafNode, BranchNode, HashComputation, HashComputationGroup, arrayAtIndex} from "./node"; +import {Node, LeafNode, BranchNode, HashComputationGroup, arrayAtIndex} from "./node"; import {createNodeFromProof, createProof, Proof, ProofInput} from "./proof"; import {createSingleProof} from "./proof/single"; @@ -799,7 +799,6 @@ export function findDiffDepthi(from: number, to: number): number { return findDiffDepthi32Bits(from, to); } - /** * Returns true if the `index` at `depth` is a left node, false if it is a right node. 
* diff --git a/packages/ssz/src/type/byteArray.ts b/packages/ssz/src/type/byteArray.ts index b6dbb128..48c5263f 100644 --- a/packages/ssz/src/type/byteArray.ts +++ b/packages/ssz/src/type/byteArray.ts @@ -1,4 +1,4 @@ -import {concatGindices, Gindex, HashComputationGroup, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; +import {concatGindices, Gindex, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; import {splitIntoRootChunks} from "../util/merkleize"; import {ByteViews} from "./abstract"; @@ -38,7 +38,7 @@ export abstract class ByteArrayType extends CompositeType return new BranchNodeStruct(this.valueToTree.bind(this), value); } - private valueToTree( - value: ValueOfFields, - ): Node { + private valueToTree(value: ValueOfFields): Node { const uint8Array = new Uint8Array(this.value_serializedSize(value)); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); this.value_serializeToBytes({uint8Array, dataView}, 0, value); diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index d2d8fb0c..c82df001 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -1,4 +1,4 @@ -import {concatGindices, Gindex, HashComputationGroup, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; +import {concatGindices, Gindex, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; import {mixInLength} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -76,7 +76,7 @@ export class OptionalType> extends CompositeTy // TODO add an OptionalViewDU // TODO - batch - commitViewDU(view: ValueOfType, hashComps: HashComputationGroup | null = null): Node { + commitViewDU(view: ValueOfType): Node { return this.value_toTree(view); } diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index ac60e1b7..5bd664e0 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -1,4 +1,4 @@ -import {concatGindices, getNode, Gindex, HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {concatGindices, getNode, Gindex, Node, Tree} from "@chainsafe/persistent-merkle-tree"; import {mixInLength} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -107,7 +107,7 @@ export class UnionType[]> extends CompositeType< } // TODO - batch - commitViewDU(view: ValueOfTypes, hashComps: HashComputationGroup | null = null): Node { + commitViewDU(view: ValueOfTypes): Node { return this.value_toTree(view); } diff --git a/packages/ssz/src/viewDU/arrayBasic.ts b/packages/ssz/src/viewDU/arrayBasic.ts index fee0e801..159c525a 100644 --- a/packages/ssz/src/viewDU/arrayBasic.ts +++ b/packages/ssz/src/viewDU/arrayBasic.ts @@ -1,4 +1,12 @@ -import {getHashComputations, getNodeAtDepth, getNodesAtDepth, HashComputationGroup, LeafNode, Node, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import { + getHashComputations, + getNodeAtDepth, + getNodesAtDepth, + HashComputationGroup, + LeafNode, + Node, + setNodesAtDepth, +} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {BasicType} from "../type/basic"; import {ArrayBasicType} from "../view/arrayBasic"; diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts index e0c16cdd..ed01b958 100644 --- 
a/packages/ssz/src/viewDU/container.ts +++ b/packages/ssz/src/viewDU/container.ts @@ -1,10 +1,16 @@ -import {getHashComputations, getNodeAtDepth, HashComputationGroup, LeafNode, Node, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import { + getHashComputations, + getNodeAtDepth, + HashComputationGroup, + LeafNode, + Node, + setNodesAtDepth, +} from "@chainsafe/persistent-merkle-tree"; import {ByteViews, Type} from "../type/abstract"; import {BasicType, isBasicType} from "../type/basic"; import {CompositeType, isCompositeType, CompositeTypeAny} from "../type/composite"; import {ContainerTypeGeneric} from "../view/container"; import {TreeViewDU} from "./abstract"; -import { isNullOrUndefined } from "util"; /* eslint-disable @typescript-eslint/member-ordering */ diff --git a/packages/ssz/test/lodestarTypes/phase0/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/listValidator.ts index ef189c85..66cec422 100644 --- a/packages/ssz/test/lodestarTypes/phase0/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/listValidator.ts @@ -1,8 +1,8 @@ -import { ListCompositeType } from "../../../src/type/listComposite"; -import { Node } from "@chainsafe/persistent-merkle-tree"; -import { ListCompositeTreeViewDU } from "../../../src/viewDU/listComposite"; -import { ValidatorNodeStructType } from "./validator"; -import { ListValidatorTreeViewDU } from "./viewDU/listValidator"; +import {ListCompositeType} from "../../../src/type/listComposite"; +import {Node} from "@chainsafe/persistent-merkle-tree"; +import {ListCompositeTreeViewDU} from "../../../src/viewDU/listComposite"; +import {ValidatorNodeStructType} from "./validator"; +import {ListValidatorTreeViewDU} from "./viewDU/listValidator"; export class ListValidatorType extends ListCompositeType { constructor(limit: number) { @@ -12,4 +12,4 @@ export class ListValidatorType extends ListCompositeType { return new ListValidatorTreeViewDU(this, node, cache as any); } -} \ No newline at end of file +} diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts b/packages/ssz/test/lodestarTypes/phase0/validator.ts index 33d4cc3d..758d95d7 100644 --- a/packages/ssz/test/lodestarTypes/phase0/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts @@ -2,7 +2,7 @@ import {ByteViews} from "../../../src/type/abstract"; import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct"; import {ValueOfFields} from "../../../src/view/container"; import * as primitiveSsz from "../primitive/sszTypes"; -import { ValidatorTreeViewDU } from "./viewDU/validator"; +import {ValidatorTreeViewDU} from "./viewDU/validator"; import {Node} from "@chainsafe/persistent-merkle-tree"; const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index 941fe5de..487b8659 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -1,10 +1,10 @@ import {HashComputationGroup, Node, digestNLevelUnsafe, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; -import { ListCompositeType } from "../../../../src/type/listComposite"; -import { ArrayCompositeTreeViewDUCache } from "../../../../src/viewDU/arrayComposite"; -import { ListCompositeTreeViewDU } from "../../../../src/viewDU/listComposite"; -import { ValidatorNodeStructType } from "../validator"; -import { ValidatorTreeViewDU } from 
"./validator"; -import { ByteViews } from "../../../../src"; +import {ListCompositeType} from "../../../../src/type/listComposite"; +import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite"; +import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite"; +import {ValidatorNodeStructType} from "../validator"; +import {ValidatorTreeViewDU} from "./validator"; +import {ByteViews} from "../../../../src"; /** * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks @@ -71,7 +71,9 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts index 31b67f99..d606ec7f 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts @@ -1,13 +1,10 @@ -import { byteArrayIntoHashObject } from "@chainsafe/as-sha256"; -import { BranchNodeStruct } from "../../../../src/branchNodeStruct"; -import { ContainerTypeGeneric } from "../../../../src/view/container"; -import { TreeViewDU } from "../../../../src/viewDU/abstract"; -import { ValidatorType } from "../validator"; -import { - Node, - digestNLevelUnsafe, -} from "@chainsafe/persistent-merkle-tree"; -import { ByteViews } from "../../../../src/type/abstract"; +import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; +import {BranchNodeStruct} from "../../../../src/branchNodeStruct"; +import {ContainerTypeGeneric} from "../../../../src/view/container"; +import {TreeViewDU} from "../../../../src/viewDU/abstract"; +import {ValidatorType} from "../validator"; +import {Node, digestNLevelUnsafe} from "@chainsafe/persistent-merkle-tree"; +import {ByteViews} from "../../../../src/type/abstract"; type Validator = { pubkey: Uint8Array; withdrawalCredentials: Uint8Array; @@ -19,7 +16,6 @@ type Validator = { withdrawableEpoch: number; }; -const numFields = 8; const NUMBER_2_POW_32 = 2 ** 32; /* * Below constants are respective to their ssz type in `ValidatorType`. 
@@ -218,21 +214,30 @@ export class ValidatorTreeViewDU extends TreeViewDU { itBenchHashTreeRoot(sszPhase0.Attestation, getAttestation(0)); diff --git a/packages/ssz/test/perf/eth2/validators.test.ts b/packages/ssz/test/perf/eth2/validators.test.ts index 81820e74..3e15ce24 100644 --- a/packages/ssz/test/perf/eth2/validators.test.ts +++ b/packages/ssz/test/perf/eth2/validators.test.ts @@ -1,4 +1,4 @@ -import {itBench, setBenchOpts} from "@dapplion/benchmark"; +import {itBench} from "@dapplion/benchmark"; import {Validator} from "../../lodestarTypes/phase0/types"; import {ValidatorContainer, ValidatorNodeStruct, Validators} from "../../lodestarTypes/phase0/sszTypes"; import {BranchNodeStruct, CompositeViewDU} from "../../../src"; @@ -69,8 +69,7 @@ describe("ContainerNodeStructViewDU vs ValidatorViewDU hashtreeroot", () => { fn: () => { listValidator.commit(); }, - }) - + }); // this needs to create validator tree every time itBench({ @@ -81,7 +80,7 @@ describe("ContainerNodeStructViewDU vs ValidatorViewDU hashtreeroot", () => { node.h0 = null as unknown as number; } }, - fn: () => { + fn: () => { for (const node of nodes) { node.root; } diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts index 438ebe7f..6602afed 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts @@ -1,12 +1,10 @@ -import { ListCompositeType } from "../../../../src/type/listComposite"; -import { ValidatorType } from "../../../lodestarTypes/phase0/validator"; -import { - preset, -} from "../../../lodestarTypes/params"; -import { ssz } from "../../../lodestarTypes"; -import { expect } from "chai"; -import { ContainerType } from "../../../../src/type/container"; -import { Validator } from "../../../lodestarTypes/phase0"; +import {ListCompositeType} from "../../../../src/type/listComposite"; +import {ValidatorType} from "../../../lodestarTypes/phase0/validator"; +import {preset} from "../../../lodestarTypes/params"; +import {ssz} from "../../../lodestarTypes"; +import {expect} from "chai"; +import {ContainerType} from "../../../../src/type/container"; +import {Validator} from "../../../lodestarTypes/phase0"; const {VALIDATOR_REGISTRY_LIMIT} = preset; describe("ListValidator ssz type", function () { @@ -25,8 +23,11 @@ describe("ListValidator ssz type", function () { const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); const oldValidatorsType = new ListCompositeType(ValidatorContainer, VALIDATOR_REGISTRY_LIMIT); for (const numValidators of testCases) { - it (`should commit ${numValidators} validators`, () => { - const validators = Array.from({length: numValidators}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); + it(`should commit ${numValidators} validators`, () => { + const validators = Array.from({length: numValidators}, (_, i) => ({ + ...seedValidator, + withdrawableEpoch: seedValidator.withdrawableEpoch + i, + })); const oldViewDU = oldValidatorsType.toViewDU(validators); const newViewDU = ssz.phase0.Validators.toViewDU(validators); // modify all validators @@ -39,11 +40,14 @@ describe("ListValidator ssz type", function () { }); } - const testCases2 = [[1], [3, 5], [1,9, 7]]; + const testCases2 = [[1], [3, 5], [1, 9, 7]]; const numValidator = 33; for (const modifiedIndices of testCases2) { it(`should modify ${modifiedIndices.length} 
validators`, () => { - const validators = Array.from({length: numValidator}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); + const validators = Array.from({length: numValidator}, (_, i) => ({ + ...seedValidator, + withdrawableEpoch: seedValidator.withdrawableEpoch + i, + })); const oldViewDU = oldValidatorsType.toViewDU(validators); const newViewDU = ssz.phase0.Validators.toViewDU(validators); for (const index of modifiedIndices) { @@ -52,13 +56,16 @@ describe("ListValidator ssz type", function () { } expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot()); expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize()); - }) + }); } const testCases3 = [1, 3, 5, 7]; for (const numPush of testCases3) { it(`should push ${numPush} validators`, () => { - const validators = Array.from({length: numValidator}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i})); + const validators = Array.from({length: numValidator}, (_, i) => ({ + ...seedValidator, + withdrawableEpoch: seedValidator.withdrawableEpoch + i, + })); const oldViewDU = oldValidatorsType.toViewDU(validators); const newViewDU = ssz.phase0.Validators.toViewDU(validators); const newValidators: Validator[] = []; @@ -77,6 +84,6 @@ describe("ListValidator ssz type", function () { for (let i = 0; i < numPush; i++) { expect(allValidators[numValidator + i]).to.be.deep.equal(newValidators[i]); } - }) + }); } }); diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts index 04beee42..4ecfc8e0 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts @@ -3,7 +3,7 @@ import {ContainerType} from "../../../../../ssz/src/type/container"; import {ssz} from "../../../lodestarTypes"; import {ValidatorType} from "../../../lodestarTypes/phase0/validator"; import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validator"; -import { expect } from "chai"; +import {expect} from "chai"; const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts index 96085875..c2a43333 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts @@ -1,9 +1,8 @@ -import { digestNLevelUnsafe } from "@chainsafe/persistent-merkle-tree"; -import { validatorToMerkleBytes } from "../../../../lodestarTypes/phase0/viewDU/validator"; -import { HashObject } from "@chainsafe/as-sha256"; -import { ValidatorNodeStruct } from "../../../../lodestarTypes/phase0/validator"; -import { expect } from "chai"; -import { Validator } from "../../../../lodestarTypes/phase0/sszTypes"; +import {digestNLevelUnsafe} from "@chainsafe/persistent-merkle-tree"; +import {validatorToMerkleBytes} from "../../../../lodestarTypes/phase0/viewDU/validator"; +import {ValidatorNodeStruct} from "../../../../lodestarTypes/phase0/validator"; +import {expect} from "chai"; +import {Validator} from "../../../../lodestarTypes/phase0/sszTypes"; describe("validatorNodeStruct", () => { const seedValidator = { @@ -38,16 +37,5 @@ describe("validatorNodeStruct", () => { expect(root).to.be.deep.equals(expectedRoot0); 
expect(root).to.be.deep.equals(expectedRootNode2.root); } - }) + }); }); - -function expectEqualNode(node1: HashObject, node2: HashObject, message: string) { - expect(node1.h0 >>> 0).to.be.equal(node2.h0 >>> 0, `${message} h0`); - expect(node1.h1 >>> 0).to.be.equal(node2.h1 >>> 0, `${message} h1`); - expect(node1.h2 >>> 0).to.be.equal(node2.h2 >>> 0, `${message} h2`); - expect(node1.h3 >>> 0).to.be.equal(node2.h3 >>> 0, `${message} h3`); - expect(node1.h4 >>> 0).to.be.equal(node2.h4 >>> 0, `${message} h4`); - expect(node1.h5 >>> 0).to.be.equal(node2.h5 >>> 0, `${message} h5`); - expect(node1.h6 >>> 0).to.be.equal(node2.h6 >>> 0, `${message} h6`); - expect(node1.h7 >>> 0).to.be.equal(node2.h7 >>> 0, `${message} h7`); -} From 80d3de2a2fbf1f399779a9d1828d50d685f5fd68 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 2 Jul 2024 09:30:39 +0700 Subject: [PATCH 069/113] feat: implement merkleizeInto() for as-sha256 and hashtree --- .../src/hasher/as-sha256.ts | 33 +++++++++++++++++ .../src/hasher/hashtree.ts | 37 ++++++++++++++++++- .../src/hasher/index.ts | 4 ++ .../src/hasher/noble.ts | 3 ++ .../src/hasher/types.ts | 6 +++ .../persistent-merkle-tree/src/zeroHash.ts | 15 ++++++++ .../test/unit/hasher.test.ts | 28 ++++++++++++++ 7 files changed, 125 insertions(+), 1 deletion(-) create mode 100644 packages/persistent-merkle-tree/src/zeroHash.ts diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 5e356ada..4657cb12 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -8,6 +8,7 @@ import { } from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; +import { zeroHash } from "../zeroHash"; // each validator needs to digest 8 chunks of 32 bytes = 4 hashes // support up to 4 validators @@ -19,6 +20,38 @@ export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects: digest64HashObjectsInto, + merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + if (padFor < 1) { + throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`); + } + + if (data.length % 64 !== 0) { + throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}`); + } + + const layerCount = padFor <= 1 ? 
1 : Math.ceil(Math.log2(padFor)); + let inputLength = data.length; + let outputLength = Math.floor(inputLength / 2); + let bufferIn = data; + // hash into the same buffer + for (let i = 0; i < layerCount; i++) { + const bufferOut = data.subarray(0, outputLength); + hashInto(bufferIn, bufferOut); + const chunkCount = Math.floor(outputLength / 32); + if (chunkCount % 2 === 1 && i < layerCount - 1) { + // extend to 1 more chunk + inputLength = outputLength + 32; + bufferIn = data.subarray(0, inputLength); + bufferIn.set(zeroHash(i + 1), outputLength); + } else { + bufferIn = bufferOut; + inputLength = outputLength; + } + outputLength = Math.floor(inputLength / 2); + } + + output.set(bufferIn.subarray(0, 32), offset); + }, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes // the result is multiple of 1 chunk = 32 bytes diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 24bfdd6e..8d11eccb 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -3,6 +3,7 @@ import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; import {byteArrayToHashObject} from "@chainsafe/as-sha256"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; +import { zeroHash } from "../zeroHash"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -37,11 +38,45 @@ export const hasher: Hasher = { hashInto(hash64Input, hash64Output); byteArrayIntoHashObject(hash64Output, parent); }, + // input data is unsafe because it's modified + // if its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need + // a new memory allocation here + merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + if (padFor < 1) { + throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`); + } + + if (data.length % 64 !== 0) { + throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}`); + } + + const layerCount = padFor <= 1 ? 
1 : Math.ceil(Math.log2(padFor)); + let inputLength = data.length; + let outputLength = Math.floor(inputLength / 2); + let bufferIn = data; + // hash into the same buffer + for (let i = 0; i < layerCount; i++) { + const bufferOut = data.subarray(0, outputLength); + hashInto(bufferIn, bufferOut); + const chunkCount = Math.floor(outputLength / 32); + if (chunkCount % 2 === 1 && i < layerCount - 1) { + // extend to 1 more chunk + inputLength = outputLength + 32; + bufferIn = data.subarray(0, inputLength); + bufferIn.set(zeroHash(i + 1), outputLength); + } else { + bufferIn = bufferOut; + inputLength = outputLength; + } + outputLength = Math.floor(inputLength / 2); + } + + output.set(bufferIn.subarray(0, 32), offset); + }, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes // the result is multiple of 1 chunk = 32 bytes // this is the same to hashTreeRoot() of multiple validators - // TODO - batch: data, offset, length to avoid subarray call digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { let inputLength = data.length; const bytesInBatch = Math.pow(2, nLevel) * 32; diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts index 13fb6a7c..73614951 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -34,3 +34,7 @@ export function digest64(a: Uint8Array, b: Uint8Array): Uint8Array { export function digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { return hasher.digestNLevelUnsafe(data, nLevel); } + +export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + hasher.merkleizeInto(data, padFor, output, offset); +} diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts index 78999e7d..fde458f4 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -11,6 +11,9 @@ export const hasher: Hasher = { digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), parent); }, + merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + throw new Error("Not implemented"); + }, digestNLevelUnsafe(): Uint8Array { throw new Error("Not implemented"); }, diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts index b6591cdb..558ef6fd 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -14,6 +14,12 @@ export type Hasher = { /** * Hash two 32-byte HashObjects */ digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void; + /** + * Merkleize n chunks of data, 32 bytes each + * padFor is maxChunkCount, use it to compute layers to hash + * data is mutated by this function + */ + merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; /** * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes.
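For illustration, a minimal caller of the new merkleizeInto() export might look like the sketch below (not part of the patch; it assumes the hasher/index.ts re-export added above, and the leaf bytes are made up):

import {merkleizeInto} from "@chainsafe/persistent-merkle-tree";

// 3 data chunks padded to 4 so that data.length is a multiple of 64 bytes;
// the 4th chunk stays zeroed, which is exactly zeroHash(0)
const data = new Uint8Array(4 * 32);
data.fill(0xab, 0, 3 * 32); // hypothetical leaf bytes
const root = new Uint8Array(32);
// padFor = 4 chunks -> Math.ceil(Math.log2(4)) = 2 hash layers; the root is written at offset 0
merkleizeInto(data, 4, root, 0);
// data is scratch space afterwards: its prefix now holds the intermediate hash layers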
diff --git a/packages/persistent-merkle-tree/src/zeroHash.ts b/packages/persistent-merkle-tree/src/zeroHash.ts new file mode 100644 index 00000000..536c6096 --- /dev/null +++ b/packages/persistent-merkle-tree/src/zeroHash.ts @@ -0,0 +1,15 @@ +// TODO - batch: deduplicate to ssz +// use as-sha256 hasher here instead of using hasher variable because this is used inside hasher itself +import {digest2Bytes32} from "@chainsafe/as-sha256"; + +// create array of "zero hashes", successively hashed zero chunks +const zeroHashes = [new Uint8Array(32)]; + +export function zeroHash(depth: number): Uint8Array { + if (depth >= zeroHashes.length) { + for (let i = zeroHashes.length; i <= depth; i++) { + zeroHashes[i] = digest2Bytes32(zeroHashes[i - 1], zeroHashes[i - 1]); + } + } + return zeroHashes[depth]; +} diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts index afc75186..324d797a 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -6,6 +6,8 @@ import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {linspace} from "../utils/misc"; import {buildComparisonTrees} from "../utils/tree"; import {HashObject, LeafNode, subtreeFillToContents} from "../../src"; +import { expect } from "chai"; +import { zeroHash } from "../../src/zeroHash"; const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher]; @@ -99,4 +101,30 @@ describe("hasher.digestNLevelUnsafe", function () { } }); +describe("hasher.merkleizeInto", function () { + const numNodes = [5, 6, 7, 8]; + for (const hasher of [hashtreeHasher, asSha256Hasher]) { + it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { + const data = Buffer.alloc(63, 0); + const output = Buffer.alloc(32); + expect(() => hasher.merkleizeInto(data, 1, output, 0)).to.throw("Invalid input length"); + }); + + for (const numNode of numNodes) { + it(`${hasher.name}.merkleizeInto for ${numNode} nodes`, () => { + + const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i))); + const data = Buffer.concat(nodes.map((node) => node.root)); + const output = Buffer.alloc(32); + const maxChunkCount = 8; + const padData = numNode % 2 === 1 ?
Buffer.concat([data, zeroHash(0)]) : data; + hasher.merkleizeInto(padData, maxChunkCount, output, 0); + const depth = Math.ceil(Math.log2(maxChunkCount)); + const root = subtreeFillToContents(nodes, depth).root; + expectEqualHex(output, root); + }); + } + } +}); + // TODO - batch: test more methods From f1b5483b763fb377925bb1f126a988dc6b1bf6c0 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 3 Jul 2024 16:55:28 +0700 Subject: [PATCH 070/113] fix: deduplicate merkleizeInto() implementations --- .../src/hasher/as-sha256.ts | 34 ++------------ .../src/hasher/hashtree.ts | 36 ++------------ .../src/hasher/noble.ts | 2 +- .../persistent-merkle-tree/src/hasher/util.ts | 47 +++++++++++++++++++ .../test/unit/hasher.test.ts | 2 +- 5 files changed, 56 insertions(+), 65 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 4657cb12..b9aeef85 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -8,7 +8,7 @@ import { } from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; -import { zeroHash } from "../zeroHash"; +import {merkleize} from "./util"; // each validator needs to digest 8 chunks of 32 bytes = 4 hashes // support up to 4 validators @@ -20,37 +20,9 @@ export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects: digest64HashObjectsInto, + // TODO - batch: deduplicate with hashtree merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - if (padFor < 1) { - throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`); - } - - if (data.length % 64 !== 0) { - throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}`); - } - - const layerCount = padFor <= 1 ? 
1 : Math.ceil(Math.log2(padFor)); - let inputLength = data.length; - let outputLength = Math.floor(inputLength / 2); - let bufferIn = data; - // hash into the same buffer - for (let i = 0; i < layerCount; i++) { - const bufferOut = data.subarray(0, outputLength); - hashInto(bufferIn, bufferOut); - const chunkCount = Math.floor(outputLength / 32); - if (chunkCount % 2 === 1 && i < layerCount - 1) { - // extend to 1 more chunk - inputLength = outputLength + 32; - bufferIn = data.subarray(0, inputLength); - bufferIn.set(zeroHash(i + 1), outputLength); - } else { - bufferIn = bufferOut; - inputLength = outputLength; - } - outputLength = Math.floor(inputLength / 2); - } - - output.set(bufferIn.subarray(0, 32), offset); + return merkleize(data, padFor, output, offset, hashInto); }, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes // the result is multiple of 1 chunk = 32 bytes diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 8d11eccb..5a99c7b2 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -3,7 +3,7 @@ import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; import {byteArrayToHashObject} from "@chainsafe/as-sha256"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import { zeroHash } from "../zeroHash"; +import {merkleize} from "./util"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -40,38 +40,10 @@ export const hasher: Hasher = { }, // input data is unsafe because it's modified // if its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need - // a new memory allocation here + // a new memory allocation here (even though we don't need it if padFor = 1) + // TODO - batch: deduplicate with as-sha256 merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - if (padFor < 1) { - throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`); - } - - if (data.length % 64 !== 0) { - throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}`); - } - - const layerCount = padFor <= 1 ?
1 : Math.ceil(Math.log2(padFor)); - let inputLength = data.length; - let outputLength = Math.floor(inputLength / 2); - let bufferIn = data; - // hash into the same buffer - for (let i = 0; i < layerCount; i++) { - const bufferOut = data.subarray(0, outputLength); - hashInto(bufferIn, bufferOut); - const chunkCount = Math.floor(outputLength / 32); - if (chunkCount % 2 === 1 && i < layerCount - 1) { - // extend to 1 more chunk - inputLength = outputLength + 32; - bufferIn = data.subarray(0, inputLength); - bufferIn.set(zeroHash(i + 1), outputLength); - } else { - bufferIn = bufferOut; - inputLength = outputLength; - } - outputLength = Math.floor(inputLength / 2); - } - - output.set(bufferIn.subarray(0, 32), offset); + return merkleize(data, padFor, output, offset, hashInto); }, // given nLevel = 3 // digest multiple of 8 chunks = 256 bytes diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index fde458f4..da2938b3 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -11,7 +11,7 @@ export const hasher: Hasher = { digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), parent); }, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + merkleizeInto(): void { throw new Error("Not implemented"); }, digestNLevelUnsafe(): Uint8Array { diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts index 7f3f45ee..7632691d 100644 --- a/packages/persistent-merkle-tree/src/hasher/util.ts +++ b/packages/persistent-merkle-tree/src/hasher/util.ts @@ -1,4 +1,5 @@ import {byteArrayToHashObject, HashObject, hashObjectToByteArray} from "@chainsafe/as-sha256/lib/hashObject"; +import {zeroHash} from "../zeroHash"; export function hashObjectToUint8Array(obj: HashObject): Uint8Array { const byteArr = new Uint8Array(32); @@ -9,3 +10,49 @@ export function hashObjectToUint8Array(obj: HashObject): Uint8Array { export function uint8ArrayToHashObject(byteArr: Uint8Array): HashObject { return byteArrayToHashObject(byteArr, 0); } + +type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void; + +export function merkleize( + data: Uint8Array, + padFor: number, + output: Uint8Array, + offset: number, + hashInto: HashIntoFn +): void { + if (padFor < 1) { + throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`); + } + + if (data.length < 32) { + throw new Error(`Invalid input length, expect to be at least 32 bytes, got ${data.length}`); + } + + // if padFor = 1, only need 32 bytes + if (padFor > 1 && data.length % 64 !== 0) { + throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}, padFor=${padFor}`); + } + + const layerCount = Math.ceil(Math.log2(padFor)); + let inputLength = data.length; + let outputLength = Math.floor(inputLength / 2); + let bufferIn = data; + // hash into the same buffer + for (let i = 0; i < layerCount; i++) { + const bufferOut = data.subarray(0, outputLength); + hashInto(bufferIn, bufferOut); + const chunkCount = Math.floor(outputLength / 32); + if (chunkCount % 2 === 1 && i < layerCount - 1) { + // extend to 1 more chunk + inputLength = outputLength + 32; + bufferIn = data.subarray(0, inputLength); + bufferIn.set(zeroHash(i + 1), outputLength); + } else { + bufferIn = bufferOut; + inputLength = outputLength; + } + 
outputLength = Math.floor(inputLength / 2); + } + + output.set(bufferIn.subarray(0, 32), offset); +} diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 324d797a..fbe468e4 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -107,7 +107,7 @@ describe("hasher.merkleizeInto", function () { it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { const data = Buffer.alloc(63, 0); const output = Buffer.alloc(32); - expect(() => hasher.merkleizeInto(data, 1, output, 0)).to.throw("Invalid input length"); + expect(() => hasher.merkleizeInto(data, 2, output, 0)).to.throw("Invalid input length"); }); for (const numNode of numNodes) { From 743b7cff1ecb4e0acde6dec2ec9c085f816537d7 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 4 Jul 2024 09:38:10 +0700 Subject: [PATCH 071/113] fix: handle empty data in merkleizeInto() --- packages/persistent-merkle-tree/src/hasher/util.ts | 11 ++++++++--- .../persistent-merkle-tree/test/unit/hasher.test.ts | 9 +++++---- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts index 7632691d..9d063e3c 100644 --- a/packages/persistent-merkle-tree/src/hasher/util.ts +++ b/packages/persistent-merkle-tree/src/hasher/util.ts @@ -24,8 +24,14 @@ export function merkleize( throw new Error(`Invalid padFor, expect to be greater than 0, got ${padFor}`); } - if (data.length < 32) { - throw new Error(`Invalid input length, expect to be at least 32 bytes, got ${data.length}`); + const layerCount = Math.ceil(Math.log2(padFor)); + if (data.length === 0) { + output.set(zeroHash(layerCount), offset); + return; + } + + if (data.length % 32 !== 0) { + throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${data.length}`); } // if padFor = 1, only need 32 bytes @@ -33,7 +39,6 @@ export function merkleize( throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}, padFor=${padFor}`); } - const layerCount = Math.ceil(Math.log2(padFor)); let inputLength = data.length; let outputLength = Math.floor(inputLength / 2); let bufferIn = data; diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index fbe468e4..8707578c 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -101,8 +101,9 @@ describe("hasher.digestNLevelUnsafe", function () { } }); + describe("hasher.merkleizeInto", function () { - const numNodes = [5, 6, 7, 8]; + const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8]; for (const hasher of [hashtreeHasher, asSha256Hasher]) { it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { const data = Buffer.alloc(63, 0); @@ -116,10 +117,10 @@ describe("hasher.merkleizeInto", function () { const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i))); const data = Buffer.concat(nodes.map((node) => node.root)); const output = Buffer.alloc(32); - const maxChunkCount = 8; + const chunkCount = Math.max(numNode, 1); const padData = numNode % 2 === 1 ? 
Buffer.concat([data, zeroHash(0)]) : data; - hasher.merkleizeInto(padData, maxChunkCount, output, 0); - const depth = Math.ceil(Math.log2(maxChunkCount)); + hasher.merkleizeInto(padData, chunkCount, output, 0); + const depth = Math.ceil(Math.log2(chunkCount)); const root = subtreeFillToContents(nodes, depth).root; expectEqualHex(output, root); }); From 0e064799ee144da754946ea2bb7705791074575e Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 4 Jul 2024 09:47:53 +0700 Subject: [PATCH 072/113] feat: type.hashTreeRootInto() using hasher merkleizeInto() --- packages/ssz/src/type/abstract.ts | 5 ++ packages/ssz/src/type/arrayComposite.ts | 26 +++++--- packages/ssz/src/type/basic.ts | 12 +++- packages/ssz/src/type/bitArray.ts | 13 +++- packages/ssz/src/type/bitList.ts | 29 ++++++++- packages/ssz/src/type/byteArray.ts | 25 +++++++- packages/ssz/src/type/byteList.ts | 29 ++++++++- packages/ssz/src/type/composite.ts | 33 +++++++++-- packages/ssz/src/type/container.ts | 13 ++-- packages/ssz/src/type/listBasic.ts | 59 ++++++++++++++----- packages/ssz/src/type/listComposite.ts | 52 ++++++++++++---- packages/ssz/src/type/optional.ts | 32 ++++++++-- packages/ssz/src/type/union.ts | 27 +++++++-- packages/ssz/src/type/vectorBasic.ts | 14 +++-- packages/ssz/src/type/vectorComposite.ts | 10 +++- packages/ssz/test/spec/runValidTest.ts | 9 +-- .../unit/cachePermanentRootStruct.test.ts | 6 +- packages/ssz/test/unit/load_state.test.ts | 18 ++++++ packages/ssz/test/unit/merkleize.test.ts | 36 ++++++++++- 19 files changed, 358 insertions(+), 90 deletions(-) create mode 100644 packages/ssz/test/unit/load_state.test.ts diff --git a/packages/ssz/src/type/abstract.ts b/packages/ssz/src/type/abstract.ts index b96b7355..792ca077 100644 --- a/packages/ssz/src/type/abstract.ts +++ b/packages/ssz/src/type/abstract.ts @@ -145,6 +145,11 @@ export abstract class Type { */ abstract hashTreeRoot(value: V): Uint8Array; + /** + * Same to hashTreeRoot() but here we write result to output. + */ + abstract hashTreeRootInto(value: V, output: Uint8Array, offset: number): void; + // JSON support /** Parse JSON representation of a type to value */ diff --git a/packages/ssz/src/type/arrayComposite.ts b/packages/ssz/src/type/arrayComposite.ts index d3b0a8fb..986b0e0a 100644 --- a/packages/ssz/src/type/arrayComposite.ts +++ b/packages/ssz/src/type/arrayComposite.ts @@ -211,21 +211,29 @@ export function tree_deserializeFromBytesArrayComposite>( +export function value_getChunkBytesArrayComposite>( elementType: ElementType, length: number, - value: ValueOf[] -): Uint8Array[] { - const roots = new Array(length); + value: ValueOf[], + chunkBytesBuffer: Uint8Array +): Uint8Array { + const isOddChunk = length % 2 === 1; + const chunkBytesLen = isOddChunk ? 
length * 32 + 32 : length * 32; + if (chunkBytesLen > chunkBytesBuffer.length) { + throw new Error(`chunkBytesBuffer is too small: ${chunkBytesBuffer.length} < ${chunkBytesLen}`); + } + const chunkBytes = chunkBytesBuffer.subarray(0, chunkBytesLen); for (let i = 0; i < length; i++) { - roots[i] = elementType.hashTreeRoot(value[i]); + elementType.hashTreeRootInto(value[i], chunkBytes, i * 32); + } + + if (isOddChunk) { + // similar to append zeroHash(0) + chunkBytes.subarray(length * 32, chunkBytesLen).fill(0); } - return roots; + return chunkBytes; } function readOffsetsArrayComposite( diff --git a/packages/ssz/src/type/basic.ts b/packages/ssz/src/type/basic.ts index 0260ea49..add1d9f1 100644 --- a/packages/ssz/src/type/basic.ts +++ b/packages/ssz/src/type/basic.ts @@ -30,11 +30,17 @@ export abstract class BasicType extends Type { } hashTreeRoot(value: V): Uint8Array { - // TODO: Optimize - const uint8Array = new Uint8Array(32); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: V, output: Uint8Array, offset: number): void { + const uint8Array = output.subarray(offset, offset + 32); + // output could have preallocated data, some types may not fill the whole 32 bytes + uint8Array.fill(0); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); this.value_serializeToBytes({uint8Array, dataView}, 0, value); - return uint8Array; } clone(value: V): V { diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts index 22556986..65c76b4f 100644 --- a/packages/ssz/src/type/bitArray.ts +++ b/packages/ssz/src/type/bitArray.ts @@ -1,10 +1,10 @@ import {concatGindices, Gindex, HashComputationGroup, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; -import {splitIntoRootChunks} from "../util/merkleize"; import {CompositeType, LENGTH_GINDEX} from "./composite"; import {BitArray} from "../value/bitArray"; import {BitArrayTreeView} from "../view/bitArray"; import {BitArrayTreeViewDU} from "../viewDU/bitArray"; +import {getChunkBytes} from "./byteArray"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -40,8 +40,15 @@ export abstract class BitArrayType extends CompositeType this.chunkBytesBuffer.length) { + const chunkCount = Math.ceil(value.bitLen / 8 / 32); + const chunkBytes = chunkCount * 32; + // pad 1 chunk if maxChunkCount is not even + this.chunkBytesBuffer = chunkCount % 2 === 1 ? 
new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes); + } + return getChunkBytes(value.uint8Array, this.chunkBytesBuffer); } // Proofs diff --git a/packages/ssz/src/type/bitList.ts b/packages/ssz/src/type/bitList.ts index 0d8268b2..8f2bb1b1 100644 --- a/packages/ssz/src/type/bitList.ts +++ b/packages/ssz/src/type/bitList.ts @@ -1,5 +1,11 @@ -import {getNodesAtDepth, Node, packedNodeRootsToBytes, packedRootsBytesToNode} from "@chainsafe/persistent-merkle-tree"; -import {mixInLength, maxChunksToDepth} from "../util/merkleize"; +import { + getNodesAtDepth, + merkleizeInto, + Node, + packedNodeRootsToBytes, + packedRootsBytesToNode, +} from "@chainsafe/persistent-merkle-tree"; +import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ByteViews} from "./composite"; @@ -29,6 +35,12 @@ export class BitListType extends BitArrayType { readonly maxSize: number; readonly maxChunkCount: number; readonly isList = true; + readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthChunkBytes.buffer, + this.mixInLengthChunkBytes.byteOffset, + this.mixInLengthChunkBytes.byteLength + ); constructor(readonly limitBits: number, opts?: BitListOptions) { super(); @@ -101,7 +113,18 @@ export class BitListType extends BitArrayType { // Merkleization: inherited from BitArrayType hashTreeRoot(value: BitArray): Uint8Array { - return mixInLength(super.hashTreeRoot(value), value.bitLen); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: BitArray, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.bitLen, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/byteArray.ts b/packages/ssz/src/type/byteArray.ts index 48c5263f..c8469b74 100644 --- a/packages/ssz/src/type/byteArray.ts +++ b/packages/ssz/src/type/byteArray.ts @@ -1,6 +1,5 @@ import {concatGindices, Gindex, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; -import {splitIntoRootChunks} from "../util/merkleize"; import {ByteViews} from "./abstract"; import {CompositeType, LENGTH_GINDEX} from "./composite"; @@ -78,8 +77,15 @@ export abstract class ByteArrayType extends CompositeType this.chunkBytesBuffer.length) { + const chunkCount = Math.ceil(value.length / 32); + const chunkBytes = chunkCount * 32; + // pad 1 chunk if maxChunkCount is not even + this.chunkBytesBuffer = chunkCount % 2 === 1 ? 
new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes); + } + return getChunkBytes(value, this.chunkBytesBuffer); } // Proofs @@ -143,3 +149,16 @@ export abstract class ByteArrayType extends CompositeType merkleBytesBuffer.length) { + throw new Error(`data length ${data.length} exceeds merkleBytesBuffer length ${merkleBytesBuffer.length}`); + } + + merkleBytesBuffer.set(data); + const valueLen = data.length; + const chunkByteLen = Math.ceil(valueLen / 64) * 64; + // all padding bytes must be zero, this is similar to set zeroHash(0) + merkleBytesBuffer.subarray(valueLen, chunkByteLen).fill(0); + return merkleBytesBuffer.subarray(0, chunkByteLen); +} diff --git a/packages/ssz/src/type/byteList.ts b/packages/ssz/src/type/byteList.ts index 6f12fff7..a71a9c81 100644 --- a/packages/ssz/src/type/byteList.ts +++ b/packages/ssz/src/type/byteList.ts @@ -1,5 +1,11 @@ -import {getNodesAtDepth, Node, packedNodeRootsToBytes, packedRootsBytesToNode} from "@chainsafe/persistent-merkle-tree"; -import {mixInLength, maxChunksToDepth} from "../util/merkleize"; +import { + getNodesAtDepth, + Node, + packedNodeRootsToBytes, + packedRootsBytesToNode, + merkleizeInto, +} from "@chainsafe/persistent-merkle-tree"; +import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {addLengthNode, getChunksNodeFromRootNode, getLengthFromRootNode} from "./arrayBasic"; @@ -34,6 +40,12 @@ export class ByteListType extends ByteArrayType { readonly maxSize: number; readonly maxChunkCount: number; readonly isList = true; + readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthChunkBytes.buffer, + this.mixInLengthChunkBytes.byteOffset, + this.mixInLengthChunkBytes.byteLength + ); constructor(readonly limitBytes: number, opts?: ByteListOptions) { super(); @@ -89,7 +101,18 @@ export class ByteListType extends ByteArrayType { // Merkleization: inherited from ByteArrayType hashTreeRoot(value: ByteArray): Uint8Array { - return mixInLength(super.hashTreeRoot(value), value.length); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: Uint8Array, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index a2cef558..c21d7c87 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -8,9 +8,10 @@ import { Proof, ProofType, Tree, + merkleizeInto, } from "@chainsafe/persistent-merkle-tree"; import {byteArrayEquals} from "../util/byteArray"; -import {merkleize, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; +import {symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {treePostProcessFromProofNode} from "../util/proof/treePostProcessFromProofNode"; import {Type, ByteViews, JsonPath, JsonPathProp} from "./abstract"; export {ByteViews}; @@ -59,6 +60,7 @@ export abstract class CompositeType extends Type { * Required for ContainerNodeStruct to ensure no dangerous types are constructed. 
*/ abstract readonly isViewMutable: boolean; + protected chunkBytesBuffer = new Uint8Array(0); constructor( /** @@ -216,13 +218,29 @@ export abstract class CompositeType extends Type { } } - const root = merkleize(this.getRoots(value), this.maxChunkCount); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + // hashTreeRootInto will cache the root if cachePermanentRootStruct is true + + return root; + } + + hashTreeRootInto(value: V, output: Uint8Array, offset: number): void { + // Return cached mutable root if any if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; + if (cachedRoot) { + output.set(cachedRoot, offset); + return; + } } - return root; + const merkleBytes = this.getChunkBytes(value); + merkleizeInto(merkleBytes, this.maxChunkCount, output, offset); + if (this.cachePermanentRootStruct) { + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.slice(offset, offset + 32); + } } // For debugging and testing this feature @@ -236,7 +254,12 @@ export abstract class CompositeType extends Type { // and feed those numbers directly to the hasher input with a DataView // - The return of the hasher should be customizable too, to reduce conversions from Uint8Array // to hashObject and back. - protected abstract getRoots(value: V): Uint8Array[]; + + /** + * Get merkle bytes of each value, the returned Uint8Array should be a multiple of 64 bytes. + * If chunk count is not even, need to append zeroHash(0) + */ + protected abstract getChunkBytes(value: V): Uint8Array; // Proofs API diff --git a/packages/ssz/src/type/container.ts index 57e8215b..67d6afb3 100644 --- a/packages/ssz/src/type/container.ts +++ b/packages/ssz/src/type/container.ts @@ -130,6 +130,9 @@ export class ContainerType>> extends // Refactor this constructor to allow customization without polluting the options this.TreeView = opts?.getContainerTreeViewClass?.(this) ?? getContainerTreeViewClass(this); this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ??
getContainerTreeViewDUClass(this); + const fieldBytes = this.fieldsEntries.length * 32; + const chunkBytes = Math.ceil(fieldBytes / 64) * 64; + this.chunkBytesBuffer = new Uint8Array(chunkBytes); } static named>>( @@ -268,15 +271,13 @@ export class ContainerType>> extends // Merkleization - protected getRoots(struct: ValueOfFields): Uint8Array[] { - const roots = new Array(this.fieldsEntries.length); - + protected getChunkBytes(struct: ValueOfFields): Uint8Array { for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType} = this.fieldsEntries[i]; - roots[i] = fieldType.hashTreeRoot(struct[fieldName]); + fieldType.hashTreeRootInto(struct[fieldName], this.chunkBytesBuffer, i * 32); } - - return roots; + // remaining bytes are zeroed as we never write them + return this.chunkBytesBuffer; } // Proofs diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index b4c6ff9e..ed4e24c9 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, LeafNode, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "./abstract"; import {BasicType} from "./basic"; import {ByteViews} from "./composite"; @@ -10,13 +10,7 @@ import { addLengthNode, setChunksNode, } from "./arrayBasic"; -import { - mixInLength, - maxChunksToDepth, - splitIntoRootChunks, - symbolCachedPermanentRoot, - ValueWithCachedPermanentRoot, -} from "../util/merkleize"; +import {maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ArrayBasicType} from "../view/arrayBasic"; @@ -52,6 +46,12 @@ export class ListBasicType> readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthChunkBytes.buffer, + this.mixInLengthChunkBytes.byteOffset, + this.mixInLengthChunkBytes.byteLength + ); protected readonly defaultLen = 0; constructor(readonly elementType: ElementType, readonly limit: number, opts?: ListBasicOpts) { @@ -169,20 +169,51 @@ export class ListBasicType> } } - const root = mixInLength(super.hashTreeRoot(value), value.length); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + + // hashTreeRootInto will cache the root if cachePermanentRootStruct is true + return root; + } + + hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number): void { if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; + if (cachedRoot) { + output.set(cachedRoot, offset); + return; + } } - return root; + super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + + if (this.cachePermanentRootStruct) { + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.subarray(offset, offset + 32).slice(); + } } - protected getRoots(value: ValueOf[]): Uint8Array[] { - const uint8Array = new 
Uint8Array(this.value_serializedSize(value)); + protected getChunkBytes(value: ValueOf[]): Uint8Array { + const byteLen = this.value_serializedSize(value); + const chunkByteLen = Math.ceil(byteLen / 64) * 64; + // reallocate this.chunkBytesBuffer if needed + if (byteLen > this.chunkBytesBuffer.length) { + // pad 1 chunk if maxChunkCount is not even + this.chunkBytesBuffer = new Uint8Array(chunkByteLen); + } + const chunkBytes = this.chunkBytesBuffer.subarray(0, chunkByteLen); + const uint8Array = chunkBytes.subarray(0, byteLen); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, value.length, {uint8Array, dataView}, 0, value); - return splitIntoRootChunks(uint8Array); + + // all padding bytes must be zero, this is similar to set zeroHash(0) + this.chunkBytesBuffer.subarray(byteLen, chunkByteLen).fill(0); + return chunkBytes; } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/listComposite.ts index 8e97f90e..f74a3c88 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,10 +1,5 @@ -import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree"; -import { - mixInLength, - maxChunksToDepth, - symbolCachedPermanentRoot, - ValueWithCachedPermanentRoot, -} from "../util/merkleize"; +import {HashComputationGroup, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; +import {maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ValueOf, ByteViews} from "./abstract"; @@ -17,8 +12,8 @@ import { tree_serializedSizeArrayComposite, tree_deserializeFromBytesArrayComposite, tree_serializeToBytesArrayComposite, - value_getRootsArrayComposite, maxSizeArrayComposite, + value_getChunkBytesArrayComposite, } from "./arrayComposite"; import {ArrayCompositeType} from "../view/arrayComposite"; import {ListCompositeTreeView} from "../view/listComposite"; @@ -56,6 +51,12 @@ export class ListCompositeType< readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthChunkBytes.buffer, + this.mixInLengthChunkBytes.byteOffset, + this.mixInLengthChunkBytes.byteLength + ); protected readonly defaultLen = 0; constructor(readonly elementType: ElementType, readonly limit: number, opts?: ListCompositeOpts) { @@ -175,17 +176,42 @@ } } - const root = mixInLength(super.hashTreeRoot(value), value.length); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + + // hashTreeRootInto will cache the root if cachePermanentRootStruct is true + return root; + } + + hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number): void { if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; + if (cachedRoot) { + output.set(cachedRoot, offset); + return; + } } - return root; + super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeInto(this.mixInLengthChunkBytes,
chunkCount, output, offset); + + if (this.cachePermanentRootStruct) { + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.slice(offset, offset + 32); + } } - protected getRoots(value: ValueOf[]): Uint8Array[] { - return value_getRootsArrayComposite(this.elementType, value.length, value); + protected getChunkBytes(value: ValueOf[]): Uint8Array { + const byteLen = value.length * 32; + const chunkByteLen = this.chunkBytesBuffer.byteLength; + if (byteLen > chunkByteLen) { + this.chunkBytesBuffer = new Uint8Array(Math.ceil(byteLen / 64) * 64); + } + return value_getChunkBytesArrayComposite(this.elementType, value.length, value, this.chunkBytesBuffer); } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index c82df001..1ed15cc9 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -1,5 +1,4 @@ -import {concatGindices, Gindex, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; -import {mixInLength} from "../util/merkleize"; +import {concatGindices, Gindex, merkleizeInto, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {Type, ByteViews, JsonPath, JsonPathProp} from "./abstract"; @@ -34,6 +33,12 @@ export class OptionalType> extends CompositeTy readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthChunkBytes.buffer, + this.mixInLengthChunkBytes.byteOffset, + this.mixInLengthChunkBytes.byteLength + ); constructor(readonly elementType: ElementType, opts?: OptionalOpts) { super(); @@ -46,6 +51,7 @@ export class OptionalType> extends CompositeTy this.minSize = 0; // Max size includes prepended 0x01 byte this.maxSize = elementType.maxSize + 1; + this.chunkBytesBuffer = new Uint8Array(32); } static named>( @@ -155,13 +161,27 @@ export class OptionalType> extends CompositeTy // Merkleization hashTreeRoot(value: ValueOfType): Uint8Array { + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: ValueOfType, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); const selector = value === null ? 0 : 1; - return mixInLength(super.hashTreeRoot(value), selector); + this.mixInLengthBuffer.writeUIntLE(selector, 32, 6); + // one for hashTreeRoot(value), one for selector + const chunkCount = 2; + merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); } - protected getRoots(value: ValueOfType): Uint8Array[] { - const valueRoot = value === null ? 
new Uint8Array(32) : this.elementType.hashTreeRoot(value); - return [valueRoot]; + protected getChunkBytes(value: ValueOfType): Uint8Array { + if (value === null) { + this.chunkBytesBuffer.fill(0); + } else { + this.elementType.hashTreeRootInto(value, this.chunkBytesBuffer, 0); + } + return this.chunkBytesBuffer; } // Proofs diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index 5bd664e0..97049eb1 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -1,5 +1,4 @@ -import {concatGindices, getNode, Gindex, Node, Tree} from "@chainsafe/persistent-merkle-tree"; -import {mixInLength} from "../util/merkleize"; +import {concatGindices, getNode, Gindex, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {Type, ByteViews} from "./abstract"; @@ -40,6 +39,12 @@ export class UnionType[]> extends CompositeType< readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthChunkBytes.buffer, + this.mixInLengthChunkBytes.byteOffset, + this.mixInLengthChunkBytes.byteLength + ); protected readonly maxSelector: number; @@ -77,6 +82,7 @@ export class UnionType[]> extends CompositeType< this.minSize = 1 + Math.min(...minLens); this.maxSize = 1 + Math.max(...maxLens); this.maxSelector = this.types.length - 1; + this.chunkBytesBuffer = new Uint8Array(32); } static named[]>(types: Types, opts: Require): UnionType { @@ -159,12 +165,21 @@ export class UnionType[]> extends CompositeType< // Merkleization hashTreeRoot(value: ValueOfTypes): Uint8Array { - return mixInLength(super.hashTreeRoot(value), value.selector); + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + return root; } - protected getRoots(value: ValueOfTypes): Uint8Array[] { - const valueRoot = this.types[value.selector].hashTreeRoot(value.value); - return [valueRoot]; + hashTreeRootInto(value: ValueOfTypes, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + this.mixInLengthBuffer.writeUIntLE(value.selector, 32, 6); + const chunkCount = 2; + merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + } + + protected getChunkBytes(value: ValueOfTypes): Uint8Array { + this.types[value.selector].hashTreeRootInto(value.value, this.chunkBytesBuffer, 0); + return this.chunkBytesBuffer; } // Proofs diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts index c6b1b459..1b86ad03 100644 --- a/packages/ssz/src/type/vectorBasic.ts +++ b/packages/ssz/src/type/vectorBasic.ts @@ -1,5 +1,5 @@ import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree"; -import {maxChunksToDepth, splitIntoRootChunks} from "../util/merkleize"; +import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ValueOf, ByteViews} from "./abstract"; @@ -59,6 +59,10 @@ export class VectorBasicType> this.minSize = this.fixedSize; this.maxSize = this.fixedSize; this.defaultLen = length; + // pad 1 chunk if maxChunkCount is not even + this.chunkBytesBuffer = new Uint8Array( + this.maxChunkCount % 2 === 1 ? 
this.maxChunkCount * 32 + 32 : this.maxChunkCount * 32 + ); } static named>( @@ -142,11 +146,13 @@ export class VectorBasicType> // Merkleization - protected getRoots(value: ValueOf[]): Uint8Array[] { - const uint8Array = new Uint8Array(this.fixedSize); + protected getChunkBytes(value: ValueOf[]): Uint8Array { + const uint8Array = this.chunkBytesBuffer.subarray(0, this.fixedSize); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, this.length, {uint8Array, dataView}, 0, value); - return splitIntoRootChunks(uint8Array); + + // remaining bytes from this.fixedSize to this.chunkBytesBuffer.length must be zeroed + return this.chunkBytesBuffer; } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts index 9f0d7f8c..af5b1d1f 100644 --- a/packages/ssz/src/type/vectorComposite.ts +++ b/packages/ssz/src/type/vectorComposite.ts @@ -11,9 +11,9 @@ import { tree_serializedSizeArrayComposite, tree_deserializeFromBytesArrayComposite, tree_serializeToBytesArrayComposite, - value_getRootsArrayComposite, maxSizeArrayComposite, minSizeArrayComposite, + value_getChunkBytesArrayComposite, } from "./arrayComposite"; import {ArrayCompositeType, ArrayCompositeTreeView} from "../view/arrayComposite"; import {ArrayCompositeTreeViewDU} from "../viewDU/arrayComposite"; @@ -65,6 +65,10 @@ export class VectorCompositeType< this.minSize = minSizeArrayComposite(elementType, length); this.maxSize = maxSizeArrayComposite(elementType, length); this.defaultLen = length; + this.chunkBytesBuffer = + this.maxChunkCount % 2 === 1 + ? new Uint8Array(this.maxChunkCount * 32 + 32) + : new Uint8Array(this.maxChunkCount * 32); } // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -149,8 +153,8 @@ export class VectorCompositeType< // Merkleization - protected getRoots(value: ValueOf[]): Uint8Array[] { - return value_getRootsArrayComposite(this.elementType, this.length, value); + protected getChunkBytes(value: ValueOf[]): Uint8Array { + return value_getChunkBytesArrayComposite(this.elementType, this.length, value, this.chunkBytesBuffer); } // JSON: inherited from ArrayType diff --git a/packages/ssz/test/spec/runValidTest.ts b/packages/ssz/test/spec/runValidTest.ts index 56307baf..0d12a4b6 100644 --- a/packages/ssz/test/spec/runValidTest.ts +++ b/packages/ssz/test/spec/runValidTest.ts @@ -101,13 +101,10 @@ export function runValidSszTest(type: Type, testData: ValidTestCaseData // 0x0000000000000000000000000000000000000000000000000000000000000000 if (process.env.RENDER_ROOTS) { if (type.isBasic) { - console.log("ROOTS Basic", toHexString(type.serialize(testDataValue))); + console.log("Chunk Bytes Basic", toHexString(type.serialize(testDataValue))); } else { - const roots = (type as CompositeType)["getRoots"](testDataValue); - console.log( - "ROOTS Composite", - roots.map((root) => toHexString(root)) - ); + const chunkBytes = (type as CompositeType)["getChunkBytes"](testDataValue); + console.log("Chunk Bytes Composite", toHexString(chunkBytes)); } } diff --git a/packages/ssz/test/unit/cachePermanentRootStruct.test.ts b/packages/ssz/test/unit/cachePermanentRootStruct.test.ts index 169557a7..1e3fc0cf 100644 --- a/packages/ssz/test/unit/cachePermanentRootStruct.test.ts +++ b/packages/ssz/test/unit/cachePermanentRootStruct.test.ts @@ -11,8 +11,10 @@ describe("cachePermanentRootStruct", () => { const root = type.hashTreeRoot(value); const root2 = 
type.hashTreeRoot(value);
 
-    expect(root).to.equal(root2, "Second hashTreeRoot should return the same Uint8Array");
+    // previously this was the same reference; since we moved to merkleizeInto() it is not anymore
+    // this should not be an issue anyway
+    expect(root).to.deep.equal(root2, "Second hashTreeRoot should return the same Uint8Array");
 
-    expect(type["getCachedPermanentRoot"](value)).to.equal(root, "Should have cached root");
+    expect(type["getCachedPermanentRoot"](value)).to.deep.equal(root, "Should have cached root");
   });
 });
diff --git a/packages/ssz/test/unit/load_state.test.ts b/packages/ssz/test/unit/load_state.test.ts
new file mode 100644
index 00000000..cfee3a4c
--- /dev/null
+++ b/packages/ssz/test/unit/load_state.test.ts
@@ -0,0 +1,18 @@
+import fs from "fs";
+import {BeaconState} from "../lodestarTypes/deneb/sszTypes";
+import {toHexString} from "../../src/util/byteArray";
+
+describe.skip("load holesky state", function () {
+  this.timeout(0);
+  const stateFilePath = "/Users/tuyennguyen/Downloads/holesky_finalized_state.ssz";
+  it("should load state from file", function () {
+    const stateBytes = fs.readFileSync(stateFilePath);
+    console.log("@@@ stateBytes", stateBytes.length);
+    let now = Date.now();
+    const wsState = BeaconState.deserializeToViewDU(stateBytes);
+    console.log("@@@ got wsState slot", wsState.slot, "in", Date.now() - now, "ms");
+    wsState.node.root;
+    now = Date.now();
+    console.log("@@@ hashTreeRoot", toHexString(wsState.hashTreeRoot()), "in", Date.now() - now, "ms");
+  });
+});
diff --git a/packages/ssz/test/unit/merkleize.test.ts b/packages/ssz/test/unit/merkleize.test.ts
index 6b996c8c..f728a877 100644
--- a/packages/ssz/test/unit/merkleize.test.ts
+++ b/packages/ssz/test/unit/merkleize.test.ts
@@ -1,5 +1,7 @@
 import {expect} from "chai";
-import {bitLength, maxChunksToDepth, nextPowerOf2} from "../../src/util/merkleize";
+import {bitLength, maxChunksToDepth, merkleize, mixInLength, nextPowerOf2} from "../../src/util/merkleize";
+import {merkleizeInto, LeafNode} from "@chainsafe/persistent-merkle-tree";
+import {zeroHash} from "../../src/util/zeros";
 
 describe("util / merkleize / bitLength", () => {
   const bitLengthByIndex = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4];
@@ -30,3 +32,35 @@ describe("util / merkleize / nextPowerOf2", () => {
     });
   }
 });
+
+describe("util / merkleize / mixInLength", () => {
+  const root = Buffer.alloc(32, 1);
+  const lengths = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+  for (const length of lengths) {
+    it(`mixInLength(${length})`, () => {
+      const mixInLengthBuffer = Buffer.alloc(64);
+      mixInLengthBuffer.set(root, 0);
+      mixInLengthBuffer.writeUIntLE(length, 32, 6);
+      const finalRoot = new Uint8Array(32);
+      merkleizeInto(mixInLengthBuffer, 2, finalRoot, 0);
+      const expectedRoot = mixInLength(root, length);
+      expect(finalRoot).to.be.deep.equal(expectedRoot);
+    });
+  }
+});
+
+describe("merkleize should be equal to merkleizeInto of hasher", () => {
+  const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
+  for (const numNode of numNodes) {
+    it(`merkleize for ${numNode} nodes`, () => {
+      const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+      const data = Buffer.concat(nodes.map((node) => node.root));
+      const padData = numNode % 2 === 1 ? 
Buffer.concat([data, zeroHash(0)]) : data;
+      const roots = nodes.map((node) => node.root);
+      const expectedRoot = Buffer.alloc(32);
+      const chunkCount = Math.max(numNode, 1);
+      merkleizeInto(padData, chunkCount, expectedRoot, 0);
+      expect(merkleize(roots, chunkCount)).to.be.deep.equal(expectedRoot);
+    });
+  }
+});

From 6c8e27ffa325d1f15a1ebc94561c31725ecae708 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Thu, 4 Jul 2024 10:26:07 +0700
Subject: [PATCH 073/113] chore: benchmark BeaconBlock hashTreeRoot()

---
 .../ssz/test/perf/eth2/beaconBlock.test.ts    | 94 +++++++++++++++++++
 1 file changed, 94 insertions(+)
 create mode 100644 packages/ssz/test/perf/eth2/beaconBlock.test.ts

diff --git a/packages/ssz/test/perf/eth2/beaconBlock.test.ts b/packages/ssz/test/perf/eth2/beaconBlock.test.ts
new file mode 100644
index 00000000..56293a01
--- /dev/null
+++ b/packages/ssz/test/perf/eth2/beaconBlock.test.ts
@@ -0,0 +1,94 @@
+import {itBench, setBenchOpts} from "@dapplion/benchmark";
+import {ValueWithCachedPermanentRoot, symbolCachedPermanentRoot} from "../../../src/util/merkleize";
+import {deneb, ssz} from "../../lodestarTypes";
+import {preset} from "../../lodestarTypes/params";
+import {BitArray, toHexString} from "../../../src";
+const {MAX_ATTESTATIONS, MAX_DEPOSITS, MAX_VOLUNTARY_EXITS, MAX_BLS_TO_EXECUTION_CHANGES} = preset;
+
+describe("Benchmark BeaconBlock.hashTreeRoot()", function () {
+  setBenchOpts({
+    minMs: 10_000,
+  });
+
+  const block = ssz.deneb.BeaconBlock.defaultValue();
+  for (let i = 0; i < MAX_ATTESTATIONS; i++) {
+    block.body.attestations.push({
+      aggregationBits: BitArray.fromBoolArray(Array.from({length: 64}, () => true)),
+      data: {
+        slot: 1,
+        index: 1,
+        beaconBlockRoot: Buffer.alloc(32, 1),
+        source: {
+          epoch: 1,
+          root: Buffer.alloc(32, 1),
+        },
+        target: {
+          epoch: 1,
+          root: Buffer.alloc(32, 1),
+        },
+      },
+      signature: Buffer.alloc(96, 1),
+    });
+  }
+  for (let i = 0; i < MAX_DEPOSITS; i++) {
+    block.body.deposits.push({
+      proof: ssz.phase0.Deposit.fields.proof.defaultValue(),
+      data: {
+        pubkey: Buffer.alloc(48, 1),
+        withdrawalCredentials: Buffer.alloc(32, 1),
+        amount: 32 * 1e9,
+        signature: Buffer.alloc(96, 1),
+      },
+    });
+  }
+  for (let i = 0; i < MAX_VOLUNTARY_EXITS; i++) {
+    block.body.voluntaryExits.push({
+      signature: Buffer.alloc(96, 1),
+      message: {
+        epoch: 1,
+        validatorIndex: 1,
+      },
+    });
+  }
+  // common data on mainnet as of Jun 2024
+  const numTransaction = 200;
+  const transactionLen = 500;
+  for (let i = 0; i < numTransaction; i++) {
+    block.body.executionPayload.transactions.push(Buffer.alloc(transactionLen, 1));
+  }
+  for (let i = 0; i < MAX_BLS_TO_EXECUTION_CHANGES; i++) {
+    block.body.blsToExecutionChanges.push({
+      signature: Buffer.alloc(96, 1),
+      message: {
+        validatorIndex: 1,
+        fromBlsPubkey: Buffer.alloc(48, 1),
+        toExecutionAddress: Buffer.alloc(20, 1),
+      },
+    });
+  }
+
+  const root = ssz.deneb.BeaconBlock.hashTreeRoot(block);
+  console.log("BeaconBlock.hashTreeRoot() root", toHexString(root));
+  itBench({
+    id: `Deneb BeaconBlock.hashTreeRoot(), numTransaction=${numTransaction}`,
+    beforeEach: () => {
+      clearCachedRoots(block);
+      return block;
+    },
+    fn: (block: deneb.BeaconBlock) => {
+      ssz.deneb.BeaconBlock.hashTreeRoot(block);
+    },
+  });
+});
+
+function clearCachedRoots(block: deneb.BeaconBlock): void {
+  (block as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined;
+  (block.body as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined;
+  const attestations = block.body.attestations;
+  for (const 
attestation of attestations) { + (attestation.data as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined; + } + for (const exit of block.body.voluntaryExits) { + (exit as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined; + } +} From 49846bf8cd3b141b0665d28d1564b4d680ae8f29 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 4 Jul 2024 11:15:19 +0700 Subject: [PATCH 074/113] fix: correct cached root --- packages/ssz/src/type/composite.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index c21d7c87..52fcb1d1 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -239,7 +239,7 @@ export abstract class CompositeType extends Type { const merkleBytes = this.getChunkBytes(value); merkleizeInto(merkleBytes, this.maxChunkCount, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.slice(offset, 32); + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.slice(offset, offset + 32); } } From 100183303b6e18fa8d0483f8a18c14f53a3635a3 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 4 Jul 2024 15:23:34 +0700 Subject: [PATCH 075/113] fix: improve digestNLevel to hash to input data --- .../src/hasher/as-sha256.ts | 66 +-------------- .../src/hasher/hashtree.ts | 80 +------------------ .../src/hasher/index.ts | 4 +- .../src/hasher/noble.ts | 19 +---- .../src/hasher/types.ts | 6 +- .../persistent-merkle-tree/src/hasher/util.ts | 42 +++++++++- .../test/perf/hasher.test.ts | 20 ----- .../test/unit/hasher.test.ts | 20 +---- .../phase0/viewDU/listValidator.ts | 37 +++++---- .../lodestarTypes/phase0/viewDU/validator.ts | 30 ++----- .../lodestarTypes/phase0/validator.test.ts | 8 +- .../phase0/viewDU/validatorNodeStruct.test.ts | 10 +-- 12 files changed, 95 insertions(+), 247 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index b9aeef85..6a5b7fa5 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -2,80 +2,22 @@ import { digest2Bytes32, digest64HashObjectsInto, digest64HashObjects, - HashObject, batchHash4HashObjectInputs, hashInto, } from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {HashComputation, Node} from "../node"; -import {merkleize} from "./util"; - -// each validator needs to digest 8 chunks of 32 bytes = 4 hashes -// support up to 4 validators -const MAX_HASH = 16; -const MAX_INPUT_SIZE = MAX_HASH * 64; -const buffer = new Uint8Array(MAX_INPUT_SIZE); +import {doDigestNLevel, doMerkleizeInto} from "./util"; export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects: digest64HashObjectsInto, - // TODO - batch: deduplicate with hashtree merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return merkleize(data, padFor, output, offset, hashInto); - }, - // given nLevel = 3 - // digest multiple of 8 chunks = 256 bytes - // the result is multiple of 1 chunk = 32 bytes - // this is the same to hashTreeRoot() of multiple validators - digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { - let inputLength = data.length; - const bytesInBatch = Math.pow(2, nLevel) * 32; - if (nLevel < 1) { - throw new Error(`Invalid nLevel, expect to be greater than 0, got 
${nLevel}`); - } - if (inputLength % bytesInBatch !== 0) { - throw new Error( - `Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}` - ); - } - if (inputLength > MAX_INPUT_SIZE) { - throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); - } - - buffer.set(data, 0); - for (let i = nLevel; i > 0; i--) { - const outputLength = Math.floor(inputLength / 2); - const hashInput = buffer.subarray(0, inputLength); - const hashOutput = buffer.subarray(0, outputLength); - hashInto(hashInput, hashOutput); - inputLength = outputLength; - } - - // the result is unsafe as it will be modified later, consumer should save the result if needed - return buffer.subarray(0, inputLength); + return doMerkleizeInto(data, padFor, output, offset, hashInto); }, - batchHashObjects: (inputs: HashObject[]) => { - // as-sha256 uses SIMD for batch hash - if (inputs.length === 0) { - return []; - } else if (inputs.length % 2 !== 0) { - throw new Error(`Expect inputs.length to be even, got ${inputs.length}`); - } - - const batch = Math.floor(inputs.length / 8); - const outputs = new Array(); - for (let i = 0; i < batch; i++) { - const outs = batchHash4HashObjectInputs(inputs.slice(i * 8, i * 8 + 8)); - outputs.push(...outs); - } - - for (let i = batch * 8; i < inputs.length; i += 2) { - const output = digest64HashObjects(inputs[i], inputs[i + 1]); - outputs.push(output); - } - - return outputs; + digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { + return doDigestNLevel(data, nLevel, hashInto); }, executeHashComputations: (hashComputations: Array) => { for (let level = hashComputations.length - 1; level >= 0; level--) { diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 5a99c7b2..08b37bdf 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,9 +1,8 @@ import {hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; import {HashComputation, Node} from "../node"; -import {byteArrayToHashObject} from "@chainsafe/as-sha256"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {merkleize} from "./util"; +import {doDigestNLevel, doMerkleizeInto} from "./util"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -38,82 +37,11 @@ export const hasher: Hasher = { hashInto(hash64Input, hash64Output); byteArrayIntoHashObject(hash64Output, parent); }, - // input data is unsafe because it's modified - // if its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need - // a new memory allocation here (even through we don't need it if padFor = 1) - // TODO - batch: deduplicate with as-sha256 merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return merkleize(data, padFor, output, offset, hashInto); + return doMerkleizeInto(data, padFor, output, offset, hashInto); }, - // given nLevel = 3 - // digest multiple of 8 chunks = 256 bytes - // the result is multiple of 1 chunk = 32 bytes - // this is the same to hashTreeRoot() of multiple validators - digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { - let inputLength = data.length; - const bytesInBatch = Math.pow(2, nLevel) * 32; - if (nLevel < 1) { - throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`); - } - if (inputLength % bytesInBatch !== 
0) { - throw new Error( - `Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}` - ); - } - if (inputLength > MAX_INPUT_SIZE) { - throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`); - } - - let outputLength = Math.floor(inputLength / 2); - - uint8Input.set(data, 0); - // hash into same buffer - let bufferIn = uint8Input.subarray(0, inputLength); - for (let i = nLevel; i > 0; i--) { - const bufferOut = bufferIn.subarray(0, outputLength); - hashInto(bufferIn, bufferOut); - bufferIn = bufferOut; - inputLength = outputLength; - outputLength = Math.floor(inputLength / 2); - } - - // the result is unsafe as it will be modified later, consumer should save the result if needed - return bufferIn; - }, - // eslint-disable-next-line @typescript-eslint/no-unused-vars - batchHashObjects(inputs: HashObject[]): HashObject[] { - if (inputs.length === 0) { - return []; - } - if (inputs.length % 2 !== 0) { - throw new Error("inputs length must be even"); - } - - const batch = PARALLEL_FACTOR * 2; - const outHashObjects: HashObject[] = []; - for (const [i, hashInput] of inputs.entries()) { - const indexInBatch = i % batch; - hashObjectToUint32Array(hashInput, uint32Input, indexInBatch * 8); - if (indexInBatch === batch - 1) { - hashInto(uint8Input, uint8Output); - for (let j = 0; j < batch / 2; j++) { - outHashObjects.push(byteArrayToHashObject(uint8Output.subarray(j * 32, (j + 1) * 32))); - } - } - } - - // hash remaining - const remaining = inputs.length % batch; - if (remaining > 0) { - const remainingInput = uint8Input.subarray(0, remaining * 32); - const remainingOutput = uint8Output.subarray(0, remaining * 16); - hashInto(remainingInput, remainingOutput); - for (let i = 0; i < remaining / 2; i++) { - outHashObjects.push(byteArrayToHashObject(remainingOutput.subarray(i * 32, (i + 1) * 32))); - } - } - - return outHashObjects; + digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { + return doDigestNLevel(data, nLevel, hashInto); }, executeHashComputations(hashComputations: Array): void { for (let level = hashComputations.length - 1; level >= 0; level--) { diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 73614951..886312ab 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -31,8 +31,8 @@ export function digest64(a: Uint8Array, b: Uint8Array): Uint8Array { return hasher.digest64(a, b); } -export function digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array { - return hasher.digestNLevelUnsafe(data, nLevel); +export function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { + return hasher.digestNLevel(data, nLevel); } export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index da2938b3..1afd347e 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -1,5 +1,5 @@ import {sha256} from "@noble/hashes/sha256"; -import {digest64HashObjects, HashObject, byteArrayIntoHashObject} from "@chainsafe/as-sha256"; +import {digest64HashObjects, byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; import {hashObjectToUint8Array} from "./util"; @@ -14,24 +14,9 @@ 
export const hasher: Hasher = {
   merkleizeInto(): void {
     throw new Error("Not implemented");
   },
-  digestNLevelUnsafe(): Uint8Array {
+  digestNLevel(): Uint8Array {
     throw new Error("Not implemented");
   },
-  batchHashObjects: (inputs: HashObject[]) => {
-    // noble does not support batch hash
-    if (inputs.length === 0) {
-      return [];
-    } else if (inputs.length % 2 !== 0) {
-      throw new Error(`Expect inputs.length to be even, got ${inputs.length}`);
-    }
-
-    const outputs = new Array<HashObject>();
-    for (let i = 0; i < inputs.length; i += 2) {
-      const output = digest64HashObjects(inputs[i], inputs[i + 1]);
-      outputs.push(output);
-    }
-    return outputs;
-  },
   executeHashComputations: (hashComputations) => {
     for (let level = hashComputations.length - 1; level >= 0; level--) {
       const hcArr = hashComputations[level];
diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts
index 558ef6fd..eb7e4342 100644
--- a/packages/persistent-merkle-tree/src/hasher/types.ts
+++ b/packages/persistent-merkle-tree/src/hasher/types.ts
@@ -25,11 +25,7 @@ export type Hasher = {
    * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes.
    * The result is unsafe as it will be overwritten by the next call.
    */
-  digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array;
-  /**
-   * Batch hash 2 * n HashObjects, return n HashObjects output
-   */
-  batchHashObjects(inputs: HashObject[]): HashObject[];
+  digestNLevel(data: Uint8Array, nLevel: number): Uint8Array;
   /**
    * Execute a batch of HashComputations
   */
diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts
index 9d063e3c..a028253c 100644
--- a/packages/persistent-merkle-tree/src/hasher/util.ts
+++ b/packages/persistent-merkle-tree/src/hasher/util.ts
@@ -13,7 +13,13 @@ export function uint8ArrayToHashObject(byteArr: Uint8Array): HashObject {
 
 type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void;
 
-export function merkleize(
+/**
+ * Input data is unsafe because it's modified.
+ * If its chunk count is not even, it needs to be appended with zero hash at layer 0 so that we don't need
+ * a new memory allocation here (even though we don't need it if padFor = 1).
+ * The Uint8Array(32) result will be written to output at offset.
+ */
+export function doMerkleizeInto(
   data: Uint8Array,
   padFor: number,
   output: Uint8Array,
@@ -61,3 +67,37 @@ export function merkleize(
 
   output.set(bufferIn.subarray(0, 32), offset);
 }
+
+/**
+ * Input data is unsafe because it's modified.
+ * Given nLevel = 3, digest multiples of 8 chunks = 256 bytes,
+ * the result is a multiple of 1 chunk = 32 bytes.
+ * This is the same as hashTreeRoot() of multiple validators.
+ */
+export function doDigestNLevel(data: Uint8Array, nLevel: number, hashInto: HashIntoFn): Uint8Array {
+  let inputLength = data.length;
+  const bytesInBatch = Math.pow(2, nLevel) * 32;
+  if (nLevel < 1) {
+    throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`);
+  }
+  if (inputLength % bytesInBatch !== 0) {
+    throw new Error(
+      `Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`
+    );
+  }
+
+  let outputLength = Math.floor(inputLength / 2);
+
+  // hash into the same buffer
+  let bufferIn = data;
+  for (let i = nLevel; i > 0; i--) {
+    const bufferOut = bufferIn.subarray(0, outputLength);
+    hashInto(bufferIn, bufferOut);
+    bufferIn = bufferOut;
+    inputLength = outputLength;
+    outputLength = Math.floor(inputLength / 2);
+  }
+
+  return bufferIn;
+}
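Note: a minimal sketch of how the two helpers above are consumed through the package's
public exports (assuming the as-sha256 or hashtree backend is active, since the noble
backend throws "Not implemented" for both; the sizes and sample values are illustrative only):

    import {digestNLevel, merkleizeInto} from "@chainsafe/persistent-merkle-tree";

    // 2 validators x 8 chunks x 32 bytes, merkleized 3 levels in one call.
    // digestNLevel halves the buffer in place: 512 -> 256 -> 128 -> 64 bytes.
    const chunks = new Uint8Array(2 * 8 * 32).fill(1);
    // the returned view aliases `chunks`, so copy it before the buffer is reused
    const validatorRoots = digestNLevel(chunks, 3).slice();
    // validatorRoots.length === 64, i.e. one 32-byte root per validator

    // merkleizeInto pads with zero hashes up to padFor chunks and writes the
    // 32-byte root into `out` at the given offset; with a 64-byte buffer holding
    // a root in chunk 0 and a little-endian length in chunk 1 this is mixInLength
    const mixInChunks = new Uint8Array(64);
    const out = new Uint8Array(32);
    merkleizeInto(mixInChunks, 2, out, 0);
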
diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index b30b7dab..d71a0948 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -48,26 +48,6 @@ describe("hasher", function () { runsFactor, }); - // TODO: benchmark for this test is not stable, if it runs alone it's 20% - 30% faster - itBench.skip({ - id: `batchHashObjects - ${hasher.name}`, - before: () => { - const hashObjects: HashObject[] = []; - for (let i = 0; i < iterations; i++) { - hashObjects.push(uint8ArrayToHashObject(root1)); - hashObjects.push(uint8ArrayToHashObject(root2)); - } - return hashObjects; - }, - beforeEach: (hashObjects) => hashObjects, - fn: (hashObjects: HashObject[]) => { - for (let i = 0; i < runsFactor; i++) { - hasher.batchHashObjects(hashObjects); - } - }, - runsFactor: 10, - }); - itBench({ id: `executeHashComputations - ${hasher.name}`, beforeEach: () => { diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 8707578c..dd9e3629 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -57,19 +57,6 @@ describe("hashers", function () { expectEqualHex(hash1, hash3); }); - it("all hashers should return the same values from batchHashObjects", () => { - const hashObjects = linspace(254) - .map((num) => Buffer.alloc(32, num)) - .map(uint8ArrayToHashObject); - const results1 = nobleHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array); - const results2 = asSha256Hasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array); - const results3 = hashtreeHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array); - Object.values(results1).forEach((result1, i) => { - expectEqualHex(result1, results2[i]); - expectEqualHex(result1, results3[i]); - }); - }); - describe("all hashers should return the same values from executeHashComputations", () => { for (const hasher of hashers) { it(hasher.name, () => { @@ -82,16 +69,15 @@ describe("hashers", function () { }); }); -describe("hasher.digestNLevelUnsafe", function () { +describe("hasher.digestNLevel", function () { const hashers = [hashtreeHasher, asSha256Hasher]; for (const hasher of hashers) { const numValidators = [1, 2, 3, 4]; for (const numValidator of numValidators) { - it (`${hasher.name} digestNLevelUnsafe ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => { + it (`${hasher.name} digestNLevel ${numValidator} validators = ${8 * numValidator} chunk(s)`, () => { const nodes = Array.from({length: 8 * numValidator}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i + numValidator))); const hashInput = Buffer.concat(nodes.map((node) => node.root)); - // slice() because output is unsafe - const hashOutput = hasher.digestNLevelUnsafe(hashInput, 3).slice(); + const hashOutput = hasher.digestNLevel(hashInput, 3).slice(); for (let i = 0; i < numValidator; i++) { const root = subtreeFillToContents(nodes.slice(i * 8, (i + 1) * 8), 3).root; expectEqualHex(hashOutput.subarray(i * 32, (i + 1) * 32), root); diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index 487b8659..d2f045fe 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -1,4 +1,4 @@ -import 
{HashComputationGroup, Node, digestNLevelUnsafe, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; import {ListCompositeType} from "../../../../src/type/listComposite"; import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite"; import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite"; @@ -28,6 +28,15 @@ const level4BytesArr: Uint8Array[] = []; for (let i = 0; i < PARALLEL_FACTOR; i++) { level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32)); } +const pubkeyRoots: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + pubkeyRoots.push(batchLevel4Bytes.subarray(i * 32, (i + 1) * 32)); +} + +const validatorRoots: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + validatorRoots.push(batchLevel3Bytes.subarray(i * 32, (i + 1) * 32)); +} export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { constructor( @@ -55,33 +64,29 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { const viewIndex = indicesChanged[i - j]; const indexInBatch = (i - j) % PARALLEL_FACTOR; - const validatorRoot = validatorRoots.subarray(indexInBatch * 32, (indexInBatch + 1) * 32); const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; - viewChanged.commitToRoot(validatorRoot); + viewChanged.commitToRoot(validatorRoots[indexInBatch]); nodesChanged.push({index: viewIndex, node: viewChanged.node}); // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal this.nodes[viewIndex] = viewChanged.node; diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts index d606ec7f..18ce06ed 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts @@ -3,7 +3,7 @@ import {BranchNodeStruct} from "../../../../src/branchNodeStruct"; import {ContainerTypeGeneric} from "../../../../src/view/container"; import {TreeViewDU} from "../../../../src/viewDU/abstract"; import {ValidatorType} from "../validator"; -import {Node, digestNLevelUnsafe} from "@chainsafe/persistent-merkle-tree"; +import {Node} from "@chainsafe/persistent-merkle-tree"; import {ByteViews} from "../../../../src/type/abstract"; type Validator = { pubkey: Uint8Array; @@ -23,11 +23,7 @@ const NUMBER_2_POW_32 = 2 ** 32; const UINT32_SIZE = 4; const CHUNK_SIZE = 32; -// validator has 8 nodes at level 3 -const singleLevel3Bytes = new Uint8Array(8 * 32); -const singleLevel3ByteView = {uint8Array: singleLevel3Bytes, dataView: new DataView(singleLevel3Bytes.buffer)}; -// validator has 2 nodes at level 4 (pubkey has 48 bytes = 2 * nodes) -const singleLevel4Bytes = new Uint8Array(2 * 32); +const temporaryRoot = new Uint8Array(32); /** * A specific ViewDU for validator designed to be efficient to batch hash and efficient to create tree @@ -56,19 +52,9 @@ export class ValidatorTreeViewDU extends TreeViewDU { const dataView = new DataView(level3.buffer, level3.byteOffset, level3.byteLength); // pubkey takes 2 chunks, has to go to another level const level4 = new Uint8Array(32 * 2); - validatorToMerkleBytes({uint8Array: level3, dataView}, level4, validator); + validatorToChunkBytes({uint8Array: level3, dataView}, level4, validator); // additional slice() call make it easier to debug - const pubkeyRoot = 
digestNLevelUnsafe(level4, 1).slice(); + const pubkeyRoot = digestNLevel(level4, 1).slice(); level3.set(pubkeyRoot, 0); - const root = digestNLevelUnsafe(level3, 3).slice(); + const root = digestNLevel(level3, 3).slice(); const expectedRootNode2 = Validator.value_toTree(validator); expect(root).to.be.deep.equals(expectedRoot0); expect(root).to.be.deep.equals(expectedRootNode2.root); From 59a1221ccf26bf977ccebf1dfa45afa68e78bc41 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 4 Jul 2024 15:48:34 +0700 Subject: [PATCH 076/113] feat: support offset in byteArrayIntoHashObject --- packages/persistent-merkle-tree/src/hasher/hashtree.ts | 6 +++--- packages/persistent-merkle-tree/src/hasher/noble.ts | 2 +- packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 08b37bdf..e84ccb4a 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -35,7 +35,7 @@ export const hasher: Hasher = { digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void { hashObjectsToUint32Array(left, right, uint32Input); hashInto(hash64Input, hash64Output); - byteArrayIntoHashObject(hash64Output, parent); + byteArrayIntoHashObject(hash64Output, 0, parent); }, merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { return doMerkleizeInto(data, padFor, output, offset, hashInto); @@ -66,7 +66,7 @@ export const hasher: Hasher = { if (indexInBatch === PARALLEL_FACTOR - 1) { hashInto(uint8Input, uint8Output); for (const [j, destNode] of destNodes.entries()) { - byteArrayIntoHashObject(uint8Output.subarray(j * 32, (j + 1) * 32), destNode); + byteArrayIntoHashObject(uint8Output, j * 32, destNode); } destNodes = []; } @@ -80,7 +80,7 @@ export const hasher: Hasher = { hashInto(remainingInput, remainingOutput); // destNodes was prepared above for (const [i, destNode] of destNodes.entries()) { - byteArrayIntoHashObject(remainingOutput.subarray(i * 32, (i + 1) * 32), destNode); + byteArrayIntoHashObject(remainingOutput, i * 32, destNode); } } } diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 1afd347e..6d379006 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -9,7 +9,7 @@ export const hasher: Hasher = { name: "noble", digest64, digest64HashObjects: (left, right, parent) => { - byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), parent); + byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), 0, parent); }, merkleizeInto(): void { throw new Error("Not implemented"); diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts index 18ce06ed..68bb64cd 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts @@ -54,7 +54,7 @@ export class ValidatorTreeViewDU extends TreeViewDU; } - byteArrayIntoHashObject(root, this._rootNode); + byteArrayIntoHashObject(root, 0, this._rootNode); this.valueChanged = null; } From e5918664a758315bbb678e704d1e59c801de7b1c Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 5 Jul 2024 09:00:51 +0700 
Subject: [PATCH 077/113] feat: ssz v0.17.0 --- packages/ssz/package.json | 2 +- yarn.lock | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/packages/ssz/package.json b/packages/ssz/package.json index 404079fd..0f97a60f 100644 --- a/packages/ssz/package.json +++ b/packages/ssz/package.json @@ -4,7 +4,7 @@ "license": "Apache-2.0", "author": "ChainSafe Systems", "homepage": "https://github.com/chainsafe/ssz", - "version": "0.16.0", + "version": "0.17.0", "main": "lib/index.js", "files": [ "lib/**/*.d.ts", diff --git a/yarn.lock b/yarn.lock index b74bf846..466aca7e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1252,6 +1252,38 @@ core-js "2.6.10" require-resolve "0.0.2" +"@chainsafe/hashtree-darwin-arm64@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-darwin-arm64/-/hashtree-darwin-arm64-1.0.1.tgz#e2c60090c56a1c8dc8bdff329856184ad32e4cd5" + integrity sha512-+KmEgQMpO7FDL3klAcpXbQ4DPZvfCe0qSaBBrtT4vLF8V1JGm3sp+j7oibtxtOsLKz7nJMiK1pZExi7vjXu8og== + +"@chainsafe/hashtree-linux-arm64-gnu@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-arm64-gnu/-/hashtree-linux-arm64-gnu-1.0.1.tgz#49d2604a6c9106219448af3eaf76f4da6e44daca" + integrity sha512-p1hnhGq2aFY+Zhdn1Q6L/6yLYNKjqXfn/Pc8jiM0e3+Lf/hB+yCdqYVu1pto26BrZjugCFZfupHaL4DjUTDttw== + +"@chainsafe/hashtree-linux-x64-gnu@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree-linux-x64-gnu/-/hashtree-linux-x64-gnu-1.0.1.tgz#31c5a2bb196b78f04f2bf4bfb5c1bf1f3331f071" + integrity sha512-uCIGuUWuWV0LiB4KLMy6JFa7Jp6NmPl3hKF5BYWu8TzUBe7vSXMZfqTzGxXPggFYN2/0KymfRdG9iDCOJfGRqg== + +"@chainsafe/hashtree@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@chainsafe/hashtree/-/hashtree-1.0.1.tgz#587666a261e1da6a37904095ce875fddc53c7c89" + integrity sha512-bleu9FjqBeR/l6W1u2Lz+HsS0b0LLJX2eUt3hOPBN7VqOhidx8wzkVh2S7YurS+iTQtfdK4K5QU9tcTGNrGwDg== + optionalDependencies: + "@chainsafe/hashtree-darwin-arm64" "1.0.1" + "@chainsafe/hashtree-linux-arm64-gnu" "1.0.1" + "@chainsafe/hashtree-linux-x64-gnu" "1.0.1" + +"@chainsafe/ssz@0.16.0": + version "0.16.0" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.16.0.tgz#262c491ac037777a16e8d8db479da2ba27539b8d" + integrity sha512-CgTDyrkbAKvrKwHxPT5rerXAHP3NB+uOvpnN9Gn8aJ/4TGOKhOboj4131bSFUZ679uPJ6pu6391cvInuOdrglw== + dependencies: + "@chainsafe/as-sha256" "^0.4.2" + "@chainsafe/persistent-merkle-tree" "^0.7.2" + "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" From 9779b0a3a2b0911f553ca864ff010abf70ec5289 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 6 Jul 2024 13:33:45 +0700 Subject: [PATCH 078/113] fix: only overwrite ListValidatorViewDU --- packages/ssz/src/branchNodeStruct.ts | 9 +- packages/ssz/src/type/containerNodeStruct.ts | 19 +- packages/ssz/src/view/containerNodeStruct.ts | 4 +- .../ssz/src/viewDU/containerNodeStruct.ts | 33 ++- .../test/lodestarTypes/phase0/validator.ts | 60 +++- .../phase0/viewDU/listValidator.ts | 42 ++- .../lodestarTypes/phase0/viewDU/validator.ts | 265 ------------------ .../lodestarTypes/phase0/validator.test.ts | 64 +++-- .../phase0/viewDU/validatorNodeStruct.test.ts | 41 --- 9 files changed, 167 insertions(+), 370 deletions(-) delete mode 100644 packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts delete mode 100644 
packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts diff --git a/packages/ssz/src/branchNodeStruct.ts b/packages/ssz/src/branchNodeStruct.ts index 471716c4..c99779a4 100644 --- a/packages/ssz/src/branchNodeStruct.ts +++ b/packages/ssz/src/branchNodeStruct.ts @@ -9,15 +9,18 @@ import {hashObjectToUint8Array, Node} from "@chainsafe/persistent-merkle-tree"; * expensive because the tree has to be recreated every time. */ export class BranchNodeStruct extends Node { - constructor(private readonly valueToNode: (value: T) => Node, readonly value: T) { + constructor( + private readonly valueToNode: (value: T) => Node, + private readonly hashTreeRootInto: (value: T, node: Node) => void, + readonly value: T + ) { // First null value is to save an extra variable to check if a node has a root or not super(null as unknown as number, 0, 0, 0, 0, 0, 0, 0); } get rootHashObject(): HashObject { if (this.h0 === null) { - const node = this.valueToNode(this.value); - super.applyHash(node.rootHashObject); + this.hashTreeRootInto(this.value, this); } return this; } diff --git a/packages/ssz/src/type/containerNodeStruct.ts b/packages/ssz/src/type/containerNodeStruct.ts index 76147f12..c884eea9 100644 --- a/packages/ssz/src/type/containerNodeStruct.ts +++ b/packages/ssz/src/type/containerNodeStruct.ts @@ -1,4 +1,5 @@ import {Node} from "@chainsafe/persistent-merkle-tree"; +import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import {Type, ByteViews} from "./abstract"; import {isCompositeType} from "./composite"; import {ContainerType, ContainerOptions, renderContainerTypeName} from "./container"; @@ -24,6 +25,9 @@ import {ValueOfFields} from "../view/container"; * This tradeoff is good for data that is read often, written rarely, and consumes a lot of memory (i.e. 
Validator) */ export class ContainerNodeStructType>> extends ContainerType { + // Temporary root to avoid allocating new Uint8Array every time + private temporaryRoot = new Uint8Array(32); + constructor(readonly fields: Fields, opts?: ContainerOptions) { super(fields, { // Overwrite default "Container" typeName @@ -70,7 +74,7 @@ export class ContainerNodeStructType tree_deserializeFromBytes(data: ByteViews, start: number, end: number): Node { const value = this.value_deserializeFromBytes(data, start, end); - return new BranchNodeStruct(this.valueToTree.bind(this), value); + return new BranchNodeStruct(this.valueToTree.bind(this), this.computeRootInto.bind(this), value); } // Proofs @@ -91,7 +95,7 @@ export class ContainerNodeStructType super.tree_serializeToBytes({uint8Array, dataView}, 0, node); const value = this.value_deserializeFromBytes({uint8Array, dataView}, 0, uint8Array.length); return { - node: new BranchNodeStruct(this.valueToTree.bind(this), value), + node: new BranchNodeStruct(this.valueToTree.bind(this), this.computeRootInto.bind(this), value), done: true, }; } @@ -103,7 +107,7 @@ export class ContainerNodeStructType } value_toTree(value: ValueOfFields): Node { - return new BranchNodeStruct(this.valueToTree.bind(this), value); + return new BranchNodeStruct(this.valueToTree.bind(this), this.computeRootInto.bind(this), value); } private valueToTree(value: ValueOfFields): Node { @@ -112,4 +116,13 @@ export class ContainerNodeStructType this.value_serializeToBytes({uint8Array, dataView}, 0, value); return super.tree_deserializeFromBytes({uint8Array, dataView}, 0, uint8Array.length); } + + private computeRootInto(value: ValueOfFields, node: Node): void { + if (node.h0 !== null) { + return; + } + + this.hashTreeRootInto(value, this.temporaryRoot, 0); + byteArrayIntoHashObject(this.temporaryRoot, 0, node); + } } diff --git a/packages/ssz/src/view/containerNodeStruct.ts b/packages/ssz/src/view/containerNodeStruct.ts index da83f4d8..c45372bc 100644 --- a/packages/ssz/src/view/containerNodeStruct.ts +++ b/packages/ssz/src/view/containerNodeStruct.ts @@ -60,7 +60,7 @@ export function getContainerTreeViewClass; - this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], newNodeValue); + this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], node["hashTreeRootInto"], newNodeValue); }, }); } @@ -86,7 +86,7 @@ export function getContainerTreeViewClass; - this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], newNodeValue); + this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], node["hashTreeRootInto"], newNodeValue); }, }); } diff --git a/packages/ssz/src/viewDU/containerNodeStruct.ts b/packages/ssz/src/viewDU/containerNodeStruct.ts index 87993801..083e1176 100644 --- a/packages/ssz/src/viewDU/containerNodeStruct.ts +++ b/packages/ssz/src/viewDU/containerNodeStruct.ts @@ -8,7 +8,7 @@ import {TreeViewDU} from "./abstract"; /* eslint-disable @typescript-eslint/member-ordering */ -class ContainerTreeViewDU>> extends TreeViewDU< +export class ContainerNodeStructTreeViewDU>> extends TreeViewDU< ContainerTypeGeneric > { protected valueChanged: ValueOfFields | null = null; @@ -27,10 +27,33 @@ class ContainerTreeViewDU>> extends return; } + get value(): ValueOfFields { + return this.valueChanged ?? this._rootNode.value; + } + + /** + * This ViewDU does not support batch hash by default so we need to compute root immediately. + * Otherwise consumers may call commit() multiple times and not able to compute hashTreeRoot(). 
+ */ commit(): void { if (this.valueChanged === null) { - // this does not suppor batch hash - this._rootNode.root; + this._rootNode.rootHashObject; + return; + } + + const value = this.valueChanged; + this.valueChanged = null; + + this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>; + this._rootNode.rootHashObject; + } + + /** + * Same to commit() without hash, allow to do the batch hash at consumer side, like in ListValidatorViewDU + * of ethereum consensus node. + */ + commitNoHash(): void { + if (this.valueChanged === null) { return; } @@ -38,8 +61,6 @@ class ContainerTreeViewDU>> extends this.valueChanged = null; this._rootNode = this.type.value_toTree(value) as BranchNodeStruct>; - // this does not suppor batch hash - this._rootNode.root; } protected clearCache(): void { @@ -50,7 +71,7 @@ class ContainerTreeViewDU>> extends export function getContainerTreeViewDUClass>>( type: ContainerTypeGeneric ): ContainerTreeViewDUTypeConstructor { - class CustomContainerTreeViewDU extends ContainerTreeViewDU {} + class CustomContainerTreeViewDU extends ContainerNodeStructTreeViewDU {} // Dynamically define prototype methods for (let index = 0; index < type.fieldsEntries.length; index++) { diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts b/packages/ssz/test/lodestarTypes/phase0/validator.ts index 758d95d7..4cd82a83 100644 --- a/packages/ssz/test/lodestarTypes/phase0/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts @@ -2,8 +2,6 @@ import {ByteViews} from "../../../src/type/abstract"; import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct"; import {ValueOfFields} from "../../../src/view/container"; import * as primitiveSsz from "../primitive/sszTypes"; -import {ValidatorTreeViewDU} from "./viewDU/validator"; -import {Node} from "@chainsafe/persistent-merkle-tree"; const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; @@ -17,6 +15,7 @@ const UINT32_SIZE = 4; const PUBKEY_SIZE = 48; const WITHDRAWAL_CREDENTIALS_SIZE = 32; const SLASHED_SIZE = 1; +const CHUNK_SIZE = 32; export const ValidatorType = { pubkey: BLSPubkey, @@ -37,10 +36,6 @@ export class ValidatorNodeStructType extends ContainerNodeStructType +): void { + const { + pubkey, + withdrawalCredentials, + effectiveBalance, + slashed, + activationEligibilityEpoch, + activationEpoch, + exitEpoch, + withdrawableEpoch, + } = value; + const {uint8Array: outputLevel3, dataView} = level3; + + // pubkey = 48 bytes which is 2 * CHUNK_SIZE + level4.set(pubkey, 0); + let offset = CHUNK_SIZE; + outputLevel3.set(withdrawalCredentials, offset); + offset += CHUNK_SIZE; + // effectiveBalance is UintNum64 + dataView.setUint32(offset, effectiveBalance & 0xffffffff, true); + dataView.setUint32(offset + 4, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true); + + offset += CHUNK_SIZE; + dataView.setUint32(offset, slashed ? 
1 : 0, true); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, activationEligibilityEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, activationEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, exitEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, withdrawableEpoch); +} + function writeEpochInf(dataView: DataView, offset: number, value: number): number { if (value === Infinity) { dataView.setUint32(offset, 0xffffffff, true); @@ -81,4 +128,3 @@ function writeEpochInf(dataView: DataView, offset: number, value: number): numbe } return offset; } -export const ValidatorNodeStruct = new ValidatorNodeStructType(); diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index d2f045fe..09bd80b8 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -1,10 +1,11 @@ +import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import {HashComputationGroup, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; import {ListCompositeType} from "../../../../src/type/listComposite"; import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite"; import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite"; -import {ValidatorNodeStructType} from "../validator"; -import {ValidatorTreeViewDU} from "./validator"; +import {ValidatorNodeStructType, ValidatorType, validatorToChunkBytes} from "../validator"; import {ByteViews} from "../../../../src"; +import {ContainerNodeStructTreeViewDU} from "../../../../src/viewDU/containerNodeStruct"; /** * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks @@ -58,6 +59,10 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU + >; const indicesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b); const endBatch = indicesChanged.length - (indicesChanged.length % PARALLEL_FACTOR); // nodesChanged is sorted by index @@ -70,8 +75,10 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { const viewIndex = indicesChanged[i - j]; const indexInBatch = (i - j) % PARALLEL_FACTOR; - const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; - viewChanged.commitToRoot(validatorRoots[indexInBatch]); - nodesChanged.push({index: viewIndex, node: viewChanged.node}); - // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal - this.nodes[viewIndex] = viewChanged.node; + const viewChanged = viewsChanged.get(viewIndex); + if (viewChanged) { + viewChanged.commitNoHash(); + const branchNodeStruct = viewChanged.node; + byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct); + nodesChanged.push({index: viewIndex, node: viewChanged.node}); + // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal + this.nodes[viewIndex] = viewChanged.node; + } } } } @@ -98,11 +109,14 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU> { - protected valueChanged: Validator | null = null; - protected _rootNode: BranchNodeStruct; - - constructor(readonly type: ContainerTypeGeneric, node: Node) { - super(); - this._rootNode = node as BranchNodeStruct; - } - - get node(): Node { - return this._rootNode; - } - - get cache(): void { - return; - } - - commit(): void { - if (this.valueChanged !== null) { - this._rootNode = 
this.type.value_toTree(this.valueChanged) as BranchNodeStruct; - } - - if (this._rootNode.h0 === null) { - const value = this.valueChanged ?? this._rootNode.value; - this.type.hashTreeRootInto(value, temporaryRoot, 0); - byteArrayIntoHashObject(temporaryRoot, 0, this._rootNode); - } - this.valueChanged = null; - } - - get pubkey(): Uint8Array { - return (this.valueChanged || this._rootNode.value).pubkey; - } - - set pubkey(value: Uint8Array) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.pubkey = value.slice(); - } - - get withdrawalCredentials(): Uint8Array { - return (this.valueChanged || this._rootNode.value).withdrawalCredentials; - } - - set withdrawalCredentials(value: Uint8Array) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.withdrawalCredentials = value.slice(); - } - - get effectiveBalance(): number { - return (this.valueChanged || this._rootNode.value).effectiveBalance; - } - - set effectiveBalance(value: number) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.effectiveBalance = value; - } - - get slashed(): boolean { - return (this.valueChanged || this._rootNode.value).slashed; - } - - set slashed(value: boolean) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.slashed = value; - } - - get activationEligibilityEpoch(): number { - return (this.valueChanged || this._rootNode.value).activationEligibilityEpoch; - } - - set activationEligibilityEpoch(value: number) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.activationEligibilityEpoch = value; - } - - get activationEpoch(): number { - return (this.valueChanged || this._rootNode.value).activationEpoch; - } - - set activationEpoch(value: number) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.activationEpoch = value; - } - - get exitEpoch(): number { - return (this.valueChanged || this._rootNode.value).exitEpoch; - } - - set exitEpoch(value: number) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.exitEpoch = value; - } - - get withdrawableEpoch(): number { - return (this.valueChanged || this._rootNode.value).withdrawableEpoch; - } - - set withdrawableEpoch(value: number) { - if (this.valueChanged === null) { - this.valueChanged = this.type.clone(this._rootNode.value); - } - - this.valueChanged.withdrawableEpoch = value; - } - - /** - * Write to level3 and level4 bytes to compute merkle root. Note that this is to compute - * merkle root and it's different from serialization (which is more compressed). - * pub0 + pub1 are at level4, they will be hashed to 1st chunked of level 3 - * then use 8 chunks of level 3 to compute the root hash. 
- * reserved withdr eff sla actElig act exit with - * level 3 |----------|----------|----------|----------|----------|----------|----------|----------| - * - * pub0 pub1 - * level4 |----------|----------| - * - */ - valueToChunkBytes(level3: ByteViews, level4: Uint8Array): void { - if (level3.uint8Array.byteLength !== 8 * CHUNK_SIZE) { - throw Error(`Expected level3 to be 8 * CHUNK_SIZE bytes, got ${level3.uint8Array.byteLength}`); - } - if (level4.length !== 2 * CHUNK_SIZE) { - throw Error(`Expected level4 to be 2 * CHUNK_SIZE bytes, got ${level4.length}`); - } - // in case pushing a new validator to array, valueChanged could be null - const value = this.valueChanged ?? this._rootNode.value; - validatorToChunkBytes(level3, level4, value); - } - - /** - * Batch hash flow: parent will compute hash and call this function - */ - commitToRoot(root: Uint8Array): void { - // this.valueChanged === null means this viewDU is new - if (this.valueChanged !== null) { - this._rootNode = this.type.value_toTree(this.valueChanged) as BranchNodeStruct; - } - byteArrayIntoHashObject(root, 0, this._rootNode); - this.valueChanged = null; - } - - protected clearCache(): void { - this.valueChanged = null; - } - - get name(): string { - return this.type.typeName; - } -} - -/** - * Write to level3 and level4 bytes to compute merkle root. Note that this is to compute - * merkle root and it's different from serialization (which is more compressed). - * pub0 + pub1 are at level4, they will be hashed to 1st chunked of level 3 - * then use 8 chunks of level 3 to compute the root hash. - * reserved withdr eff sla actElig act exit with - * level 3 |----------|----------|----------|----------|----------|----------|----------|----------| - * - * pub0 pub1 - * level4 |----------|----------| - * - */ -export function validatorToChunkBytes(level3: ByteViews, level4: Uint8Array, value: Validator): void { - const { - pubkey, - withdrawalCredentials, - effectiveBalance, - slashed, - activationEligibilityEpoch, - activationEpoch, - exitEpoch, - withdrawableEpoch, - } = value; - const {uint8Array: outputLevel3, dataView} = level3; - - // pubkey = 48 bytes which is 2 * CHUNK_SIZE - level4.set(pubkey, 0); - let offset = CHUNK_SIZE; - outputLevel3.set(withdrawalCredentials, offset); - offset += CHUNK_SIZE; - // effectiveBalance is UintNum64 - dataView.setUint32(offset, effectiveBalance & 0xffffffff, true); - dataView.setUint32(offset + 4, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true); - - offset += CHUNK_SIZE; - // output[offset] = validator.slashed ? 1 : 0; - dataView.setUint32(offset, slashed ? 1 : 0, true); - offset += CHUNK_SIZE; - writeEpochInf(dataView, offset, activationEligibilityEpoch); - offset += CHUNK_SIZE; - writeEpochInf(dataView, offset, activationEpoch); - offset += CHUNK_SIZE; - writeEpochInf(dataView, offset, exitEpoch); - offset += CHUNK_SIZE; - writeEpochInf(dataView, offset, withdrawableEpoch); -} - -/** - * Write an epoch to DataView at offset. 
- */ -function writeEpochInf(dataView: DataView, offset: number, value: number): void { - if (value === Infinity) { - dataView.setUint32(offset, 0xffffffff, true); - offset += UINT32_SIZE; - dataView.setUint32(offset, 0xffffffff, true); - offset += UINT32_SIZE; - } else { - dataView.setUint32(offset, value & 0xffffffff, true); - offset += UINT32_SIZE; - dataView.setUint32(offset, (value / NUMBER_2_POW_32) & 0xffffffff, true); - offset += UINT32_SIZE; - } -} diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts index a9525708..cf01c2f1 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts @@ -1,9 +1,9 @@ import {digestNLevel} from "@chainsafe/persistent-merkle-tree"; import {ContainerType} from "../../../../../ssz/src/type/container"; import {ssz} from "../../../lodestarTypes"; -import {ValidatorType} from "../../../lodestarTypes/phase0/validator"; -import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validator"; +import {ValidatorNodeStruct, ValidatorType, validatorToChunkBytes} from "../../../lodestarTypes/phase0/validator"; import {expect} from "chai"; +import {Validator} from "../../../lodestarTypes/phase0/sszTypes"; const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); @@ -39,34 +39,40 @@ describe("Validator ssz types", function () { expect(root3).to.be.deep.equal(root); } }); +}); - it("ViewDU.commitToHashObject()", () => { - // transform validator from 0 to 1 - // TODO - batch: avoid this type casting - const viewDU = ssz.phase0.Validator.toViewDU(validators[0]) as ValidatorTreeViewDU; - viewDU.effectiveBalance = validators[1].effectiveBalance; - viewDU.slashed = validators[1].slashed; - // same logic to viewDU.commit(); - // validator has 8 nodes at level 3 - const singleLevel3Bytes = new Uint8Array(8 * 32); - const singleLevel3ByteView = {uint8Array: singleLevel3Bytes, dataView: new DataView(singleLevel3Bytes.buffer)}; - // validator has 2 nodes at level 4 (pubkey has 48 bytes = 2 * nodes) - const singleLevel4Bytes = new Uint8Array(2 * 32); - viewDU.valueToChunkBytes(singleLevel3ByteView, singleLevel4Bytes); - // level 4 hash - const pubkeyRoot = digestNLevel(singleLevel4Bytes, 1); - if (pubkeyRoot.length !== 32) { - throw new Error(`Invalid pubkeyRoot length, expect 32, got ${pubkeyRoot.length}`); - } - singleLevel3ByteView.uint8Array.set(pubkeyRoot, 0); - // level 3 hash - const validatorRoot = digestNLevel(singleLevel3ByteView.uint8Array, 3); - if (validatorRoot.length !== 32) { - throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`); +describe("validatorToChunkBytes", function () { + const seedValidator = { + activationEligibilityEpoch: 10, + activationEpoch: 11, + exitEpoch: Infinity, + slashed: false, + withdrawableEpoch: 13, + pubkey: Buffer.alloc(48, 100), + withdrawalCredentials: Buffer.alloc(32, 100), + }; + + const validators = [ + {...seedValidator, effectiveBalance: 31000000000, slashed: false}, + {...seedValidator, effectiveBalance: 32000000000, slashed: true}, + ]; + + it("should populate validator value to merkle bytes", () => { + for (const validator of validators) { + const expectedRoot0 = ValidatorNodeStruct.hashTreeRoot(validator); + // validator has 8 fields + const level3 = new Uint8Array(32 * 8); + const dataView = new DataView(level3.buffer, level3.byteOffset, level3.byteLength); + // 
pubkey takes 2 chunks, has to go to another level + const level4 = new Uint8Array(32 * 2); + validatorToChunkBytes({uint8Array: level3, dataView}, level4, validator); + // additional slice() call make it easier to debug + const pubkeyRoot = digestNLevel(level4, 1).slice(); + level3.set(pubkeyRoot, 0); + const root = digestNLevel(level3, 3).slice(); + const expectedRootNode2 = Validator.value_toTree(validator); + expect(root).to.be.deep.equals(expectedRoot0); + expect(root).to.be.deep.equals(expectedRootNode2.root); } - viewDU.commitToRoot(validatorRoot); - const expectedRoot = ValidatorContainer.hashTreeRoot(validators[1]); - expect(viewDU.node.root).to.be.deep.equal(expectedRoot); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts deleted file mode 100644 index 92f4676f..00000000 --- a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import {digestNLevel} from "@chainsafe/persistent-merkle-tree"; -import {validatorToChunkBytes} from "../../../../lodestarTypes/phase0/viewDU/validator"; -import {ValidatorNodeStruct} from "../../../../lodestarTypes/phase0/validator"; -import {expect} from "chai"; -import {Validator} from "../../../../lodestarTypes/phase0/sszTypes"; - -describe("validatorNodeStruct", () => { - const seedValidator = { - activationEligibilityEpoch: 10, - activationEpoch: 11, - exitEpoch: Infinity, - slashed: false, - withdrawableEpoch: 13, - pubkey: Buffer.alloc(48, 100), - withdrawalCredentials: Buffer.alloc(32, 100), - }; - - const validators = [ - {...seedValidator, effectiveBalance: 31000000000, slashed: false}, - {...seedValidator, effectiveBalance: 32000000000, slashed: true}, - ]; - - it("should populate validator value to merkle bytes", () => { - for (const validator of validators) { - const expectedRoot0 = ValidatorNodeStruct.hashTreeRoot(validator); - // validator has 8 fields - const level3 = new Uint8Array(32 * 8); - const dataView = new DataView(level3.buffer, level3.byteOffset, level3.byteLength); - // pubkey takes 2 chunks, has to go to another level - const level4 = new Uint8Array(32 * 2); - validatorToChunkBytes({uint8Array: level3, dataView}, level4, validator); - // additional slice() call make it easier to debug - const pubkeyRoot = digestNLevel(level4, 1).slice(); - level3.set(pubkeyRoot, 0); - const root = digestNLevel(level3, 3).slice(); - const expectedRootNode2 = Validator.value_toTree(validator); - expect(root).to.be.deep.equals(expectedRoot0); - expect(root).to.be.deep.equals(expectedRootNode2.root); - } - }); -}); From 30976f1df56660e2c0785965e73a88040e70a1ce Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Sat, 6 Jul 2024 13:48:02 +0700 Subject: [PATCH 079/113] fix: publish ContainerNodeStructTreeViewDU --- packages/ssz/src/index.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/ssz/src/index.ts b/packages/ssz/src/index.ts index 5c427f14..c140c60c 100644 --- a/packages/ssz/src/index.ts +++ b/packages/ssz/src/index.ts @@ -30,6 +30,7 @@ export {ValueOfFields, ContainerTypeGeneric} from "./view/container"; export {TreeViewDU} from "./viewDU/abstract"; export {ListCompositeTreeViewDU} from "./viewDU/listComposite"; export {ArrayCompositeTreeViewDUCache} from "./viewDU/arrayComposite"; +export {ContainerNodeStructTreeViewDU} from "./viewDU/containerNodeStruct"; // Values export {BitArray, 
getUint8ByteToBitBooleanArray} from "./value/bitArray";

From c939ee7884f5937e07cb8131969139d8c165ee8a Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Sun, 7 Jul 2024 08:20:26 +0700
Subject: [PATCH 080/113] fix: preallocate HashComputationsGroup

---
 .../src/hasher/as-sha256.ts         |  5 ++
 .../src/hasher/hashtree.ts          |  5 ++
 packages/ssz/src/viewDU/abstract.ts | 46 +++++++++++++++----
 3 files changed, 46 insertions(+), 10 deletions(-)

diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts
index 6a5b7fa5..d7904ca7 100644
--- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts
+++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts
@@ -27,6 +27,11 @@ export const hasher: Hasher = {
         throw Error(`no hash computations for level ${level}`);
       }
 
+      if (hcArr.length === 0) {
+        // nothing to hash
+        continue;
+      }
+
       // HashComputations of the same level are safe to batch
       let src0_0: Node | null = null;
       let src1_0: Node | null = null;
diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
index e84ccb4a..2f3f57bd 100644
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -51,6 +51,11 @@ export const hasher: Hasher = {
         throw Error(`no hash computations for level ${level}`);
       }
 
+      if (hcArr.length === 0) {
+        // nothing to hash
+        continue;
+      }
+
       // size input array to 2 HashObject per computation * 32 bytes per object
       // const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32));
       let destNodes: Node[] = [];
diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts
index 69954660..1398fb20 100644
--- a/packages/ssz/src/viewDU/abstract.ts
+++ b/packages/ssz/src/viewDU/abstract.ts
@@ -2,6 +2,21 @@ import {HashComputationGroup, executeHashComputations} from "@chainsafe/persiste
 import {ByteViews, CompositeType} from "../type/composite";
 import {TreeView} from "../view/abstract";
 
+/**
+ * Always allocating a new HashComputationGroup for each hashTreeRoot() is not great for GC,
+ * because many ViewDUs are unchanged and have already computed their root.
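+ *
+ * Rough sketch of the reuse pattern implemented in hashTreeRoot() below (illustrative only, not the exact code):
+ *   const hashComps = nextHashComps;            // reuse the shared, preallocated group
+ *   this.commit(hashComps);                     // collect hash computations of changed nodes
+ *   // only when the group was actually used, allocate a fresh one and batch-hash
+ *   nextHashComps = {byLevel: [], offset: 0};
+ *   executeHashComputations(hashComps.byLevel);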
+ */ +let nextHashComps: HashComputationGroup = { + byLevel: [], + offset: 0, +}; + +const symbolCachedTreeRoot = Symbol("ssz_cached_tree_root"); + +export type NodeWithCachedTreeRoot = { + [symbolCachedTreeRoot]?: Uint8Array; +}; + /* eslint-disable @typescript-eslint/member-ordering */ /** @@ -50,21 +65,32 @@ export abstract class TreeViewDU 0 || nextHashComps.offset !== 0) { + // preallocate for the next time + nextHashComps = { + byLevel: [], + offset: 0, + }; + executeHashComputations(hashComps.byLevel); + // This makes sure the root node is computed by batch + if (this.node.h0 === null) { + throw Error("Root is not computed by batch"); + } + } - // This makes sure the root node is computed by batch - if (this.node.h0 === null) { - throw Error("Root is not computed by batch"); + const cachedRoot = (this.node as NodeWithCachedTreeRoot)[symbolCachedTreeRoot]; + if (cachedRoot) { + return cachedRoot; + } else { + const root = this.node.root; + (this.node as NodeWithCachedTreeRoot)[symbolCachedTreeRoot] = root; + return root; } - return this.node.root; } /** From 805b350725c5cc387082a43698e3412f14897ceb Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 9 Jul 2024 09:24:01 +0700 Subject: [PATCH 081/113] chore: benchmark merkleize() vs merkleizeInto() --- .../ssz/test/perf/eth2/beaconBlock.test.ts | 2 +- packages/ssz/test/perf/merkleize.test.ts | 20 ++++++++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/packages/ssz/test/perf/eth2/beaconBlock.test.ts b/packages/ssz/test/perf/eth2/beaconBlock.test.ts index 56293a01..523e94ea 100644 --- a/packages/ssz/test/perf/eth2/beaconBlock.test.ts +++ b/packages/ssz/test/perf/eth2/beaconBlock.test.ts @@ -70,7 +70,7 @@ describe("Benchmark BeaconBlock.hashTreeRoot()", function () { const root = ssz.deneb.BeaconBlock.hashTreeRoot(block); console.log("BeaconBlock.hashTreeRoot() root", toHexString(root)); itBench({ - id: `Deneb BeaconBlock.hashTreeRoot(), numTransaciton=${numTransaction}`, + id: `Deneb BeaconBlock.hashTreeRoot(), numTransaction=${numTransaction}`, beforeEach: () => { clearCachedRoots(block); return block; diff --git a/packages/ssz/test/perf/merkleize.test.ts b/packages/ssz/test/perf/merkleize.test.ts index b83a1f5d..a900015a 100644 --- a/packages/ssz/test/perf/merkleize.test.ts +++ b/packages/ssz/test/perf/merkleize.test.ts @@ -1,5 +1,6 @@ import {itBench} from "@dapplion/benchmark"; -import {bitLength} from "../../src/util/merkleize"; +import {bitLength, merkleize} from "../../src/util/merkleize"; +import {merkleizeInto} from "@chainsafe/persistent-merkle-tree"; describe("merkleize / bitLength", () => { for (const n of [50, 8000, 250000]) { @@ -13,6 +14,23 @@ describe("merkleize / bitLength", () => { } }); +describe("merkleize vs persistent-merkle-tree merkleizeInto", () => { + const chunkCounts = [4, 8, 16, 32]; + + for (const chunkCount of chunkCounts) { + const rootArr = Array.from({length: chunkCount}, (_, i) => Buffer.alloc(32, i)); + const roots = Buffer.concat(rootArr); + const result = Buffer.alloc(32); + itBench(`merkleizeInto ${chunkCount} chunks`, () => { + merkleizeInto(roots, chunkCount, result, 0); + }); + + itBench(`merkleize ${chunkCount} chunks`, () => { + merkleize(rootArr, chunkCount); + }); + } +}); + // Previous implementation, replaced by bitLength function bitLengthStr(n: number): number { const bitstring = n.toString(2); From fc0531dcb992d311b3a3b3f713b2bdd1f6c0661b Mon Sep 17 00:00:00 2001 From: twoeths Date: Thu, 11 Jul 2024 10:07:48 +0700 Subject: [PATCH 082/113] fix: do not rebind 
nodes if child ViewDU is not changed (#380)

---
 packages/ssz/src/viewDU/abstract.ts            |  2 +-
 packages/ssz/src/viewDU/arrayComposite.ts      |  9 ++++--
 packages/ssz/src/viewDU/container.ts           |  9 ++++--
 .../ssz/test/unit/unchangedViewDUs.test.ts     | 29 +++++++++++++++++++
 4 files changed, 42 insertions(+), 7 deletions(-)
 create mode 100644 packages/ssz/test/unit/unchangedViewDUs.test.ts

diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts
index 1398fb20..90190b0a 100644
--- a/packages/ssz/src/viewDU/abstract.ts
+++ b/packages/ssz/src/viewDU/abstract.ts
@@ -67,7 +67,7 @@ export abstract class TreeViewDU<T extends CompositeTypeAny> extends TreeView<T
     const hashComps = nextHashComps;
     this.commit(hashComps);
-    if (hashComps.byLevel.length > 0 || nextHashComps.offset !== 0) {
+    if (nextHashComps.byLevel.length > 0 || nextHashComps.offset !== 0) {
       // preallocate for the next time
       nextHashComps = {
         byLevel: [],
diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts
index a257bcbe..6666a3a8 100644
--- a/packages/ssz/src/viewDU/arrayComposite.ts
+++ b/packages/ssz/src/viewDU/arrayComposite.ts
@@ -197,9 +197,12 @@ export class ArrayCompositeTreeViewDU<
 
     for (const [index, view] of this.viewsChanged) {
       const node = this.type.elementType.commitViewDU(view, hashCompsView);
-      // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
-      this.nodes[index] = node;
-      nodesChanged.push({index, node});
+      // there's a chance the view is not changed, no need to rebind nodes in that case
+      if (this.nodes[index] !== node) {
+        // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
+        this.nodes[index] = node;
+        nodesChanged.push({index, node});
+      }
 
       // Cache the view's caches to preserve it's data after 'this.viewsChanged.clear()'
       const cache = this.type.elementType.cacheOfViewDU(view);
diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts
index ed01b958..a5eb515a 100644
--- a/packages/ssz/src/viewDU/container.ts
+++ b/packages/ssz/src/viewDU/container.ts
@@ -100,9 +100,12 @@ class ContainerTreeViewDU<Fields extends Record<string, Type<unknown>>> extends
     for (const [index, view] of this.viewsChanged) {
       const fieldType = this.type.fieldsEntries[index].fieldType as unknown as CompositeTypeAny;
       const node = fieldType.commitViewDU(view, hashCompsView);
-      // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
-      this.nodes[index] = node;
-      nodesChanged.push({index, node});
+      // there's a chance the view is not changed, no need to rebind nodes in that case
+      if (this.nodes[index] !== node) {
+        // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
+        this.nodes[index] = node;
+        nodesChanged.push({index, node});
+      }
 
       // Cache the view's caches to preserve it's data after 'this.viewsChanged.clear()'
       const cache = fieldType.cacheOfViewDU(view);
diff --git a/packages/ssz/test/unit/unchangedViewDUs.test.ts b/packages/ssz/test/unit/unchangedViewDUs.test.ts
new file mode 100644
index 00000000..98e696c9
--- /dev/null
+++ b/packages/ssz/test/unit/unchangedViewDUs.test.ts
@@ -0,0 +1,29 @@
+import {expect} from "chai";
+import * as sszAltair from "../lodestarTypes/altair/sszTypes";
+import {getRandomState} from "../utils/generateEth2Objs";
+
+describe("Unchanged ViewDUs", () => {
+  const state = sszAltair.BeaconState.toViewDU(getRandomState(100));
+
+  it("should not recompute hashTreeRoot() when no fields are changed", () => {
+    const root = state.hashTreeRoot();
+    // this causes viewsChanged inside BeaconState container
+    state.validators.length;
+    state.balances.length;
+    // but we should not recompute root, should get from cache instead
+    const root2 = 
state.hashTreeRoot(); + expect(root2).to.equal(root, "should not recompute hashTreeRoot() when no fields are changed"); + }); + + it("handle childViewDU.hashTreeRoot()", () => { + const state2 = state.clone(); + state2.latestBlockHeader.stateRoot = Buffer.alloc(32, 3); + const root2 = state2.hashTreeRoot(); + const state3 = state.clone(); + state3.latestBlockHeader.stateRoot = Buffer.alloc(32, 3); + // hashTreeRoot() also does the commit() + state3.latestBlockHeader.commit(); + const root3 = state3.hashTreeRoot(); + expect(root3).to.be.deep.equal(root2); + }); +}); From 33e241658cecf9e2d3a125a672944810908ce76c Mon Sep 17 00:00:00 2001 From: twoeths Date: Thu, 11 Jul 2024 18:06:06 +0700 Subject: [PATCH 083/113] fix: optimize commit() for Container and ArrayComposite ViewDUs (#381) --- packages/ssz/src/viewDU/arrayComposite.ts | 21 ++++++---- packages/ssz/src/viewDU/container.ts | 48 ++++++++++++----------- 2 files changed, 38 insertions(+), 31 deletions(-) diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 6666a3a8..76f3eff4 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -184,7 +184,6 @@ export class ArrayCompositeTreeViewDU< return; } - const nodesChanged: {index: number; node: Node}[] = []; // each view may mutate HashComputationGroup at offset + depth const hashCompsView = hashComps != null && isOldRootHashed @@ -195,13 +194,24 @@ export class ArrayCompositeTreeViewDU< } : null; - for (const [index, view] of this.viewsChanged) { + const indexesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b); + const indexes: number[] = []; + const nodes: Node[] = []; + for (const index of indexesChanged) { + const view = this.viewsChanged.get(index); + if (!view) { + // should not happen + throw Error("View not found in viewsChanged, index=" + index); + } + const node = this.type.elementType.commitViewDU(view, hashCompsView); // there's a chance the view is not changed, no need to rebind nodes in that case if (this.nodes[index] !== node) { // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal this.nodes[index] = node; - nodesChanged.push({index, node}); + // nodesChanged.push({index, node}); + indexes.push(index); + nodes.push(node); } // Cache the view's caches to preserve it's data after 'this.viewsChanged.clear()' @@ -209,11 +219,6 @@ export class ArrayCompositeTreeViewDU< if (cache) this.caches[index] = cache; } - // TODO: Optimize to loop only once, Numerical sort ascending - const nodesChangedSorted = nodesChanged.sort((a, b) => a.index - b.index); - const indexes = nodesChangedSorted.map((entry) => entry.index); - const nodes = nodesChangedSorted.map((entry) => entry.node); - const chunksNode = this.type.tree_getChunksNode(this._rootNode); const hashCompsThis = hashComps != null && isOldRootHashed diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts index a5eb515a..06b43dc1 100644 --- a/packages/ssz/src/viewDU/container.ts +++ b/packages/ssz/src/viewDU/container.ts @@ -89,38 +89,40 @@ class ContainerTreeViewDU>> extends return; } - const nodesChanged: {index: number; node: Node}[] = []; - let hashCompsView: HashComputationGroup | null = null; // if old root is not hashed, no need to pass HashComputationGroup to child view bc we need to do full traversal here if (hashComps != null && isOldRootHashed) { // each view may mutate HashComputationGroup at offset + depth hashCompsView = 
{byLevel: hashComps.byLevel, offset: hashComps.offset + this.type.depth}; } - for (const [index, view] of this.viewsChanged) { - const fieldType = this.type.fieldsEntries[index].fieldType as unknown as CompositeTypeAny; - const node = fieldType.commitViewDU(view, hashCompsView); - // there's a chance the view is not changed, no need to rebind nodes in that case - if (this.nodes[index] !== node) { - // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal - this.nodes[index] = node; - nodesChanged.push({index, node}); - } - - // Cache the view's caches to preserve it's data after 'this.viewsChanged.clear()' - const cache = fieldType.cacheOfViewDU(view); - if (cache) this.caches[index] = cache; - } - for (const index of this.nodesChanged) { - nodesChanged.push({index, node: this.nodes[index]}); + // union all changes then sort, they should not be duplicated + const combinedIndexes = [...this.nodesChanged, ...Array.from(this.viewsChanged.keys())].sort((a, b) => a - b); + const indexes: number[] = []; + const nodes: Node[] = []; + for (const index of combinedIndexes) { + const view = this.viewsChanged.get(index); + if (view) { + // composite type + const fieldType = this.type.fieldsEntries[index].fieldType as unknown as CompositeTypeAny; + const node = fieldType.commitViewDU(view, hashCompsView); + // there's a chance the view is not changed, no need to rebind nodes in that case + if (this.nodes[index] !== node) { + // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal + this.nodes[index] = node; + indexes.push(index); + nodes.push(node); + } + // Cache the view's caches to preserve it's data after 'this.viewsChanged.clear()' + const cache = fieldType.cacheOfViewDU(view); + if (cache) this.caches[index] = cache; + } else { + // basic type + indexes.push(index); + nodes.push(this.nodes[index]); + } } - // TODO: Optimize to loop only once, Numerical sort ascending - const nodesChangedSorted = nodesChanged.sort((a, b) => a.index - b.index); - const indexes = nodesChangedSorted.map((entry) => entry.index); - const nodes = nodesChangedSorted.map((entry) => entry.node); - this._rootNode = setNodesAtDepth( this._rootNode, this.type.depth, From 4873a6606e8c0f18f319716d07bac29f9419f234 Mon Sep 17 00:00:00 2001 From: twoeths Date: Mon, 15 Jul 2024 17:21:42 +0700 Subject: [PATCH 084/113] feat: support optional output arrays for getAll() apis (#383) --- packages/ssz/src/view/arrayBasic.ts | 7 +++++-- packages/ssz/src/view/arrayComposite.ts | 14 ++++++++++---- packages/ssz/src/viewDU/arrayBasic.ts | 7 +++++-- packages/ssz/src/viewDU/arrayComposite.ts | 14 ++++++++++---- 4 files changed, 30 insertions(+), 12 deletions(-) diff --git a/packages/ssz/src/view/arrayBasic.ts b/packages/ssz/src/view/arrayBasic.ts index ed34128c..d131dc6a 100644 --- a/packages/ssz/src/view/arrayBasic.ts +++ b/packages/ssz/src/view/arrayBasic.ts @@ -83,13 +83,16 @@ export class ArrayBasicTreeView> extends /** * Get all values of this array as Basic element type values, from index zero to `this.length - 1` */ - getAll(): ValueOf[] { + getAll(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this.length) { + throw Error(`Expected ${this.length} values, got ${values.length}`); + } const length = this.length; const chunksNode = this.type.tree_getChunksNode(this.node); const chunkCount = Math.ceil(length / this.type.itemsPerChunk); const leafNodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, chunkCount) as LeafNode[]; - 
const values = new Array>(length); + values = values ?? new Array>(length); const itemsPerChunk = this.type.itemsPerChunk; // Prevent many access in for loop below const lenFullNodes = Math.floor(length / itemsPerChunk); const remainder = length % itemsPerChunk; diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts index 65463e08..bbcf899d 100644 --- a/packages/ssz/src/view/arrayComposite.ts +++ b/packages/ssz/src/view/arrayComposite.ts @@ -88,11 +88,14 @@ export class ArrayCompositeTreeView< * The returned views don't have a parent hook to this View's Tree, so changes in the returned views won't be * propagated upwards. To get linked element Views use `this.get()` */ - getAllReadonly(): CompositeView[] { + getAllReadonly(views?: CompositeView[]): CompositeView[] { + if (views && views.length !== this.length) { + throw Error(`Expected ${this.length} views, got ${views.length}`); + } const length = this.length; const chunksNode = this.type.tree_getChunksNode(this.node); const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); - const views = new Array>(length); + views = views ?? new Array>(length); for (let i = 0; i < length; i++) { // TODO: Optimize views[i] = this.type.elementType.getView(new Tree(nodes[i])); @@ -105,11 +108,14 @@ export class ArrayCompositeTreeView< * The returned values are not Views so any changes won't be propagated upwards. * To get linked element Views use `this.get()` */ - getAllReadonlyValues(): ValueOf[] { + getAllReadonlyValues(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this.length) { + throw Error(`Expected ${this.length} values, got ${values.length}`); + } const length = this.length; const chunksNode = this.type.tree_getChunksNode(this.node); const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); - const values = new Array>(length); + values = values ?? new Array>(length); for (let i = 0; i < length; i++) { values[i] = this.type.elementType.tree_toValue(nodes[i]); } diff --git a/packages/ssz/src/viewDU/arrayBasic.ts b/packages/ssz/src/viewDU/arrayBasic.ts index 159c525a..6899174e 100644 --- a/packages/ssz/src/viewDU/arrayBasic.ts +++ b/packages/ssz/src/viewDU/arrayBasic.ts @@ -110,7 +110,10 @@ export class ArrayBasicTreeViewDU> extend /** * Get all values of this array as Basic element type values, from index zero to `this.length - 1` */ - getAll(): ValueOf[] { + getAll(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this._length) { + throw Error(`Expected ${this._length} values, got ${values.length}`); + } if (!this.nodesPopulated) { const nodesPrev = this.nodes; const chunksNode = this.type.tree_getChunksNode(this.node); @@ -125,7 +128,7 @@ export class ArrayBasicTreeViewDU> extend this.nodesPopulated = true; } - const values = new Array>(this._length); + values = values ?? 
new Array>(this._length); const itemsPerChunk = this.type.itemsPerChunk; // Prevent many access in for loop below const lenFullNodes = Math.floor(this._length / itemsPerChunk); const remainder = this._length % itemsPerChunk; diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 76f3eff4..3e854aa3 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -147,10 +147,13 @@ export class ArrayCompositeTreeViewDU< /** * WARNING: Returns all commited changes, if there are any pending changes commit them beforehand */ - getAllReadonly(): CompositeViewDU[] { + getAllReadonly(views?: CompositeViewDU[]): CompositeViewDU[] { + if (views && views.length !== this._length) { + throw Error(`Expected ${this._length} views, got ${views.length}`); + } this.populateAllNodes(); - const views = new Array>(this._length); + views = views ?? new Array>(this._length); for (let i = 0; i < this._length; i++) { views[i] = this.type.elementType.getViewDU(this.nodes[i], this.caches[i]); } @@ -160,10 +163,13 @@ export class ArrayCompositeTreeViewDU< /** * WARNING: Returns all commited changes, if there are any pending changes commit them beforehand */ - getAllReadonlyValues(): ValueOf[] { + getAllReadonlyValues(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this._length) { + throw Error(`Expected ${this._length} values, got ${values.length}`); + } this.populateAllNodes(); - const values = new Array>(this._length); + values = values ?? new Array>(this._length); for (let i = 0; i < this._length; i++) { values[i] = this.type.elementType.tree_toValue(this.nodes[i]); } From dcda46f01d810fb48be99fa3994b09c17081abef Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 19 Jul 2024 08:42:32 +0700 Subject: [PATCH 085/113] fix: merge master --- .../src/hasher/as-sha256.ts | 2 +- .../src/hasher/hashtree.ts | 2 +- .../src/hasher/index.ts | 20 ++++++-------- .../src/hasher/noble.ts | 27 +++++++++++++++---- .../src/hasher/types.ts | 4 +-- packages/persistent-merkle-tree/src/index.ts | 1 + packages/persistent-merkle-tree/src/node.ts | 18 ++++++------- .../persistent-merkle-tree/src/subtree.ts | 12 +++------ .../persistent-merkle-tree/src/zeroHash.ts | 12 ++++++--- .../test/unit/hasher.test.ts | 5 ++-- packages/ssz/src/util/merkleize.ts | 2 +- packages/ssz/src/util/zeros.ts | 13 --------- yarn.lock | 8 ------ 13 files changed, 58 insertions(+), 68 deletions(-) delete mode 100644 packages/ssz/src/util/zeros.ts diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index d7904ca7..d8228251 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -19,7 +19,7 @@ export const hasher: Hasher = { digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); }, - executeHashComputations: (hashComputations: Array) => { + executeHashComputations: (hashComputations: HashComputation[][]) => { for (let level = hashComputations.length - 1; level >= 0; level--) { const hcArr = hashComputations[level]; if (!hcArr) { diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 2f3f57bd..26de5609 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -43,7 +43,7 @@ export const hasher: Hasher = { 
digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); }, - executeHashComputations(hashComputations: Array): void { + executeHashComputations(hashComputations: HashComputation[][]): void { for (let level = hashComputations.length - 1; level >= 0; level--) { const hcArr = hashComputations[level]; if (!hcArr) { diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 886312ab..dfa1d7e6 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -1,22 +1,14 @@ import {Hasher} from "./types"; -// import {hasher as nobleHasher} from "./noble"; -// import {hasher as csHasher} from "./as-sha256"; -import {hasher as hashtreeHasher} from "./hashtree"; +import {hasher as nobleHasher} from "./noble"; +import type {HashComputation} from "../node"; export * from "./types"; export * from "./util"; /** - * Default hasher used across the SSZ codebase, this does not support batch hash. - * Use `as-sha256` hasher for batch hashing using SIMD. - * TODO - batch: Use `hashtree` hasher for 20x speedup + * Hasher used across the SSZ codebase, by default, this does not support batch hash. */ -// export let hasher: Hasher = nobleHasher; -// For testing purposes, we use the as-sha256 hasher -// export let hasher: Hasher = csHasher; - -// For testing purposes, we use the hashtree hasher -export let hasher: Hasher = hashtreeHasher; +export let hasher: Hasher = nobleHasher; /** * Set the hasher to be used across the SSZ codebase @@ -38,3 +30,7 @@ export function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { hasher.merkleizeInto(data, padFor, output, offset); } + +export function executeHashComputations(hashComputations: HashComputation[][]): void { + hasher.executeHashComputations(hashComputations); +} diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 6d379006..eb1224b6 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -1,9 +1,26 @@ import {sha256} from "@noble/hashes/sha256"; import {digest64HashObjects, byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {hashObjectToUint8Array} from "./util"; +import {doDigestNLevel, doMerkleizeInto, hashObjectToUint8Array} from "./util"; const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().update(a).update(b).digest(); +const hashInto = (input: Uint8Array, output: Uint8Array): void => { + if (input.length % 64 !== 0) { + throw new Error(`Invalid input length ${input.length}`); + } + if (input.length !== output.length * 2) { + throw new Error(`Invalid output length ${output.length}`); + } + + const count = Math.floor(input.length / 64); + for (let i = 0; i < count; i++) { + const offset = i * 64; + const in1 = input.subarray(offset, offset + 32); + const in2 = input.subarray(offset + 32, offset + 64); + const out = digest64(in1, in2); + output.set(out, i * 32); + } +}; export const hasher: Hasher = { name: "noble", @@ -11,11 +28,11 @@ export const hasher: Hasher = { digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), 0, parent); }, - merkleizeInto(): void { - throw 
new Error("Not implemented"); + merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeInto(data, padFor, output, offset, hashInto); }, - digestNLevel(): Uint8Array { - throw new Error("Not implemented"); + digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { + return doDigestNLevel(data, nLevel, hashInto); }, executeHashComputations: (hashComputations) => { for (let level = hashComputations.length - 1; level >= 0; level--) { diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index eb7e4342..ad72cfcb 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -1,5 +1,5 @@ import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {HashComputation} from "../node"; +import type {HashComputation} from "../node"; export type {HashObject}; @@ -29,5 +29,5 @@ export type Hasher = { /** * Execute a batch of HashComputations */ - executeHashComputations(hashComputations: Array): void; + executeHashComputations(hashComputations: HashComputation[][]): void; }; diff --git a/packages/persistent-merkle-tree/src/index.ts b/packages/persistent-merkle-tree/src/index.ts index d3ff35a5..5ff9c0b8 100644 --- a/packages/persistent-merkle-tree/src/index.ts +++ b/packages/persistent-merkle-tree/src/index.ts @@ -6,3 +6,4 @@ export * from "./proof"; export * from "./subtree"; export * from "./tree"; export * from "./zeroNode"; +export * from "./zeroHash"; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index ac9ebb7d..5d2b5bbe 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -1,5 +1,5 @@ import {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from "./hasher"; +import {executeHashComputations, hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from "./hasher"; const TWO_POWER_32 = 2 ** 32; @@ -11,7 +11,7 @@ export type HashComputation = { export type HashComputationGroup = { // global array - byLevel: Array; + byLevel: HashComputation[][]; // offset from top offset: number; }; @@ -399,7 +399,10 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void else throw Error("hIndex > 7"); } -export function getHashComputations(node: Node, offset: number, hashCompsByLevel: Array): void { +/** + * Get HashComputations from a root node all the way to the leaf nodes. 
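+ *
+ * Hypothetical usage (illustrative only):
+ *   const byLevel: HashComputation[][] = [];
+ *   getHashComputations(rootNode, 0, byLevel); // byLevel[0] holds the root pair, byLevel[1] its children, ...
+ *   executeHashComputations(byLevel);          // then hash all levels bottom-up in batches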
+ */ +export function getHashComputations(node: Node, offset: number, hashCompsByLevel: HashComputation[][]): void { if (node.h0 === null) { const hashComputations = arrayAtIndex(hashCompsByLevel, offset); const {left, right} = node; @@ -409,15 +412,10 @@ export function getHashComputations(node: Node, offset: number, hashCompsByLevel getHashComputations(right, offset + 1, hashCompsByLevel); } - // else stop the recursion, LeafNode should have h0 -} - -// TODO - batch: move to hasher/index.ts -export function executeHashComputations(hashComputations: Array): void { - hasher.executeHashComputations(hashComputations); + // else stop the recursion, node is hashed } -export function arrayAtIndex(twoDArray: Array, index: number): T[] { +export function arrayAtIndex(twoDArray: T[][], index: number): T[] { if (twoDArray[index] === undefined) { twoDArray[index] = []; } diff --git a/packages/persistent-merkle-tree/src/subtree.ts b/packages/persistent-merkle-tree/src/subtree.ts index 561f0b1d..8708c670 100644 --- a/packages/persistent-merkle-tree/src/subtree.ts +++ b/packages/persistent-merkle-tree/src/subtree.ts @@ -39,13 +39,12 @@ export function subtreeFillToLength(bottom: Node, depth: number, length: number) * WARNING: Mutates the provided nodes array. * @param hashCompRootNode is a hacky way from ssz to set `dest` of HashComputation for BranchNodeStruct * TODO: Don't mutate the nodes array. - * TODO - batch: check consumers of this function, can we compute HashComputationGroup when deserializing ViewDU from Uint8Array? + * hashComps is an output parameter that will be filled with the hash computations if exists. */ export function subtreeFillToContents( nodes: Node[], depth: number, - hashComps: HashComputationGroup | null = null, - hashCompRootNode: Node | null = null + hashComps: HashComputationGroup | null = null ): Node { const maxLength = 2 ** depth; if (nodes.length > maxLength) { @@ -59,7 +58,6 @@ export function subtreeFillToContents( if (depth === 0) { const node = nodes[0]; if (hashComps !== null) { - // only use hashCompRootNode for >=1 nodes where we have a rebind getHashComputations(node, hashComps.offset, hashComps.byLevel); } return node; @@ -79,7 +77,7 @@ export function subtreeFillToContents( arrayAtIndex(hashComps.byLevel, offset).push({ src0: leftNode, src1: rightNode, - dest: hashCompRootNode ?? rootNode, + dest: rootNode, }); } @@ -103,8 +101,7 @@ export function subtreeFillToContents( arrayAtIndex(hashComps.byLevel, offset).push({ src0: left, src1: right, - // d = 1 means we are at root node, use hashCompRootNode if possible - dest: d === 1 ? hashCompRootNode ?? 
node : node, + dest: node, }); if (d === depth) { // bottom up strategy so we don't need to go down the tree except for the last level @@ -125,7 +122,6 @@ export function subtreeFillToContents( getHashComputations(left, offset + 1, hashComps.byLevel); } // no need to getHashComputations for zero node - // no need to set hashCompRootNode here arrayAtIndex(hashComps.byLevel, offset).push({src0: left, src1: right, dest: node}); } } diff --git a/packages/persistent-merkle-tree/src/zeroHash.ts b/packages/persistent-merkle-tree/src/zeroHash.ts index 536c6096..33b6097c 100644 --- a/packages/persistent-merkle-tree/src/zeroHash.ts +++ b/packages/persistent-merkle-tree/src/zeroHash.ts @@ -1,6 +1,6 @@ -// TODO - batch: deduplicate to ssz -// use as-sh256 hasher here instead of using hasher variable because this is used inside hasher itself -import {digest2Bytes32} from "@chainsafe/as-sha256"; +// use noble here instead of using hasher variable because this is used inside hasher itself +// we cache zero hashes so performance is not a concern +import {sha256} from "@noble/hashes/sha256"; // create array of "zero hashes", successively hashed zero chunks const zeroHashes = [new Uint8Array(32)]; @@ -8,7 +8,11 @@ const zeroHashes = [new Uint8Array(32)]; export function zeroHash(depth: number): Uint8Array { if (depth >= zeroHashes.length) { for (let i = zeroHashes.length; i <= depth; i++) { - zeroHashes[i] = digest2Bytes32(zeroHashes[i - 1], zeroHashes[i - 1]); + zeroHashes[i] = sha256 + .create() + .update(zeroHashes[i - 1]) + .update(zeroHashes[i - 1]) + .digest(); } } return zeroHashes[depth]; diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index dd9e3629..46f454b0 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -70,7 +70,7 @@ describe("hashers", function () { }); describe("hasher.digestNLevel", function () { - const hashers = [hashtreeHasher, asSha256Hasher]; + const hashers = [nobleHasher, hashtreeHasher, asSha256Hasher]; for (const hasher of hashers) { const numValidators = [1, 2, 3, 4]; for (const numValidator of numValidators) { @@ -90,7 +90,7 @@ describe("hasher.digestNLevel", function () { describe("hasher.merkleizeInto", function () { const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8]; - for (const hasher of [hashtreeHasher, asSha256Hasher]) { + for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) { it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { const data = Buffer.alloc(63, 0); const output = Buffer.alloc(32); @@ -114,4 +114,3 @@ describe("hasher.merkleizeInto", function () { } }); -// TODO - batch: test more methods diff --git a/packages/ssz/src/util/merkleize.ts b/packages/ssz/src/util/merkleize.ts index 95045867..073dea5d 100644 --- a/packages/ssz/src/util/merkleize.ts +++ b/packages/ssz/src/util/merkleize.ts @@ -1,5 +1,5 @@ import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index"; -import {zeroHash} from "./zeros"; +import {zeroHash} from "@chainsafe/persistent-merkle-tree"; /** Dedicated property to cache hashTreeRoot of immutable CompositeType values */ export const symbolCachedPermanentRoot = Symbol("ssz_cached_permanent_root"); diff --git a/packages/ssz/src/util/zeros.ts b/packages/ssz/src/util/zeros.ts deleted file mode 100644 index 56a2dc95..00000000 --- a/packages/ssz/src/util/zeros.ts +++ /dev/null @@ -1,13 +0,0 @@ -import {hasher} from 
"@chainsafe/persistent-merkle-tree/lib/hasher/index"; - -// create array of "zero hashes", successively hashed zero chunks -const zeroHashes = [new Uint8Array(32)]; - -export function zeroHash(depth: number): Uint8Array { - if (depth >= zeroHashes.length) { - for (let i = zeroHashes.length; i <= depth; i++) { - zeroHashes[i] = hasher.digest64(zeroHashes[i - 1], zeroHashes[i - 1]); - } - } - return zeroHashes[depth]; -} diff --git a/yarn.lock b/yarn.lock index 466aca7e..6d4c7fbe 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1276,14 +1276,6 @@ "@chainsafe/hashtree-linux-arm64-gnu" "1.0.1" "@chainsafe/hashtree-linux-x64-gnu" "1.0.1" -"@chainsafe/ssz@0.16.0": - version "0.16.0" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.16.0.tgz#262c491ac037777a16e8d8db479da2ba27539b8d" - integrity sha512-CgTDyrkbAKvrKwHxPT5rerXAHP3NB+uOvpnN9Gn8aJ/4TGOKhOboj4131bSFUZ679uPJ6pu6391cvInuOdrglw== - dependencies: - "@chainsafe/as-sha256" "^0.4.2" - "@chainsafe/persistent-merkle-tree" "^0.7.2" - "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" From 7e0aa16e1a814d29e53d73efa2f7aa114d831d92 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 19 Jul 2024 08:44:36 +0700 Subject: [PATCH 086/113] fix: merge issue --- packages/ssz/test/unit/merkleize.test.ts | 3 +-- yarn.lock | 8 ++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/ssz/test/unit/merkleize.test.ts b/packages/ssz/test/unit/merkleize.test.ts index f728a877..d1e611b7 100644 --- a/packages/ssz/test/unit/merkleize.test.ts +++ b/packages/ssz/test/unit/merkleize.test.ts @@ -1,7 +1,6 @@ import {expect} from "chai"; import {bitLength, maxChunksToDepth, merkleize, mixInLength, nextPowerOf2} from "../../src/util/merkleize"; -import {merkleizeInto, LeafNode} from "@chainsafe/persistent-merkle-tree"; -import {zeroHash} from "../../src/util/zeros"; +import {merkleizeInto, LeafNode, zeroHash} from "@chainsafe/persistent-merkle-tree"; describe("util / merkleize / bitLength", () => { const bitLengthByIndex = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4]; diff --git a/yarn.lock b/yarn.lock index 6d4c7fbe..466aca7e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1276,6 +1276,14 @@ "@chainsafe/hashtree-linux-arm64-gnu" "1.0.1" "@chainsafe/hashtree-linux-x64-gnu" "1.0.1" +"@chainsafe/ssz@0.16.0": + version "0.16.0" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.16.0.tgz#262c491ac037777a16e8d8db479da2ba27539b8d" + integrity sha512-CgTDyrkbAKvrKwHxPT5rerXAHP3NB+uOvpnN9Gn8aJ/4TGOKhOboj4131bSFUZ679uPJ6pu6391cvInuOdrglw== + dependencies: + "@chainsafe/as-sha256" "^0.4.2" + "@chainsafe/persistent-merkle-tree" "^0.7.2" + "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" From b6f18fb3a03d3229ecede95f2b812b2d356845ad Mon Sep 17 00:00:00 2001 From: twoeths Date: Tue, 23 Jul 2024 14:22:02 +0700 Subject: [PATCH 087/113] feat: minimal memory allocation on ViewDU hashTreeRoot (#388) --- .../src/hashComputation.ts | 213 ++++++++++++++++++ .../src/hasher/as-sha256.ts | 9 +- .../src/hasher/hashtree.ts | 25 +- .../src/hasher/index.ts | 4 +- .../src/hasher/types.ts | 4 +- packages/persistent-merkle-tree/src/index.ts | 1 + packages/persistent-merkle-tree/src/node.ts | 53 +---- .../persistent-merkle-tree/src/subtree.ts | 45 ++-- packages/persistent-merkle-tree/src/tree.ts | 58 ++--- .../test/perf/hashComputation.test.ts | 41 ++++ 
.../test/perf/hasher.test.ts                    |   5 +-
 .../test/unit/batchHash.test.ts                 |  48 ----
 .../test/unit/hashComputation.test.ts           |  83 +++++++
 .../test/unit/hasher.test.ts                    |   5 +-
 .../test/unit/node.test.ts                      |   7 +-
 .../test/unit/subtree.test.ts                   |  73 ++++--
 .../test/unit/tree.test.ts                      |  36 ++-
 packages/ssz/src/type/arrayBasic.ts             |  11 +-
 packages/ssz/src/type/bitArray.ts               |   6 +-
 packages/ssz/src/type/composite.ts              |   4 +-
 packages/ssz/src/type/container.ts              |  10 +-
 packages/ssz/src/type/listBasic.ts              |  15 +-
 packages/ssz/src/type/listComposite.ts          |  15 +-
 packages/ssz/src/type/vectorBasic.ts            |  10 +-
 packages/ssz/src/type/vectorComposite.ts        |  10 +-
 packages/ssz/src/view/arrayBasic.ts             |   5 +-
 packages/ssz/src/view/arrayComposite.ts         |   5 +-
 packages/ssz/src/viewDU/abstract.ts             |  44 ++--
 packages/ssz/src/viewDU/arrayBasic.ts           |  32 ++-
 packages/ssz/src/viewDU/arrayComposite.ts       |  47 ++--
 packages/ssz/src/viewDU/bitArray.ts             |   8 +-
 packages/ssz/src/viewDU/container.ts            |  35 ++-
 packages/ssz/src/viewDU/listBasic.ts            |   2 +-
 packages/ssz/src/viewDU/listComposite.ts        |   4 +-
 .../phase0/viewDU/listValidator.ts              |  22 +-
 packages/ssz/test/perf/eth2/beaconState.test.ts |  85 ++-----
 packages/ssz/test/unit/eth2/validators.test.ts  |  19 +-
 37 files changed, 652 insertions(+), 447 deletions(-)
 create mode 100644 packages/persistent-merkle-tree/src/hashComputation.ts
 create mode 100644 packages/persistent-merkle-tree/test/perf/hashComputation.test.ts
 delete mode 100644 packages/persistent-merkle-tree/test/unit/batchHash.test.ts
 create mode 100644 packages/persistent-merkle-tree/test/unit/hashComputation.test.ts

diff --git a/packages/persistent-merkle-tree/src/hashComputation.ts b/packages/persistent-merkle-tree/src/hashComputation.ts
new file mode 100644
index 00000000..d7f48a25
--- /dev/null
+++ b/packages/persistent-merkle-tree/src/hashComputation.ts
@@ -0,0 +1,213 @@
+import type {Node} from "./node";
+
+/**
+ * A HashComputation is used later to compute hashes of nodes from the bottom up.
+ * It is also an item of a linked list.
+ *   ╔═════════════════════╗            ╔══════════════════════╗
+ *   ║       dest          ║            ║      next_dest       ║
+ *   ║      /    \         ║  ========> ║      /    \          ║
+ *   ║   src0    src1      ║            ║ next_src0  next_src1 ║
+ *   ╚═════════════════════╝            ╚══════════════════════╝
+ */
+export type HashComputation = {
+  src0: Node;
+  src1: Node;
+  dest: Node;
+  next: HashComputation | null;
+};
+
+/**
+ * Models the HashComputation[] of a single level in a way that supports reusing the same memory.
+ * Before every run, reset() should be called.
+ * After every run, clean() should be called.
+ */
+export class HashComputationLevel {
+  private _length: number;
+  private _totalLength: number;
+  // use a LinkedList to avoid memory allocation when the list grows
+  // always have a fixed head even when length is 0
+  private head: HashComputation;
+  private tail: HashComputation | null;
+  private pointer: HashComputation | null;
+
+  constructor() {
+    this._length = 0;
+    this._totalLength = 0;
+    this.head = {
+      src0: null as unknown as Node,
+      src1: null as unknown as Node,
+      dest: null as unknown as Node,
+      next: null,
+    };
+    this.tail = null;
+    this.pointer = null;
+  }
+
+  get length(): number {
+    return this._length;
+  }
+
+  get totalLength(): number {
+    return this._totalLength;
+  }
+
+  /**
+   * Must be called before every run.
+   */
+  reset(): void {
+    // keep this.head
+    this.tail = null;
+    this._length = 0;
+    // totalLength is not reset
+    this.pointer = null;
+  }
+
+  /**
+   * Append a new HashComputation to the tail.
+   * This will overwrite the existing HashComputation if it is not null, or grow the list if needed.
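+   *
+   * Hypothetical usage (illustrative only):
+   *   level.reset();                               // before the run
+   *   level.push(leftNode, rightNode, parentNode); // reuses a pooled item when one exists
+   *   level.clean();                               // after the run, release Node references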
+   */
+  push(src0: Node, src1: Node, dest: Node): void {
+    if (this.tail !== null) {
+      let newTail = this.tail.next;
+      if (newTail !== null) {
+        newTail.src0 = src0;
+        newTail.src1 = src1;
+        newTail.dest = dest;
+      } else {
+        // grow the list
+        newTail = {src0, src1, dest, next: null};
+        this.tail.next = newTail;
+        this._totalLength++;
+      }
+      this.tail = newTail;
+      this._length++;
+      return;
+    }
+
+    // first item
+    this.head.src0 = src0;
+    this.head.src1 = src1;
+    this.head.dest = dest;
+    this.tail = this.head;
+    this._length = 1;
+    if (this._totalLength === 0) {
+      this._totalLength = 1;
+    }
+    // else _totalLength > 0, do not set
+  }
+
+  /**
+   * Must be called after every run.
+   * hashComps may still refer to the old Nodes; we should release them to avoid a memory leak.
+   */
+  clean(): void {
+    let hc = this.tail?.next ?? null;
+    while (hc !== null) {
+      if (hc.src0 === null) {
+        // we may have already cleaned it in the previous run, return early
+        break;
+      }
+      hc.src0 = null as unknown as Node;
+      hc.src1 = null as unknown as Node;
+      hc.dest = null as unknown as Node;
+      hc = hc.next;
+    }
+  }
+
+  /**
+   * Implement Iterator for this class
+   */
+  next(): IteratorResult<HashComputation> {
+    if (!this.pointer || this.tail === null) {
+      return {done: true, value: undefined};
+    }
+
+    // never yield value beyond the tail
+    const value = this.pointer;
+    const isNull = value.src0 === null;
+    this.pointer = this.pointer.next;
+
+    return isNull ? {done: true, value: undefined} : {done: false, value};
+  }
+
+  /**
+   * This is a convenient method to consume HashComputationLevel with a for-of loop.
+   * See the "next" method above for the actual implementation.
+   */
+  [Symbol.iterator](): IterableIterator<HashComputation> {
+    this.pointer = this.head;
+    return this;
+  }
+
+  /**
+   * Not great due to memory allocation.
+   * Mainly used for testing.
+   */
+  toArray(): HashComputation[] {
+    const hashComps: HashComputation[] = [];
+    for (const hc of this) {
+      hashComps.push(hc);
+    }
+    return hashComps;
+  }
+
+  /**
+   * For testing.
+   */
+  dump(): HashComputation[] {
+    const hashComps: HashComputation[] = [];
+    let hc: HashComputation | null = null;
+    for (hc = this.head; hc !== null; hc = hc.next) {
+      hashComps.push(hc);
+    }
+    return hashComps;
+  }
+}
+
+/**
+ * Models HashComputationLevel[] across different levels.
+ */
+export class HashComputationGroup {
+  readonly byLevel: HashComputationLevel[];
+  constructor() {
+    this.byLevel = [];
+  }
+
+  reset(): void {
+    for (const level of this.byLevel) {
+      level.reset();
+    }
+  }
+
+  clean(): void {
+    for (const level of this.byLevel) {
+      level.clean();
+    }
+  }
+}
+
+/**
+ * Get HashComputations from a root node all the way to the leaf nodes.
+ */
+export function getHashComputations(node: Node, offset: number, hcByLevel: HashComputationLevel[]): void {
+  if (node.h0 === null) {
+    const hashComputations = levelAtIndex(hcByLevel, offset);
+    const {left, right} = node;
+    hashComputations.push(left, right, node);
+    // leaf nodes should have h0 to stop the recursion
+    getHashComputations(left, offset + 1, hcByLevel);
+    getHashComputations(right, offset + 1, hcByLevel);
+  }
+
+  // else stop the recursion, node is hashed
+}
+
+/**
+ * Utility to get HashComputationLevel at a specific index.
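+ *
+ * Hypothetical usage (illustrative only):
+ *   const hcByLevel: HashComputationLevel[] = [];
+ *   levelAtIndex(hcByLevel, 2).push(leftNode, rightNode, parentNode); // creates hcByLevel[2] on demand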
+ */ +export function levelAtIndex(hcByLevel: HashComputationLevel[], index: number): HashComputationLevel { + if (hcByLevel[index] === undefined) { + hcByLevel[index] = new HashComputationLevel(); + } + return hcByLevel[index]; +} diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index d8228251..943f9b49 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -6,7 +6,8 @@ import { hashInto, } from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {HashComputation, Node} from "../node"; +import {Node} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; import {doDigestNLevel, doMerkleizeInto} from "./util"; export const hasher: Hasher = { @@ -19,7 +20,7 @@ export const hasher: Hasher = { digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); }, - executeHashComputations: (hashComputations: HashComputation[][]) => { + executeHashComputations: (hashComputations: HashComputationLevel[]) => { for (let level = hashComputations.length - 1; level >= 0; level--) { const hcArr = hashComputations[level]; if (!hcArr) { @@ -46,7 +47,8 @@ export const hasher: Hasher = { let src1_3: Node | null = null; let dest3: Node | null = null; - for (const [i, hc] of hcArr.entries()) { + let i = 0; + for (const hc of hcArr) { const indexInBatch = i % 4; switch (indexInBatch) { @@ -121,6 +123,7 @@ export const hasher: Hasher = { default: throw Error(`Unexpected indexInBatch ${indexInBatch}`); } + i++; } // remaining diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index 26de5609..f578d50c 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -1,6 +1,7 @@ import {hashInto} from "@chainsafe/hashtree"; import {Hasher, HashObject} from "./types"; -import {HashComputation, Node} from "../node"; +import {Node} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; import {doDigestNLevel, doMerkleizeInto} from "./util"; @@ -20,6 +21,8 @@ const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32); // convenient reusable Uint8Array for hash64 const hash64Input = uint8Input.subarray(0, 64); const hash64Output = uint8Output.subarray(0, 32); +// size input array to 2 HashObject per computation * 32 bytes per object +const destNodes: Node[] = new Array(PARALLEL_FACTOR); export const hasher: Hasher = { name: "hashtree", @@ -43,7 +46,7 @@ export const hasher: Hasher = { digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); }, - executeHashComputations(hashComputations: HashComputation[][]): void { + executeHashComputations(hashComputations: HashComputationLevel[]): void { for (let level = hashComputations.length - 1; level >= 0; level--) { const hcArr = hashComputations[level]; if (!hcArr) { @@ -56,25 +59,25 @@ export const hasher: Hasher = { continue; } - // size input array to 2 HashObject per computation * 32 bytes per object - // const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32)); - let destNodes: Node[] = []; - // hash every 16 inputs at once to avoid memory allocation - for (const [i, {src0, src1, dest}] of hcArr.entries()) { + let i = 0; + for 
(const {src0, src1, dest} of hcArr) { + if (!src0 || !src1 || !dest) { + throw new Error(`Invalid HashComputation at index ${i}`); + } const indexInBatch = i % PARALLEL_FACTOR; const offset = indexInBatch * 16; hashObjectToUint32Array(src0, uint32Input, offset); hashObjectToUint32Array(src1, uint32Input, offset + 8); - destNodes.push(dest); + destNodes[indexInBatch] = dest; if (indexInBatch === PARALLEL_FACTOR - 1) { hashInto(uint8Input, uint8Output); for (const [j, destNode] of destNodes.entries()) { byteArrayIntoHashObject(uint8Output, j * 32, destNode); } - destNodes = []; } + i++; } const remaining = hcArr.length % PARALLEL_FACTOR; @@ -84,8 +87,8 @@ export const hasher: Hasher = { const remainingOutput = uint8Output.subarray(0, remaining * 32); hashInto(remainingInput, remainingOutput); // destNodes was prepared above - for (const [i, destNode] of destNodes.entries()) { - byteArrayIntoHashObject(remainingOutput, i * 32, destNode); + for (let j = 0; j < remaining; j++) { + byteArrayIntoHashObject(remainingOutput, j * 32, destNodes[j]); } } } diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index dfa1d7e6..75442232 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -1,6 +1,6 @@ import {Hasher} from "./types"; import {hasher as nobleHasher} from "./noble"; -import type {HashComputation} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; export * from "./types"; export * from "./util"; @@ -31,6 +31,6 @@ export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Arr hasher.merkleizeInto(data, padFor, output, offset); } -export function executeHashComputations(hashComputations: HashComputation[][]): void { +export function executeHashComputations(hashComputations: HashComputationLevel[]): void { hasher.executeHashComputations(hashComputations); } diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index ad72cfcb..9f5813f0 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -1,5 +1,5 @@ import type {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import type {HashComputation} from "../node"; +import type {HashComputationLevel} from "../hashComputation"; export type {HashObject}; @@ -29,5 +29,5 @@ export type Hasher = { /** * Execute a batch of HashComputations */ - executeHashComputations(hashComputations: HashComputation[][]): void; + executeHashComputations(hashComputations: HashComputationLevel[]): void; }; diff --git a/packages/persistent-merkle-tree/src/index.ts b/packages/persistent-merkle-tree/src/index.ts index 5ff9c0b8..5311ca5a 100644 --- a/packages/persistent-merkle-tree/src/index.ts +++ b/packages/persistent-merkle-tree/src/index.ts @@ -1,6 +1,7 @@ export * from "./gindex"; export * from "./hasher"; export * from "./node"; +export * from "./hashComputation"; export * from "./packedNode"; export * from "./proof"; export * from "./subtree"; diff --git a/packages/persistent-merkle-tree/src/node.ts b/packages/persistent-merkle-tree/src/node.ts index 5d2b5bbe..5f80c667 100644 --- a/packages/persistent-merkle-tree/src/node.ts +++ b/packages/persistent-merkle-tree/src/node.ts @@ -1,21 +1,8 @@ import {HashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {executeHashComputations, hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from 
"./hasher"; +import {hashObjectToUint8Array, hasher, uint8ArrayToHashObject} from "./hasher"; const TWO_POWER_32 = 2 ** 32; -export type HashComputation = { - src0: Node; - src1: Node; - dest: Node; -}; - -export type HashComputationGroup = { - // global array - byLevel: HashComputation[][]; - // offset from top - offset: number; -}; - /** * An immutable binary merkle tree node */ @@ -83,15 +70,6 @@ export class BranchNode extends Node { } } - batchHash(): Uint8Array { - executeHashComputations(this.hashComputations); - - if (this.h0 === null) { - throw Error("Root is not computed by batch"); - } - return this.root; - } - get rootHashObject(): HashObject { if (this.h0 === null) { hasher.digest64HashObjects(this.left.rootHashObject, this.right.rootHashObject, this); @@ -114,12 +92,6 @@ export class BranchNode extends Node { get right(): Node { return this._right; } - - get hashComputations(): HashComputation[][] { - const hashComputations: HashComputation[][] = []; - getHashComputations(this, 0, hashComputations); - return hashComputations; - } } /** @@ -398,26 +370,3 @@ export function bitwiseOrNodeH(node: Node, hIndex: number, value: number): void else if (hIndex === 7) node.h7 |= value; else throw Error("hIndex > 7"); } - -/** - * Get HashComputations from a root node all the way to the leaf nodes. - */ -export function getHashComputations(node: Node, offset: number, hashCompsByLevel: HashComputation[][]): void { - if (node.h0 === null) { - const hashComputations = arrayAtIndex(hashCompsByLevel, offset); - const {left, right} = node; - hashComputations.push({src0: left, src1: right, dest: node}); - // leaf nodes should have h0 to stop the recursion - getHashComputations(left, offset + 1, hashCompsByLevel); - getHashComputations(right, offset + 1, hashCompsByLevel); - } - - // else stop the recursion, node is hashed -} - -export function arrayAtIndex(twoDArray: T[][], index: number): T[] { - if (twoDArray[index] === undefined) { - twoDArray[index] = []; - } - return twoDArray[index]; -} diff --git a/packages/persistent-merkle-tree/src/subtree.ts b/packages/persistent-merkle-tree/src/subtree.ts index 8708c670..65ea3f51 100644 --- a/packages/persistent-merkle-tree/src/subtree.ts +++ b/packages/persistent-merkle-tree/src/subtree.ts @@ -1,4 +1,5 @@ -import {BranchNode, HashComputationGroup, Node, arrayAtIndex, getHashComputations} from "./node"; +import {BranchNode, Node} from "./node"; +import {getHashComputations, levelAtIndex, HashComputationLevel} from "./hashComputation"; import {zeroNode} from "./zeroNode"; export function subtreeFillToDepth(bottom: Node, depth: number): Node { @@ -39,12 +40,13 @@ export function subtreeFillToLength(bottom: Node, depth: number, length: number) * WARNING: Mutates the provided nodes array. * @param hashCompRootNode is a hacky way from ssz to set `dest` of HashComputation for BranchNodeStruct * TODO: Don't mutate the nodes array. - * hashComps is an output parameter that will be filled with the hash computations if exists. + * hcByLevel is an output parameter that will be filled with the hash computations if exists. 
*/ export function subtreeFillToContents( nodes: Node[], depth: number, - hashComps: HashComputationGroup | null = null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { const maxLength = 2 ** depth; if (nodes.length > maxLength) { @@ -57,8 +59,8 @@ export function subtreeFillToContents( if (depth === 0) { const node = nodes[0]; - if (hashComps !== null) { - getHashComputations(node, hashComps.offset, hashComps.byLevel); + if (hcByLevel !== null) { + getHashComputations(node, hcOffset, hcByLevel); } return node; } @@ -70,15 +72,10 @@ export function subtreeFillToContents( const rightNode = nodes.length > 1 ? nodes[1] : zeroNode(0); const rootNode = new BranchNode(leftNode, rightNode); - if (hashComps !== null) { - const offset = hashComps.offset; - getHashComputations(leftNode, offset + 1, hashComps.byLevel); - getHashComputations(rightNode, offset + 1, hashComps.byLevel); - arrayAtIndex(hashComps.byLevel, offset).push({ - src0: leftNode, - src1: rightNode, - dest: rootNode, - }); + if (hcByLevel !== null) { + getHashComputations(leftNode, hcOffset + 1, hcByLevel); + getHashComputations(rightNode, hcOffset + 1, hcByLevel); + levelAtIndex(hcByLevel, hcOffset).push(leftNode, rightNode, rootNode); } return rootNode; @@ -89,7 +86,7 @@ export function subtreeFillToContents( for (let d = depth; d > 0; d--) { const countRemainder = count % 2; const countEven = count - countRemainder; - const offset = hashComps ? hashComps.offset + d - 1 : null; + const offset = hcByLevel ? hcOffset + d - 1 : null; // For each depth level compute the new BranchNodes and overwrite the nodes array for (let i = 0; i < countEven; i += 2) { @@ -97,16 +94,12 @@ export function subtreeFillToContents( const right = nodes[i + 1]; const node = new BranchNode(left, right); nodes[i / 2] = node; - if (offset !== null && hashComps !== null) { - arrayAtIndex(hashComps.byLevel, offset).push({ - src0: left, - src1: right, - dest: node, - }); + if (offset !== null && hcByLevel !== null) { + levelAtIndex(hcByLevel, offset).push(left, right, node); if (d === depth) { // bottom up strategy so we don't need to go down the tree except for the last level - getHashComputations(left, offset + 1, hashComps.byLevel); - getHashComputations(right, offset + 1, hashComps.byLevel); + getHashComputations(left, offset + 1, hcByLevel); + getHashComputations(right, offset + 1, hcByLevel); } } } @@ -116,13 +109,13 @@ export function subtreeFillToContents( const right = zeroNode(depth - d); const node = new BranchNode(left, right); nodes[countEven / 2] = node; - if (offset !== null && hashComps !== null) { + if (offset !== null && hcByLevel !== null) { if (d === depth) { // only go down on the last level - getHashComputations(left, offset + 1, hashComps.byLevel); + getHashComputations(left, offset + 1, hcByLevel); } // no need to getHashComputations for zero node - arrayAtIndex(hashComps.byLevel, offset).push({src0: left, src1: right, dest: node}); + levelAtIndex(hcByLevel, offset).push(left, right, node); } } diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts index 1ade6129..7e89be1c 100644 --- a/packages/persistent-merkle-tree/src/tree.ts +++ b/packages/persistent-merkle-tree/src/tree.ts @@ -1,6 +1,7 @@ import {zeroNode} from "./zeroNode"; import {Gindex, GindexBitstring, convertGindexToBitstring} from "./gindex"; -import {Node, LeafNode, BranchNode, HashComputationGroup, arrayAtIndex} from "./node"; +import {Node, LeafNode, BranchNode} from "./node"; +import 
{HashComputationLevel, levelAtIndex} from "./hashComputation"; import {createNodeFromProof, createProof, Proof, ProofInput} from "./proof"; import {createSingleProof} from "./proof/single"; @@ -73,13 +74,6 @@ export class Tree { return this.rootNode.root; } - batchHash(): Uint8Array { - if (!this.rootNode.isLeaf()) { - return (this.rootNode as BranchNode).batchHash(); - } - return this.root; - } - /** * Return a copy of the tree */ @@ -341,14 +335,15 @@ export function setNodeAtDepth(rootNode: Node, nodesDepth: number, index: number * gindex and navigate upwards creating or caching nodes as necessary. Loop and repeat. * * Supports index up to `Number.MAX_SAFE_INTEGER`. - * @param hashComps a map of HashComputation[] by level (could be from 0 to `nodesDepth - 1`) + * @param hcByLevel an array of HashComputation[] by level (could be from 0 to `nodesDepth - 1`) */ export function setNodesAtDepth( rootNode: Node, nodesDepth: number, indexes: number[], nodes: Node[], - hashComps: HashComputationGroup | null = null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { // depth depthi gindexes indexes // 0 1 1 0 @@ -367,8 +362,7 @@ export function setNodesAtDepth( if (nodesDepth === 0) { return nodes.length > 0 ? nodes[0] : rootNode; } - const hashCompsByLevel = hashComps?.byLevel ?? null; - const offset = hashComps?.offset ?? 0; + const offset = hcOffset; /** * Contiguous filled stack of parent nodes. It get filled in the first descent @@ -435,33 +429,25 @@ export function setNodesAtDepth( // Next node is the very next to the right of current node if (index + 1 === indexes[i + 1]) { node = new BranchNode(nodes[i], nodes[i + 1]); - if (hashCompsByLevel != null) { + if (hcByLevel != null) { // go with level of dest node (level 0 goes with root node) // in this case dest node is nodesDept - 2, same for below - arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({ - src0: nodes[i], - src1: nodes[i + 1], - dest: node, - }); + levelAtIndex(hcByLevel, nodesDepth - 1 + offset).push(nodes[i], nodes[i + 1], node); } // Move pointer one extra forward since node has consumed two nodes i++; } else { const oldNode = node; node = new BranchNode(nodes[i], oldNode.right); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({ - src0: nodes[i], - src1: oldNode.right, - dest: node, - }); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, nodesDepth - 1 + offset).push(nodes[i], oldNode.right, node); } } } else { const oldNode = node; node = new BranchNode(oldNode.left, nodes[i]); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, nodesDepth - 1 + offset).push({src0: oldNode.left, src1: nodes[i], dest: node}); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, nodesDepth - 1 + offset).push(oldNode.left, nodes[i], node); } } @@ -503,12 +489,8 @@ export function setNodesAtDepth( // Also, if still has to move upwards, rebind since the node won't be visited anymore const oldNode = node; node = new BranchNode(oldNode, parentNodeStack[d].right); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, depth + offset).push({ - src0: oldNode, - src1: parentNodeStack[d].right, - dest: node, - }); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, depth + offset).push(oldNode, parentNodeStack[d].right, node); } } else { // Only store the left node if it's at d = diffDepth @@ -521,19 +503,15 @@ export function setNodesAtDepth( if (leftNode !== undefined) { const oldNode = node; node = new BranchNode(leftNode, oldNode); - 
if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, depth + offset).push({src0: leftNode, src1: oldNode, dest: node}); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, depth + offset).push(leftNode, oldNode, node); } leftParentNodeStack[d] = undefined; } else { const oldNode = node; node = new BranchNode(parentNodeStack[d].left, oldNode); - if (hashCompsByLevel != null) { - arrayAtIndex(hashCompsByLevel, depth + offset).push({ - src0: parentNodeStack[d].left, - src1: oldNode, - dest: node, - }); + if (hcByLevel != null) { + levelAtIndex(hcByLevel, depth + offset).push(parentNodeStack[d].left, oldNode, node); } } } diff --git a/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts b/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts new file mode 100644 index 00000000..38df8b62 --- /dev/null +++ b/packages/persistent-merkle-tree/test/perf/hashComputation.test.ts @@ -0,0 +1,41 @@ +import {itBench} from "@dapplion/benchmark"; +import { HashComputation, HashComputationLevel, LeafNode, zeroHash } from "../../src"; + +describe("HashComputationLevel", function () { + const src = LeafNode.fromRoot(zeroHash(0)); + const dest = LeafNode.fromRoot(zeroHash(1)); + const hashComp: HashComputation = {src0: src, src1: src, dest, next: null}; + + const length = 1_000_000; + + itBench({ + id: "HashComputationLevel.push then loop", + before: () => new HashComputationLevel(), + beforeEach: (level) => { + level.reset(); + return level; + }, + fn: (level: HashComputationLevel) => { + for (let i = 0; i < length; i++) { + level.push(src, src, dest); + } + level.clean(); + for (const hc of level) { + const {src0, src1, dest} = hc; + } + } + }); + + itBench({ + id: "HashComputation[] push then loop", + fn: () => { + const level: HashComputation[] = []; + for (let i = 0; i < length; i++) { + level.push(hashComp); + } + for (const hc of level) { + const {src0, src1, dest} = hc; + } + } + }) +}); diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index d71a0948..bb8c3ecf 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -4,6 +4,7 @@ import {hasher as asShaHasher} from "../../src/hasher/as-sha256"; import {hasher as nobleHasher} from "../../src/hasher/noble"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {buildComparisonTrees} from "../utils/tree"; +import { HashComputationLevel, getHashComputations } from "../../src"; describe("hasher", function () { this.timeout(0); @@ -55,7 +56,9 @@ describe("hasher", function () { return tree; }, fn: (tree) => { - hasher.executeHashComputations(tree.hashComputations); + const hcByLevel: HashComputationLevel[] = []; + getHashComputations(tree, 0, hcByLevel); + hasher.executeHashComputations(hcByLevel); }, }); }); diff --git a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts b/packages/persistent-merkle-tree/test/unit/batchHash.test.ts deleted file mode 100644 index cb863dc6..00000000 --- a/packages/persistent-merkle-tree/test/unit/batchHash.test.ts +++ /dev/null @@ -1,48 +0,0 @@ -import {expect} from "chai"; -import {countToDepth} from "../../src/gindex"; -import {BranchNode, LeafNode, Node} from "../../src/node"; -import {subtreeFillToContents} from "../../src/subtree"; -import {zeroNode} from "../../src/zeroNode"; - -describe("batchHash", function () { - const numNodes = [200, 201, 202, 203]; - // const numNodes = [32, 33, 64]; - for 
(const numNode of numNodes) { - it(`${numNode} nodes`, () => { - const rootNode = createList(numNode); - const root1 = rootNode.batchHash(); - const rootNode2 = createList(numNode); - const root2 = rootNode2.root; - expect(root2).to.be.deep.equal(root1); - - const depth = countToDepth(BigInt(numNode)) + 1; - resetNodes(rootNode, depth); - resetNodes(rootNode2, depth); - expect(rootNode.batchHash()).to.be.deep.equal(rootNode2.batchHash()); - }); - } -}); - -function resetNodes(node: Node, depth: number): void { - if (node.isLeaf()) return; - // do not reset zeroNode - if (node === zeroNode(depth)) return; - // this is to ask Node to calculate node again - node.h0 = null as unknown as number; - // in the old version, we should do - // node._root = null; - resetNodes(node.left, depth - 1); - resetNodes(node.right, depth - 1); -} - -function newLeafNodeFilled(i: number): LeafNode { - return LeafNode.fromRoot(new Uint8Array(Array.from({length: 32}, () => i % 256))); -} - -function createList(numNode: number): BranchNode { - const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); - // add 1 to countToDepth for mix_in_length spec - const depth = countToDepth(BigInt(numNode)) + 1; - const node = subtreeFillToContents(nodes, depth); - return node as BranchNode; -} diff --git a/packages/persistent-merkle-tree/test/unit/hashComputation.test.ts b/packages/persistent-merkle-tree/test/unit/hashComputation.test.ts new file mode 100644 index 00000000..da2a5150 --- /dev/null +++ b/packages/persistent-merkle-tree/test/unit/hashComputation.test.ts @@ -0,0 +1,83 @@ +import { expect } from "chai"; +import { zeroNode, Node } from "../../src"; +import {HashComputationLevel} from "../../src/hashComputation"; + +describe("HashComputationLevel", () => { + let hashComputationLevel: HashComputationLevel; + + beforeEach(() => { + hashComputationLevel = new HashComputationLevel(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + }); + + it("should reset", () => { + hashComputationLevel.reset(); + expect(hashComputationLevel.length).to.be.equal(0); + expect(hashComputationLevel.totalLength).to.be.equal(1); + expect(toArray(hashComputationLevel)).to.be.deep.equal([]); + }); + + it("should push", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + expect(hashComputationLevel.length).to.be.equal(2); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr.length).to.be.equal(2); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)} + ]); + }); + + it("reset then push full", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.reset(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.clean(); + expect(hashComputationLevel.length).to.be.equal(2); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)} + ]); + }); + + it("reset then push partial", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + // totalLength = 2 now + hashComputationLevel.reset(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + 
hashComputationLevel.clean(); + expect(hashComputationLevel.length).to.be.equal(1); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + ]); + }); + + it("clean", () => { + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.reset(); + hashComputationLevel.push(zeroNode(0), zeroNode(0), zeroNode(1)); + hashComputationLevel.clean(); + expect(hashComputationLevel.length).to.be.equal(1); + expect(hashComputationLevel.totalLength).to.be.equal(2); + const arr = toArray(hashComputationLevel); + expect(arr).to.be.deep.equal([ + {src0: zeroNode(0), src1: zeroNode(0), dest: zeroNode(1)}, + ]); + const all = hashComputationLevel.dump(); + const last = all[all.length - 1]; + expect(last.src0).to.be.null; + expect(last.src1).to.be.null; + expect(last.dest).to.be.null; + }); + +}); + +function toArray(hc: HashComputationLevel): {src0: Node; src1: Node; dest: Node}[] { + return hc.toArray().map(({src0, src1, dest}) => ({src0, src1, dest})); +} diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index 46f454b0..ee129fd0 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -5,7 +5,7 @@ import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {linspace} from "../utils/misc"; import {buildComparisonTrees} from "../utils/tree"; -import {HashObject, LeafNode, subtreeFillToContents} from "../../src"; +import {HashComputationLevel, HashObject, LeafNode, getHashComputations, subtreeFillToContents} from "../../src"; import { expect } from "chai"; import { zeroHash } from "../../src/zeroHash"; @@ -61,7 +61,8 @@ describe("hashers", function () { for (const hasher of hashers) { it(hasher.name, () => { const [tree1, tree2] = buildComparisonTrees(8); - const hashComputations = tree2.hashComputations; + const hashComputations: HashComputationLevel[] = []; + getHashComputations(tree1, 0, hashComputations); hasher.executeHashComputations(hashComputations); expectEqualHex(tree1.root, tree2.root); }); diff --git a/packages/persistent-merkle-tree/test/unit/node.test.ts b/packages/persistent-merkle-tree/test/unit/node.test.ts index bbe8d7be..1fbcb54e 100644 --- a/packages/persistent-merkle-tree/test/unit/node.test.ts +++ b/packages/persistent-merkle-tree/test/unit/node.test.ts @@ -1,6 +1,7 @@ import {HashObject} from "@chainsafe/as-sha256"; import {expect} from "chai"; -import {BranchNode, HashComputation, LeafNode, countToDepth, getHashComputations, subtreeFillToContents} from "../../src"; +import {BranchNode, LeafNode, countToDepth, subtreeFillToContents} from "../../src"; +import {HashComputation, HashComputationLevel, getHashComputations} from "../../src/hashComputation"; describe("LeafNode uint", () => { const testCasesNode: { @@ -222,7 +223,7 @@ describe("getHashComputations", () => { const nodes = Array.from({length: numNode}, (_, i) => newLeafNodeFilled(i)); const depth = countToDepth(BigInt(numNode)); const rootNode = subtreeFillToContents(nodes, depth); - const hashComputations: HashComputation[][] = []; + const hashComputations: HashComputationLevel[] = []; getHashComputations(rootNode, 0, hashComputations); expect(hashComputations.length).to.equal(expectedLengths.length); for (let i = 0; i 
< hashComputations.length; i++) { @@ -252,7 +253,7 @@ describe("getHashComputations", () => { for (const {numNode, expectedLengths} of testCases2) { it(`list with ${numNode} nodes`, () => { const rootNode = createList(numNode); - const hashComputations: HashComputation[][] = []; + const hashComputations: HashComputationLevel[] = []; getHashComputations(rootNode, 0, hashComputations); expect(hashComputations.length).to.equal(expectedLengths.length); for (let i = 0; i < hashComputations.length; i++) { diff --git a/packages/persistent-merkle-tree/test/unit/subtree.test.ts b/packages/persistent-merkle-tree/test/unit/subtree.test.ts index 880838d7..d46f7ad6 100644 --- a/packages/persistent-merkle-tree/test/unit/subtree.test.ts +++ b/packages/persistent-merkle-tree/test/unit/subtree.test.ts @@ -1,5 +1,5 @@ import { expect } from "chai"; -import {subtreeFillToContents, LeafNode, getNodesAtDepth, executeHashComputations, BranchNode, Node} from "../../src"; +import {subtreeFillToContents, LeafNode, getNodesAtDepth, executeHashComputations, BranchNode, Node, HashComputationLevel, zeroNode} from "../../src"; describe("subtreeFillToContents", function () { // the hash computation takes time @@ -39,12 +39,8 @@ describe("subtreeFillToContents", function () { expectedNodes[i] = node; } - const hashComps = { - offset: 0, - byLevel: [], - }; - - const node = subtreeFillToContents(nodes, depth, hashComps); + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodes, depth, 0, hcByLevel); const retrievedNodes = getNodesAtDepth(node, depth, 0, count); // Assert correct @@ -53,7 +49,7 @@ describe("subtreeFillToContents", function () { throw Error(`Wrong node at index ${i}`); } } - executeHashComputations(hashComps.byLevel); + executeHashComputations(hcByLevel); if (node.h0 === null) { throw Error("Root node h0 is null"); } @@ -62,7 +58,46 @@ describe("subtreeFillToContents", function () { } }); -describe("subtreeFillToContents - validator nodes", function () { +describe("subtreeFillToContents with hcByLevel", function () { + + it("depth = 0", () => { + // return zeroNode, no hash computations + const nodes = [LeafNode.fromZero()]; + const hcByLevel: HashComputationLevel[] = []; + subtreeFillToContents(nodes, 0, 0, hcByLevel); + expect(hcByLevel.length).to.equal(0); + }); + + it("depth = 1, bottom nodes are leaf nodes", () => { + // return BranchNode, hash computations + const nodes = [LeafNode.fromZero(), LeafNode.fromZero()]; + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodes, 1, 0, hcByLevel); + expect(hcByLevel.length).to.equal(1); + expect(hcByLevel[0].length).to.equal(1); + executeHashComputations(hcByLevel); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } + expect(node.root).to.deep.equal(zeroNode(1).root); + }); + + it("depth = 1, bottom nodes are branch nodes", () => { + const node0 = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); + const node1 = new BranchNode(LeafNode.fromZero(), LeafNode.fromZero()); + const nodes = [node0, node1]; + const hcByLevel: HashComputationLevel[] = []; + const node = subtreeFillToContents(nodes, 1, 0, hcByLevel); + expect(hcByLevel.length).to.equal(2); + expect(hcByLevel[0].length).to.equal(1); + expect(hcByLevel[1].length).to.equal(2); + executeHashComputations(hcByLevel); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } + expect(node.root).to.deep.equal(zeroNode(2).root); + }); + /** * 0 root * / \ @@ -90,20 +125,18 @@ describe("subtreeFillToContents 
- validator nodes", function () {
 // maxChunksToDepth in ssz returns 3 for 8 nodes
 const depth = 3;
 const root0 = subtreeFillToContents(nodesArr[0], depth);
- const hashComps = {
- offset: 0,
- byLevel: new Array<[]>(),
- };
- const node = subtreeFillToContents(nodesArr[1], depth, hashComps);
- expect(hashComps.byLevel.length).to.equal(4);
- expect(hashComps.byLevel[0].length).to.equal(1);
- expect(hashComps.byLevel[1].length).to.equal(2);
- expect(hashComps.byLevel[2].length).to.equal(4);
- expect(hashComps.byLevel[3].length).to.equal(1);
- executeHashComputations(hashComps.byLevel);
+ const hcByLevel: HashComputationLevel[] = [];
+ const node = subtreeFillToContents(nodesArr[1], depth, 0, hcByLevel);
+ expect(hcByLevel.length).to.equal(4);
+ expect(hcByLevel[0].length).to.equal(1);
+ expect(hcByLevel[1].length).to.equal(2);
+ expect(hcByLevel[2].length).to.equal(4);
+ expect(hcByLevel[3].length).to.equal(1);
+ executeHashComputations(hcByLevel);
 if (node.h0 === null) {
 throw Error("Root node h0 is null");
 }
+ // node.root is computed in batch, root0.root is computed in a single call
 expect(node.root).to.deep.equal(root0.root);
 });
});
diff --git a/packages/persistent-merkle-tree/test/unit/tree.test.ts b/packages/persistent-merkle-tree/test/unit/tree.test.ts
index a885466d..092ee122 100644
--- a/packages/persistent-merkle-tree/test/unit/tree.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/tree.test.ts
@@ -13,6 +13,8 @@ import {
 BranchNode,
 HashComputation,
 getHashComputations,
+ HashComputationLevel,
+ executeHashComputations,
} from "../../src";

describe("fixed-depth tree iteration", () => {
@@ -59,7 +61,8 @@ describe("batchHash() vs root getter", () => {
 const depth = Math.ceil(Math.log2(length));
 const tree = new Tree(subtreeFillToContents([...leaves], depth));
 const tree2 = new Tree(subtreeFillToContents([...leaves], depth));
- expect(tree.batchHash()).to.be.deep.equal(tree2.root);
+ batchHash(tree.rootNode);
+ expect(tree.root).to.be.deep.equal(tree2.root);
 });
 }
});
@@ -124,7 +127,8 @@ describe("Tree.setNode vs Tree.setHashObjectFn", () => {
 tree2.setNodeWithFn(BigInt(18), getNewNodeFn);
 tree2.setNodeWithFn(BigInt(46), getNewNodeFn);
 tree2.setNodeWithFn(BigInt(60), getNewNodeFn);
- expect(toHex((tree2.rootNode as BranchNode).batchHash())).to.equal("02607e58782c912e2f96f4ff9daf494d0d115e7c37e8c2b7ddce17213591151b");
+ batchHash(tree2.rootNode);
+ expect(toHex(tree2.root)).to.equal("02607e58782c912e2f96f4ff9daf494d0d115e7c37e8c2b7ddce17213591151b");
 });

 it("Should throw for gindex 0", () => {
@@ -164,7 +168,7 @@ describe("Tree batch setNodes", () => {
 const treeOk = new Tree(zeroNode(depth));
 // cache all roots
 treeOk.root;
- const hashComputationsOk: Array<HashComputation[]> = Array.from({length: depth}, () => []);
+ const hashComputationsOk: Array<HashComputationLevel> = Array.from({length: depth}, () => new HashComputationLevel());
 const tree = new Tree(zeroNode(depth));
 tree.root;
 const gindexesBigint = gindexes.map((gindex) => BigInt(gindex));
@@ -183,14 +187,15 @@

 it(`${id} - setNodesAtDepth()`, () => {
 const chunksNode = tree.rootNode;
- const hashComputations: Array<HashComputation[]> = Array.from({length: depth}, () => []);
+ const hcByLevel: HashComputationLevel[] = [];
 const newChunksNode = setNodesAtDepth(
 chunksNode,
 depth,
 indexes,
 gindexes.map((nodeValue) => LeafNode.fromRoot(Buffer.alloc(32, nodeValue))),
 // TODO: more test cases with positive offset? 
- {byLevel: hashComputations, offset: 0} + 0, + hcByLevel ); tree.rootNode = newChunksNode; const roots = getTreeRoots(tree, maxGindex); @@ -201,10 +206,12 @@ describe("Tree batch setNodes", () => { // TODO: need sort? // TODO: confirm all nodes in HashComputation are populated with HashObjects, h0 !== null for (let i = depth - 1; i >= 0; i--) { - expect(hashComputations[i].length).to.be.equal(hashComputationsOk[i].length, `incorrect length at depth ${i}`); - for (let j = 0; j < hashComputations[i].length; j++) { - const hcOk = hashComputationsOk[i][j]; - const hc = hashComputations[i][j]; + const hcArr = hcByLevel[i].toArray(); + const hcOkArr = hashComputationsOk[i].toArray(); + expect(hcArr.length).to.be.equal(hcOkArr.length, `incorrect length at depth ${i}`); + for (let j = 0; j < hcArr.length; j++) { + const hcOk = hcOkArr[j]; + const hc = hcArr[j]; expect(hc.src0.root).to.be.deep.equal(hcOk.src0.root); expect(hc.src1.root).to.be.deep.equal(hcOk.src1.root); expect(hc.dest.root).to.be.deep.equal(hcOk.dest.root); @@ -287,3 +294,14 @@ function toHex(bytes: Buffer | Uint8Array): string { return Buffer.from(bytes).toString("hex"); } +/** + * This is only a test utility function, don't want to use it in production because it allocates memory every time. + */ +function batchHash(node: Node): void { + const hashComputations: HashComputationLevel[] = []; + getHashComputations(node, 0, hashComputations); + executeHashComputations(hashComputations); + if (node.h0 === null) { + throw Error("Root node h0 is null"); + } +} diff --git a/packages/ssz/src/type/arrayBasic.ts b/packages/ssz/src/type/arrayBasic.ts index c10161fe..4119c26b 100644 --- a/packages/ssz/src/type/arrayBasic.ts +++ b/packages/ssz/src/type/arrayBasic.ts @@ -1,12 +1,12 @@ import { BranchNode, - HashComputationGroup, LeafNode, Node, getNodesAtDepth, packedNodeRootsToBytes, packedRootsBytesToNode, - arrayAtIndex, + levelAtIndex, + HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; import {Type, ValueOf, ByteViews} from "./abstract"; import {BasicType} from "./basic"; @@ -45,7 +45,8 @@ export function setChunksNode( rootNode: Node, chunksNode: Node, newLength: number | null, - hashComps: HashComputationGroup | null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { const lengthNode = newLength !== null @@ -54,8 +55,8 @@ export function setChunksNode( : // else re-use existing node (rootNode.right as LeafNode); const branchNode = new BranchNode(chunksNode, lengthNode); - if (hashComps !== null) { - arrayAtIndex(hashComps.byLevel, hashComps.offset).push({src0: chunksNode, src1: lengthNode, dest: branchNode}); + if (hcByLevel !== null) { + levelAtIndex(hcByLevel, hcOffset).push(chunksNode, lengthNode, branchNode); } return branchNode; } diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts index 65c76b4f..0a65d11b 100644 --- a/packages/ssz/src/type/bitArray.ts +++ b/packages/ssz/src/type/bitArray.ts @@ -1,4 +1,4 @@ -import {concatGindices, Gindex, HashComputationGroup, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; +import {concatGindices, Gindex, HashComputationLevel, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; import {CompositeType, LENGTH_GINDEX} from "./composite"; import {BitArray} from "../value/bitArray"; @@ -29,8 +29,8 @@ export abstract class BitArrayType extends CompositeType extends Type { /** INTERNAL METHOD: Given a Tree View, returns a 
`Node` with all its updated data */ abstract commitView(view: TV): Node; /** INTERNAL METHOD: Given a Deferred Update Tree View returns a `Node` with all its updated data */ - abstract commitViewDU(view: TVDU, hashComps?: HashComputationGroup | null): Node; + abstract commitViewDU(view: TVDU, hcOffset?: number, hcByLevel?: HashComputationLevel[] | null): Node; /** INTERNAL METHOD: Return the cache of a Deferred Update Tree View. May return `undefined` if this ViewDU has no cache */ abstract cacheOfViewDU(view: TVDU): unknown; diff --git a/packages/ssz/src/type/container.ts b/packages/ssz/src/type/container.ts index 67d6afb3..1ed46a89 100644 --- a/packages/ssz/src/type/container.ts +++ b/packages/ssz/src/type/container.ts @@ -7,7 +7,7 @@ import { toGindex, concatGindices, getNode, - HashComputationGroup, + HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; @@ -166,8 +166,12 @@ export class ContainerType>> extends return view.node; } - commitViewDU(view: ContainerTreeViewDUType, hashComps: HashComputationGroup | null = null): Node { - view.commit(hashComps); + commitViewDU( + view: ContainerTreeViewDUType, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index ed4e24c9..d509c065 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, LeafNode, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, LeafNode, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "./abstract"; import {BasicType} from "./basic"; import {ByteViews} from "./composite"; @@ -93,8 +93,12 @@ export class ListBasicType> return view.node; } - commitViewDU(view: ListBasicTreeViewDU, hashComps: HashComputationGroup | null = null): Node { - view.commit(hashComps); + commitViewDU( + view: ListBasicTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } @@ -153,9 +157,10 @@ export class ListBasicType> rootNode: Node, chunksNode: Node, newLength: number | null, - hashComps: HashComputationGroup | null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { - return setChunksNode(rootNode, chunksNode, newLength, hashComps); + return setChunksNode(rootNode, chunksNode, newLength, hcOffset, hcByLevel); } // Merkleization diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index f74a3c88..14a90821 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -98,8 +98,12 @@ export class ListCompositeType< return view.node; } - commitViewDU(view: ListCompositeTreeViewDU, hashComps: HashComputationGroup | null = null): Node { - view.commit(hashComps); + commitViewDU( + view: ListCompositeTreeViewDU, + hcOffset = 0, + 
hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } @@ -160,9 +164,10 @@ export class ListCompositeType< rootNode: Node, chunksNode: Node, newLength: number | null, - hashComps: HashComputationGroup | null + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null ): Node { - return setChunksNode(rootNode, chunksNode, newLength, hashComps); + return setChunksNode(rootNode, chunksNode, newLength, hcOffset, hcByLevel); } // Merkleization diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts index 1b86ad03..bb189044 100644 --- a/packages/ssz/src/type/vectorBasic.ts +++ b/packages/ssz/src/type/vectorBasic.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, Tree} from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -87,8 +87,12 @@ export class VectorBasicType> return view.node; } - commitViewDU(view: ArrayBasicTreeViewDU, hashComps: HashComputationGroup | null = null): Node { - view.commit(hashComps); + commitViewDU( + view: ArrayBasicTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts index af5b1d1f..73e17373 100644 --- a/packages/ssz/src/type/vectorComposite.ts +++ b/packages/ssz/src/type/vectorComposite.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, Tree} from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -94,8 +94,12 @@ export class VectorCompositeType< return view.node; } - commitViewDU(view: ArrayCompositeTreeViewDU, hashComps: HashComputationGroup | null = null): Node { - view.commit(hashComps); + commitViewDU( + view: ArrayCompositeTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } diff --git a/packages/ssz/src/view/arrayBasic.ts b/packages/ssz/src/view/arrayBasic.ts index d131dc6a..5997e14c 100644 --- a/packages/ssz/src/view/arrayBasic.ts +++ b/packages/ssz/src/view/arrayBasic.ts @@ -1,4 +1,4 @@ -import {getNodesAtDepth, HashComputationGroup, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {getNodesAtDepth, HashComputationLevel, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {BasicType} from "../type/basic"; import {CompositeType} from "../type/composite"; @@ -28,7 +28,8 @@ export type ArrayBasicType> = CompositeTy rootNode: Node, chunksNode: Node, newLength: number | null, - hashComps: HashComputationGroup | null + hcOffset?: number, + hcByLevel?: HashComputationLevel[] | null ): Node; }; diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts index bbcf899d..0e9ae75a 100644 --- a/packages/ssz/src/view/arrayComposite.ts +++ b/packages/ssz/src/view/arrayComposite.ts @@ -1,4 +1,4 @@ -import {getNodesAtDepth, HashComputationGroup, Node, toGindexBitstring, Tree} from "@chainsafe/persistent-merkle-tree"; +import {getNodesAtDepth, HashComputationLevel, 
Node, toGindexBitstring, Tree} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {TreeView} from "./abstract"; @@ -23,7 +23,8 @@ export type ArrayCompositeType< rootNode: Node, chunksNode: Node, newLength: number | null, - hashComps: HashComputationGroup | null + hcOffset?: number, + hcByLevel?: HashComputationLevel[] | null ): Node; }; diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts index 90190b0a..08bd8551 100644 --- a/packages/ssz/src/viewDU/abstract.ts +++ b/packages/ssz/src/viewDU/abstract.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, executeHashComputations} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, executeHashComputations, HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; import {ByteViews, CompositeType} from "../type/composite"; import {TreeView} from "../view/abstract"; @@ -6,11 +6,6 @@ import {TreeView} from "../view/abstract"; * Always allocating a new HashComputationGroup for each hashTreeRoot() is not great for gc * because a lot of ViewDUs are not changed and computed root already. */ -let nextHashComps: HashComputationGroup = { - byLevel: [], - offset: 0, -}; - const symbolCachedTreeRoot = Symbol("ssz_cached_tree_root"); export type NodeWithCachedTreeRoot = { @@ -35,7 +30,7 @@ export abstract class TreeViewDU 0 || nextHashComps.offset !== 0) { - // preallocate for the next time - nextHashComps = { - byLevel: [], - offset: 0, - }; - executeHashComputations(hashComps.byLevel); - // This makes sure the root node is computed by batch - if (this.node.h0 === null) { - throw Error("Root is not computed by batch"); - } - } - + const offset = 0; + hcGroup.reset(); + this.commit(offset, hcGroup.byLevel); + hcGroup.clean(); const cachedRoot = (this.node as NodeWithCachedTreeRoot)[symbolCachedTreeRoot]; if (cachedRoot) { return cachedRoot; - } else { - const root = this.node.root; - (this.node as NodeWithCachedTreeRoot)[symbolCachedTreeRoot] = root; - return root; } + executeHashComputations(hcGroup.byLevel); + // This makes sure the root node is computed by batch + if (this.node.h0 === null) { + throw Error("Root is not computed by batch"); + } + + const root = this.node.root; + (this.node as NodeWithCachedTreeRoot)[symbolCachedTreeRoot] = root; + return root; } /** diff --git a/packages/ssz/src/viewDU/arrayBasic.ts b/packages/ssz/src/viewDU/arrayBasic.ts index 6899174e..0464b26c 100644 --- a/packages/ssz/src/viewDU/arrayBasic.ts +++ b/packages/ssz/src/viewDU/arrayBasic.ts @@ -2,7 +2,7 @@ import { getHashComputations, getNodeAtDepth, getNodesAtDepth, - HashComputationGroup, + HashComputationLevel, LeafNode, Node, setNodesAtDepth, @@ -163,15 +163,15 @@ export class ArrayBasicTreeViewDU> extend } /** - * When we need to compute HashComputations (hashComps != null): - * - if old _rootNode is hashed, then only need to put pending changes to HashComputationGroup - * - if old _rootNode is not hashed, need to traverse and put to HashComputationGroup + * When we need to compute HashComputations (hcByLevel != null): + * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel + * - if old _rootNode is not hashed, need to traverse and put to hcByLevel */ - commit(hashComps: HashComputationGroup | null = null): void { + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { const isOldRootHashed = this._rootNode.h0 !== null; if 
(this.nodesChanged.size === 0) { - if (!isOldRootHashed && hashComps !== null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); } return; } @@ -184,24 +184,20 @@ export class ArrayBasicTreeViewDU> extend } const chunksNode = this.type.tree_getChunksNode(this._rootNode); - const hashCompsThis = - hashComps != null && isOldRootHashed - ? { - byLevel: hashComps.byLevel, - offset: hashComps.offset + this.type.tree_chunksNodeOffset(), - } - : null; - const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, hashCompsThis); + const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); this._rootNode = this.type.tree_setChunksNode( this._rootNode, newChunksNode, this.dirtyLength ? this._length : null, - isOldRootHashed ? hashComps : null + hcOffset, + isOldRootHashed ? hcByLevel : null ); - if (!isOldRootHashed && hashComps !== null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); } this.nodesChanged.clear(); diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 3e854aa3..4c429f18 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -2,7 +2,7 @@ import { getHashComputations, getNodeAtDepth, getNodesAtDepth, - HashComputationGroup, + HashComputationLevel, Node, setNodesAtDepth, } from "@chainsafe/persistent-merkle-tree"; @@ -177,28 +177,23 @@ export class ArrayCompositeTreeViewDU< } /** - * When we need to compute HashComputations (hashComps != null): - * - if old _rootNode is hashed, then only need to put pending changes to HashComputationGroup - * - if old _rootNode is not hashed, need to traverse and put to HashComputationGroup + * When we need to compute HashComputations (hcByLevel != null): + * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel + * - if old _rootNode is not hashed, need to traverse and put to hcByLevel */ - commit(hashComps: HashComputationGroup | null = null): void { + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { const isOldRootHashed = this._rootNode.h0 !== null; if (this.viewsChanged.size === 0) { - if (!isOldRootHashed && hashComps !== null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); } return; } - // each view may mutate HashComputationGroup at offset + depth - const hashCompsView = - hashComps != null && isOldRootHashed - ? { - byLevel: hashComps.byLevel, - // Depth includes the extra level for the length node - offset: hashComps.offset + this.type.depth, - } - : null; + // each view may mutate hcByLevel at offset + depth + const offsetView = hcOffset + this.type.depth; + // Depth includes the extra level for the length node + const byLevelView = hcByLevel != null && isOldRootHashed ? 
hcByLevel : null; const indexesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b); const indexes: number[] = []; @@ -210,7 +205,7 @@ export class ArrayCompositeTreeViewDU< throw Error("View not found in viewsChanged, index=" + index); } - const node = this.type.elementType.commitViewDU(view, hashCompsView); + const node = this.type.elementType.commitViewDU(view, offsetView, byLevelView); // there's a chance the view is not changed, no need to rebind nodes in that case if (this.nodes[index] !== node) { // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal @@ -226,24 +221,20 @@ export class ArrayCompositeTreeViewDU< } const chunksNode = this.type.tree_getChunksNode(this._rootNode); - const hashCompsThis = - hashComps != null && isOldRootHashed - ? { - byLevel: hashComps.byLevel, - offset: hashComps.offset + this.type.tree_chunksNodeOffset(), - } - : null; - const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, hashCompsThis); + const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); this._rootNode = this.type.tree_setChunksNode( this._rootNode, newChunksNode, this.dirtyLength ? this._length : null, - hashComps + hcOffset, + hcByLevel ); - if (!isOldRootHashed && hashComps !== null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); } this.viewsChanged.clear(); diff --git a/packages/ssz/src/viewDU/bitArray.ts b/packages/ssz/src/viewDU/bitArray.ts index 50c7f4c6..b9c12d14 100644 --- a/packages/ssz/src/viewDU/bitArray.ts +++ b/packages/ssz/src/viewDU/bitArray.ts @@ -1,4 +1,4 @@ -import {HashComputationGroup, Node, getHashComputations} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, getHashComputations} from "@chainsafe/persistent-merkle-tree"; import {BitArray} from "../value/bitArray"; import {CompositeType} from "../type/composite"; import {TreeViewDU} from "./abstract"; @@ -22,13 +22,13 @@ export class BitArrayTreeViewDU extends TreeViewDU>> extends } /** - * When we need to compute HashComputations (hashComps != null): - * - if old _rootNode is hashed, then only need to put pending changes to HashComputationGroup - * - if old _rootNode is not hashed, need to traverse and put to HashComputationGroup + * When we need to compute HashComputations (hcByLevel != null): + * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel + * - if old _rootNode is not hashed, need to traverse and put to hcByLevel */ - commit(hashComps: HashComputationGroup | null = null): void { + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { const isOldRootHashed = this._rootNode.h0 !== null; if (this.nodesChanged.size === 0 && this.viewsChanged.size === 0) { - if (!isOldRootHashed && hashComps !== null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); } return; } - let hashCompsView: HashComputationGroup | null = null; - // if old root is not hashed, no need to pass HashComputationGroup to child view bc we need to do full traversal here - if (hashComps != null && 
isOldRootHashed) { - // each view may mutate HashComputationGroup at offset + depth - hashCompsView = {byLevel: hashComps.byLevel, offset: hashComps.offset + this.type.depth}; - } + // each view may mutate hcByLevel at offset + depth + const offsetView = hcOffset + this.type.depth; + // if old root is not hashed, no need to pass hcByLevel to child view bc we need to do full traversal here + const byLevelView = hcByLevel != null && isOldRootHashed ? hcByLevel : null; // union all changes then sort, they should not be duplicated const combinedIndexes = [...this.nodesChanged, ...Array.from(this.viewsChanged.keys())].sort((a, b) => a - b); @@ -105,7 +103,7 @@ class ContainerTreeViewDU>> extends if (view) { // composite type const fieldType = this.type.fieldsEntries[index].fieldType as unknown as CompositeTypeAny; - const node = fieldType.commitViewDU(view, hashCompsView); + const node = fieldType.commitViewDU(view, offsetView, byLevelView); // there's a chance the view is not changed, no need to rebind nodes in that case if (this.nodes[index] !== node) { // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal @@ -128,12 +126,13 @@ class ContainerTreeViewDU>> extends this.type.depth, indexes, nodes, - isOldRootHashed ? hashComps : null + hcOffset, + isOldRootHashed ? hcByLevel : null ); - // old root is not hashed, need to traverse and put to HashComputationGroup - if (!isOldRootHashed && hashComps !== null) { - getHashComputations(this._rootNode, hashComps.offset, hashComps.byLevel); + // old root is not hashed, need to traverse + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); } this.nodesChanged.clear(); diff --git a/packages/ssz/src/viewDU/listBasic.ts b/packages/ssz/src/viewDU/listBasic.ts index fed05791..d26a5c6f 100644 --- a/packages/ssz/src/viewDU/listBasic.ts +++ b/packages/ssz/src/viewDU/listBasic.ts @@ -79,7 +79,7 @@ export class ListBasicTreeViewDU> extends // Must set new length and commit to tree to restore the same tree at that index const newLength = index + 1; - const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength, null); + const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength); return this.type.getViewDU(newRootNode) as this; } diff --git a/packages/ssz/src/viewDU/listComposite.ts b/packages/ssz/src/viewDU/listComposite.ts index 1522a55a..09993624 100644 --- a/packages/ssz/src/viewDU/listComposite.ts +++ b/packages/ssz/src/viewDU/listComposite.ts @@ -64,7 +64,7 @@ export class ListCompositeTreeViewDU< // Must set new length and commit to tree to restore the same tree at that index const newLength = index + 1; - const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength, null); + const newRootNode = this.type.tree_setChunksNode(rootNode, newChunksNode, newLength); return this.type.getViewDU(newRootNode) as this; } @@ -105,7 +105,7 @@ export class ListCompositeTreeViewDU< newLength = nodes.length; } - const newRootNode = this.type.tree_setChunksNode(this._rootNode, newChunksNode, newLength, null); + const newRootNode = this.type.tree_setChunksNode(this._rootNode, newChunksNode, newLength); return this.type.getViewDU(newRootNode) as this; } diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index 09bd80b8..6536748b 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ 
b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -1,5 +1,5 @@ import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; -import {HashComputationGroup, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; import {ListCompositeType} from "../../../../src/type/listComposite"; import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite"; import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite"; @@ -48,10 +48,10 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU entry.index); const nodes = nodesChanged.map((entry) => entry.node); const chunksNode = this.type.tree_getChunksNode(this._rootNode); - const hashCompsThis = - hashComps != null && isOldRootHashed - ? { - byLevel: hashComps.byLevel, - offset: hashComps.offset + this.type.tree_chunksNodeOffset(), - } - : null; - const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, hashCompsThis); + const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); this._rootNode = this.type.tree_setChunksNode( this._rootNode, newChunksNode, this.dirtyLength ? this._length : null, - hashComps + hcOffset, + hcByLevel ); - if (!isOldRootHashed && hashComps !== null) { + if (!isOldRootHashed && hcByLevel !== null) { // should never happen, handle just in case // not possible to get HashComputations due to BranchNodeStruct this._rootNode.root; diff --git a/packages/ssz/test/perf/eth2/beaconState.test.ts b/packages/ssz/test/perf/eth2/beaconState.test.ts index 2fecb3f0..0fbed3a5 100644 --- a/packages/ssz/test/perf/eth2/beaconState.test.ts +++ b/packages/ssz/test/perf/eth2/beaconState.test.ts @@ -1,11 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import { - BranchNode, - HashComputationGroup, - getHashComputations, - executeHashComputations, - HashComputation, -} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, executeHashComputations, HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; import {BeaconState} from "../../lodestarTypes/altair/sszTypes"; import {BitArray, CompositeViewDU, toHexString} from "../../../src"; @@ -58,47 +52,13 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM }, }); - itBench({ - id: `BeaconState ViewDU batchHash vc=${vc}`, - beforeEach: () => createPartiallyModifiedDenebState(), - fn: (state: CompositeViewDU) => { - state.commit(); - (state.node as BranchNode).batchHash(); - if (toHexString(state.node.root) !== expectedRoot) { - throw new Error("hashTreeRoot does not match expectedRoot"); - } - }, - }); - - itBench({ - id: `BeaconState ViewDU batchHash - commit & getHashComputation vc=${vc}`, - beforeEach: () => createPartiallyModifiedDenebState(), - fn: (state: CompositeViewDU) => { - state.commit(); - getHashComputations(state.node, 0, []); - }, - }); - - itBench({ - id: `BeaconState ViewDU batchHash - hash step vc=${vc}`, - beforeEach: () => { - const state = createPartiallyModifiedDenebState(); - state.commit(); - const hashComputations: HashComputation[][] = []; - getHashComputations(state.node, 0, hashComputations); - return hashComputations; - }, - fn: (hashComputations: HashComputation[][]) => { - 
executeHashComputations(hashComputations); - }, - }); - + const hc = new HashComputationGroup(); itBench({ id: `BeaconState ViewDU hashTreeRoot vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - // commit() step is inside hashTreeRoot() - if (toHexString(state.hashTreeRoot()) !== expectedRoot) { + // commit() step is inside hashTreeRoot(), reuse HashComputationGroup + if (toHexString(state.hashTreeRoot(hc)) !== expectedRoot) { throw new Error("hashTreeRoot does not match expectedRoot"); } }, @@ -108,11 +68,7 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - const hashComps: HashComputationGroup = { - byLevel: [], - offset: 0, - }; - state.commit(hashComps); + state.commit(0, []); }, }); @@ -120,29 +76,12 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); - const hashComps: HashComputationGroup = { - byLevel: [], - offset: 0, - }; - state.commit(hashComps); - return hashComps; - }, - fn: (hashComps) => { - executeHashComputations(hashComps.byLevel); + const hcByLevel: HashComputationLevel[] = []; + state.commit(0, hcByLevel); + return hcByLevel; }, - }); - - itBench.skip({ - id: `BeaconState ViewDU hashTreeRoot - commit step each validator vc=${vc}`, - beforeEach: () => createPartiallyModifiedDenebState(), - fn: (state: CompositeViewDU) => { - const hashComps: HashComputationGroup = { - byLevel: [], - offset: 0, - }; - for (let i = 0; i < numModified; i++) { - state.validators.get(i).commit(hashComps); - } + fn: (hcByLevel) => { + executeHashComputations(hcByLevel); }, }); }); @@ -152,7 +91,9 @@ function createPartiallyModifiedDenebState(): CompositeViewDU; const validator: Validator = { @@ -149,20 +149,17 @@ describe.skip("getHashComputations BranchNodeStruct", function () { for (const {name, fn} of testCases) { it(name, () => { - const hashComps: HashComputationGroup = { - byLevel: [], - offset: 0, - }; + const hcByLevel: HashComputationLevel[] = []; const validatorViewDU = ValidatorNodeStruct.toViewDU(validator); // cache all roots validatorViewDU.hashTreeRoot(); fn(validatorViewDU); - validatorViewDU.commit(hashComps); - expect(hashComps.byLevel.length).to.be.equal(4); - expect(hashComps.byLevel[0].length).to.be.equal(1); - expect(hashComps.byLevel[1].length).to.be.equal(2); - expect(hashComps.byLevel[2].length).to.be.equal(4); - expect(hashComps.byLevel[3].length).to.be.equal(1); + validatorViewDU.commit(0, hcByLevel); + expect(hcByLevel.length).to.be.equal(4); + expect(hcByLevel[0].length).to.be.equal(1); + expect(hcByLevel[1].length).to.be.equal(2); + expect(hcByLevel[2].length).to.be.equal(4); + expect(hcByLevel[3].length).to.be.equal(1); }); } }); From 04f8a161e12d3668fbc07116719be3f6d99375bf Mon Sep 17 00:00:00 2001 From: twoeths Date: Wed, 31 Jul 2024 15:25:24 +0700 Subject: [PATCH 088/113] feat: ReuseListIterator for getAll() api (#390) --- .../src/hashComputation.ts | 5 +- packages/ssz/src/index.ts | 1 + packages/ssz/src/interface.ts | 6 + packages/ssz/src/util/reusableListIterator.ts | 145 ++++++++++++++++++ packages/ssz/src/view/arrayComposite.ts | 32 ++++ packages/ssz/src/viewDU/arrayComposite.ts | 31 ++++ .../test/perf/byType/listComposite.test.ts | 48 ++++-- 
packages/ssz/test/perf/iterate.test.ts | 20 ++- .../perf/util/reusableListIterator.test.ts | 44 ++++++ .../unit/byType/listComposite/tree.test.ts | 20 ++- .../unit/util/reusableListIterator.test.ts | 64 ++++++++ 11 files changed, 402 insertions(+), 14 deletions(-) create mode 100644 packages/ssz/src/util/reusableListIterator.ts create mode 100644 packages/ssz/test/perf/util/reusableListIterator.test.ts create mode 100644 packages/ssz/test/unit/util/reusableListIterator.test.ts diff --git a/packages/persistent-merkle-tree/src/hashComputation.ts b/packages/persistent-merkle-tree/src/hashComputation.ts index d7f48a25..d6a8c7ab 100644 --- a/packages/persistent-merkle-tree/src/hashComputation.ts +++ b/packages/persistent-merkle-tree/src/hashComputation.ts @@ -55,7 +55,10 @@ export class HashComputationLevel { * run before every run */ reset(): void { - // keep this.head + // keep this.head object, only release the data + this.head.src0 = null as unknown as Node; + this.head.src1 = null as unknown as Node; + this.head.dest = null as unknown as Node; this.tail = null; this._length = 0; // totalLength is not reset diff --git a/packages/ssz/src/index.ts b/packages/ssz/src/index.ts index c140c60c..f2379e17 100644 --- a/packages/ssz/src/index.ts +++ b/packages/ssz/src/index.ts @@ -37,6 +37,7 @@ export {BitArray, getUint8ByteToBitBooleanArray} from "./value/bitArray"; // Utils export {fromHexString, toHexString, byteArrayEquals} from "./util/byteArray"; +export {ReusableListIterator} from "./util/reusableListIterator"; export {hash64, symbolCachedPermanentRoot} from "./util/merkleize"; diff --git a/packages/ssz/src/interface.ts b/packages/ssz/src/interface.ts index 240c2795..05240a04 100644 --- a/packages/ssz/src/interface.ts +++ b/packages/ssz/src/interface.ts @@ -16,6 +16,12 @@ export interface List<T> extends ArrayLike<T> { pop(): T | undefined; } +export interface ListIterator<T> { + readonly length: number; + push(...values: T[]): void; + [Symbol.iterator](): Iterator<T>; +} + export type Container<T extends Record<string, unknown>> = T; export type ByteVector = Vector<number>; diff --git a/packages/ssz/src/util/reusableListIterator.ts b/packages/ssz/src/util/reusableListIterator.ts new file mode 100644 index 00000000..7e1c35a7 --- /dev/null +++ b/packages/ssz/src/util/reusableListIterator.ts @@ -0,0 +1,145 @@ +import {ListIterator} from "../interface"; + +class LinkedNode<T> { + data: T; + next: LinkedNode<T> | null = null; + + constructor(data: T) { + this.data = data; + } +} + +/** + * A LinkedList that's designed to be reused over time. + * Before every run, reset() should be called. + * After every run, clean() should be called.
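+ *
+ * A minimal usage sketch (hypothetical names, based on the methods defined below):
+ *   const list = new ReusableListIterator<Item>();
+ *   list.reset();             // before a run: rewind, keeping already-allocated nodes
+ *   for (const item of source) list.push(item);
+ *   for (const item of list) consume(item);
+ *   list.clean();             // after a run: release references so old items can be GC'ed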
+ */ +export class ReusableListIterator<T> implements ListIterator<T> { + private head: LinkedNode<T>; + private tail: LinkedNode<T> | null; + private _length = 0; + private _totalLength = 0; + private pointer: LinkedNode<T> | null; + // this avoids memory allocation + private iteratorResult: IteratorResult<T>; + + constructor() { + this.head = { + data: null as unknown as T, + next: null, + }; + this.tail = null; + this.pointer = null; + this.iteratorResult = {} as IteratorResult<T>; + } + + get length(): number { + return this._length; + } + + get totalLength(): number { + return this._totalLength; + } + + /** + * run before every run + */ + reset(): void { + // keep this.head object, only release the data + this.head.data = null as unknown as T; + this.tail = null; + this._length = 0; + // totalLength is not reset + this.pointer = null; + // no need to reset iteratorResult + } + + /** + * Append new data to the tail + * This will overwrite the existing data if it is not null, or grow the list if needed. + */ + push(value: T): void { + if (this.tail !== null) { + let newTail = this.tail.next; + if (newTail !== null) { + newTail.data = value; + } else { + // grow the list + newTail = {data: value, next: null}; + this.tail.next = newTail; + this._totalLength++; + } + this.tail = newTail; + this._length++; + return; + } + + // first item + this.head.data = value; + this.tail = this.head; + this._length = 1; + if (this._totalLength === 0) { + this._totalLength = 1; + } + // else _totalLength > 0, do not set + } + + /** + * run after every run + * the list may still refer to old data, release it to avoid memory leaks + */ + clean(): void { + let node = this.tail?.next ?? null; + while (node !== null && node.data !== null) { + node.data = null as unknown as T; + node = node.next; + } + } + + /** + * Implement Iterator for this class + */ + next(): IteratorResult<T> { + if (!this.pointer || this.tail === null) { + return {done: true, value: undefined}; + } + + // never yield value beyond the tail + const value = this.pointer.data; + this.pointer = this.pointer.next; + // should not allocate new object here + const isNull = value === null; + this.iteratorResult.done = isNull; + this.iteratorResult.value = isNull ? undefined : value; + return this.iteratorResult; + } + + /** + * This is a convenient method to consume the list with a for-of loop + * See "next" method above for the actual implementation + */ + [Symbol.iterator](): IterableIterator<T> { + this.pointer = this.head; + return this; + } + + toArray(): T[] { + const result: T[] = []; + for (const data of this) { + result.push(data); + } + return result; + } + + /** + * For testing only + */ + dump(): T[] { + const result: T[] = []; + let node: LinkedNode<T> | null = this.head; + for (; node !== null; node = node.next) { + result.push(node.data); + } + return result; + } +} diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts index 0e9ae75a..97168645 100644 --- a/packages/ssz/src/view/arrayComposite.ts +++ b/packages/ssz/src/view/arrayComposite.ts @@ -2,6 +2,7 @@ import {getNodesAtDepth, HashComputationLevel, Node, toGindexBitstring, Tree} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {TreeView} from "./abstract"; +import {ListIterator} from "../interface"; /** Expected API of this View's type.
This interface allows to break a recursive dependency between types and views */ export type ArrayCompositeType< @@ -104,6 +105,22 @@ export class ArrayCompositeTreeView< return views; } + /** + * Similar to getAllReadonly but supports the ListIterator interface. + * Use ReusableListIterator to reuse over multiple calls. + */ + getAllReadonlyIter(views?: ListIterator<CompositeView<ElementType>>): ListIterator<CompositeView<ElementType>> { + const length = this.length; + const chunksNode = this.type.tree_getChunksNode(this.node); + const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); + views = views ?? new Array<CompositeView<ElementType>>(); + for (let i = 0; i < length; i++) { + // TODO: Optimize + views.push(this.type.elementType.getView(new Tree(nodes[i]))); + } + return views; + } + /** * Returns an array of values of all elements in the array, from index zero to `this.length - 1`. * The returned values are not Views so any changes won't be propagated upwards. @@ -122,4 +139,19 @@ export class ArrayCompositeTreeView< } return values; } + + /** + * Similar to getAllReadonlyValues but supports the ListIterator interface. + * Use ReusableListIterator to reuse over multiple calls. + */ + getAllReadonlyValuesIter(values?: ListIterator<ValueOf<ElementType>>): ListIterator<ValueOf<ElementType>> { + const length = this.length; + const chunksNode = this.type.tree_getChunksNode(this.node); + const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); + values = values ?? new Array<ValueOf<ElementType>>(); + for (let i = 0; i < length; i++) { + values.push(this.type.elementType.tree_toValue(nodes[i])); + } + return values; + } } diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 4c429f18..6856c63a 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -10,6 +10,7 @@ import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {ArrayCompositeType} from "../view/arrayComposite"; import {TreeViewDU} from "./abstract"; +import {ListIterator} from "../interface"; export type ArrayCompositeTreeViewDUCache = { nodes: Node[]; @@ -160,6 +161,21 @@ export class ArrayCompositeTreeViewDU< return views; } + /** + * Similar to getAllReadonly but supports the ListIterator interface. + * Use ReusableListIterator to reuse over multiple calls. + */ + getAllReadonlyIter(views?: ListIterator<CompositeViewDU<ElementType>>): ListIterator<CompositeViewDU<ElementType>> { + this.populateAllNodes(); + + views = views ?? new Array<CompositeViewDU<ElementType>>(); + for (let i = 0; i < this._length; i++) { + const view = this.type.elementType.getViewDU(this.nodes[i], this.caches[i]); + views.push(view); + } + return views; + } + /** * WARNING: Returns all committed changes, if there are any pending changes commit them beforehand */ @@ -176,6 +192,21 @@ export class ArrayCompositeTreeViewDU< return values; } + + /** + * Similar to getAllReadonlyValues but supports the ListIterator interface. + * Use ReusableListIterator to reuse over multiple calls. + */ + getAllReadonlyValuesIter(values?: ListIterator<ValueOf<ElementType>>): ListIterator<ValueOf<ElementType>> { + this.populateAllNodes(); + + values = values ??
new Array<ValueOf<ElementType>>(); + for (let i = 0; i < this._length; i++) { + const value = this.type.elementType.tree_toValue(this.nodes[i]); + values.push(value); + } + return values; + } + /** * When we need to compute HashComputations (hcByLevel != null): * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel diff --git a/packages/ssz/test/perf/byType/listComposite.test.ts b/packages/ssz/test/perf/byType/listComposite.test.ts index dbf75988..32762cfb 100644 --- a/packages/ssz/test/perf/byType/listComposite.test.ts +++ b/packages/ssz/test/perf/byType/listComposite.test.ts @@ -1,5 +1,13 @@ import {itBench} from "@dapplion/benchmark"; -import {ContainerNodeStructType, ContainerType, ListCompositeType, UintNumberType} from "../../../src"; +import { + CompositeViewDU, + ContainerNodeStructType, + ContainerType, + ListCompositeType, + ReusableListIterator, + UintNumberType, + ValueOf, +} from "../../../src"; const byteType = new UintNumberType(1); @@ -20,33 +28,53 @@ describe("ListCompositeType types", () => { }); } - for (const type of [ - new ListCompositeType(containerType, 2 ** 40, {typeName: "List(Container)"}), - new ListCompositeType(containerNodeStructType, 2 ** 40, {typeName: "List(ContainerNodeStruct)"}), - ]) { - const viewDU = type.toViewDU(newFilledArray(len, {a: 1, b: 2})); + for (const [i, type] of [containerType, containerNodeStructType].entries()) { + const listType = new ListCompositeType(type, 2 ** 40, { + typeName: `List(${i === 0 ? "Container" : "ContainerNodeStruct"})`, + }); + const viewDU = listType.toViewDU(newFilledArray(len, {a: 1, b: 2})); - itBench(`${type.typeName} len ${len} ViewDU.getAllReadonly() + iterate`, () => { + itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonly() + iterate`, () => { const values = viewDU.getAllReadonly(); for (let i = 0; i < len; i++) { values[i]; } }); - itBench(`${type.typeName} len ${len} ViewDU.getAllReadonlyValues() + iterate`, () => { + const viewDUs = new ReusableListIterator<CompositeViewDU<typeof type>>(); + itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyIter() + iterate`, () => { + viewDUs.reset(); + viewDU.getAllReadonlyIter(viewDUs); + viewDUs.clean(); + for (const viewDU of viewDUs) { + viewDU; + } + }); + + itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyValues() + iterate`, () => { const values = viewDU.getAllReadonlyValues(); for (let i = 0; i < len; i++) { values[i]; } }); - itBench(`${type.typeName} len ${len} ViewDU.get(i)`, () => { + const values = new ReusableListIterator<ValueOf<typeof type>>(); + itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyValuesIter() + iterate`, () => { + values.reset(); + viewDU.getAllReadonlyValuesIter(values); + values.clean(); + for (const value of values) { + value; + } + }); + + itBench(`${listType.typeName} len ${len} ViewDU.get(i)`, () => { for (let i = 0; i < len; i++) { viewDU.get(i); } }); - itBench(`${type.typeName} len ${len} ViewDU.getReadonly(i)`, () => { + itBench(`${listType.typeName} len ${len} ViewDU.getReadonly(i)`, () => { for (let i = 0; i < len; i++) { viewDU.getReadonly(i); } diff --git a/packages/ssz/test/perf/iterate.test.ts b/packages/ssz/test/perf/iterate.test.ts index 92b923e3..f585dffd 100644 --- a/packages/ssz/test/perf/iterate.test.ts +++ b/packages/ssz/test/perf/iterate.test.ts @@ -1,6 +1,6 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; -import {ListBasicType, UintNumberType} from "../../src"; -import {Validators} from "../lodestarTypes/phase0/sszTypes"; +import {CompositeViewDU, ListBasicType, ReusableListIterator,
UintNumberType} from "../../src"; +import {Validators, Validator} from "../lodestarTypes/phase0/sszTypes"; describe("iterate", () => { setBenchOpts({noThreshold: true}); @@ -53,6 +53,22 @@ describe("readonly values - iterator vs array", () => { validatorsArray[i]; } }); + + const viewDUs = new ReusableListIterator<CompositeViewDU<typeof Validator>>(); + itBench("compositeListValue.getAllReadonlyIter()", () => { + viewDUs.reset(); + validators.getAllReadonlyIter(viewDUs); + viewDUs.clean(); + }); + + itBench("compositeListValue.getAllReadonlyIter() + loop all", () => { + viewDUs.reset(); + validators.getAllReadonlyIter(viewDUs); + viewDUs.clean(); + for (const viewDU of viewDUs) { + viewDU; + } + }); }); // eslint-disable-next-line @typescript-eslint/explicit-function-return-type diff --git a/packages/ssz/test/perf/util/reusableListIterator.test.ts b/packages/ssz/test/perf/util/reusableListIterator.test.ts new file mode 100644 index 00000000..4417077e --- /dev/null +++ b/packages/ssz/test/perf/util/reusableListIterator.test.ts @@ -0,0 +1,44 @@ +import {itBench} from "@dapplion/benchmark"; +import {ReusableListIterator} from "../../../src"; +import {Validator} from "../../lodestarTypes/phase0"; +import {getValidator} from "../../utils/generateEth2Objs"; + +/** + * This test intentionally creates a new validator object every time, mimicking an environment with a lot of memory allocation. + * On average, Array is very fast, but allocating a big array is expensive and may cause random spikes due to gc. + * ReusableListIterator is faster on average and more stable since no memory allocation is involved. + * ReusableListIterator + ✓ ReusableListIterator 2000000 items 0.5724982 ops/s 1.746731 s/op - 13 runs 24.1 s + ✓ Array 2000000 items 0.4655988 ops/s 2.147772 s/op - 14 runs 32.0 s + */ +describe("ReusableListIterator", function () { + const length = 2_000_000; + const list = new ReusableListIterator<Validator>(); + itBench({ + id: `ReusableListIterator ${length} items`, + fn: () => { + // reusable, just reset + list.reset(); + for (let i = 0; i < length; i++) { + list.push(getValidator(i)); + } + for (const a of list) { + a; + } + }, + }); + + itBench({ + id: `Array ${length} items`, + fn: () => { + // allocate every time + const arr = new Array<Validator>(length); + for (let i = 0; i < length; i++) { + arr[i] = getValidator(i); + } + for (const a of arr) { + a; + } + }, + }); +}); diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts index 2f146313..5b738c88 100644 --- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts +++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts @@ -1,5 +1,13 @@ import {expect} from "chai"; -import {CompositeView, ContainerType, ListCompositeType, toHexString, UintNumberType, ValueOf} from "../../../../src"; +import { + CompositeView, + ContainerType, + ListCompositeType, + ReusableListIterator, + toHexString, + UintNumberType, + ValueOf, +} from "../../../../src"; import {ArrayCompositeTreeViewDU} from "../../../../src/viewDU/arrayComposite"; import {ssz} from "../../../lodestarTypes/primitive"; import {runViewTestMutation} from "../runViewTestMutation"; @@ -112,11 +120,21 @@ describe("ListCompositeType tree reads", () => { // Only for viewDU if (view instanceof ArrayCompositeTreeViewDU) { expect(() => view.getAllReadonly()).to.throw("Must commit changes before reading all nodes"); + expect(() => view.getAllReadonlyIter()).to.throw("Must commit changes before reading all nodes"); view.commit(); }
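A sketch of the reuse pattern the assertions below exercise (assuming a committed `view` of `listOfContainersType`; `CompositeView`, `ReusableListIterator`, and `containerUintsType` come from this test file):

const reused = new ReusableListIterator<CompositeView<typeof containerUintsType>>();
reused.reset();                  // rewind before filling
view.getAllReadonlyIter(reused); // fills the reused list instead of allocating a new array
reused.clean();                  // release stale references past the new tail
for (const element of reused) {
  element; // read-only element views, same contents as getAllReadonly()
}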
expect(view.getAllReadonly().map(elementToValue)).deep.equals(values, "Wrong getAllReadonly()"); + (view.getAllReadonlyIter() as CompositeView<typeof containerUintsType>[]).map(elementToValue); expect(view.getAllReadonlyValues()).deep.equals(values, "Wrong getAllReadonlyValues()"); + const result = new ReusableListIterator<ValueOf<typeof containerUintsType>>(); + view.getAllReadonlyValuesIter(result); + expect(result.toArray()).deep.equals(values, "Wrong getAllReadonlyValuesIter()"); + // reuse ReusableListIterator + result.reset(); + view.getAllReadonlyValuesIter(result); + result.clean(); + expect(result.toArray()).deep.equals(values, "Wrong getAllReadonlyValuesIter() after reuse"); // Only for viewDU if (view instanceof ArrayCompositeTreeViewDU) { diff --git a/packages/ssz/test/unit/util/reusableListIterator.test.ts b/packages/ssz/test/unit/util/reusableListIterator.test.ts new file mode 100644 index 00000000..8af8140e --- /dev/null +++ b/packages/ssz/test/unit/util/reusableListIterator.test.ts @@ -0,0 +1,64 @@ +import {expect} from "chai"; +import {ReusableListIterator} from "../../../src/util/reusableListIterator"; + +describe("ReusableListIterator", () => { + let list: ReusableListIterator<number>; + + beforeEach(() => { + list = new ReusableListIterator<number>(); + list.push(0); + }); + + it("should reset", () => { + list.reset(); + expect(list.length).to.be.equal(0); + expect(list.totalLength).to.be.equal(1); + expect(list.toArray()).to.be.deep.equal([]); + }); + + it("should push", () => { + list.push(1); + expect(list.length).to.be.equal(2); + expect(list.totalLength).to.be.equal(2); + const arr = list.toArray(); + expect(arr.length).to.be.equal(2); + expect(arr).to.be.deep.equal([0, 1]); + }); + + it("reset then push full", () => { + list.push(1); + list.reset(); + list.push(1); + list.push(2); + list.clean(); + expect(list.length).to.be.equal(2); + expect(list.totalLength).to.be.equal(2); + const arr = list.toArray(); + expect(arr).to.be.deep.equal([1, 2]); + }); + + it("reset then push partial", () => { + list.push(1); + // totalLength = 2 now + list.reset(); + list.push(1); + list.clean(); + expect(list.length).to.be.equal(1); + expect(list.totalLength).to.be.equal(2); + const arr = list.toArray(); + expect(arr).to.be.deep.equal([1]); + }); + + it("clean", () => { + list.push(1); + list.reset(); + list.push(1); + list.clean(); + expect(list.length).to.be.equal(1); + expect(list.totalLength).to.be.equal(2); + const arr = list.toArray(); + expect(arr).to.be.deep.equal([1]); + const all = list.dump(); + expect(all).to.be.deep.equal([1, null]); + }); +}); From 23557d74dce46bd6039e1c5fc6a7fe2d98ea7ecc Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 7 Aug 2024 13:47:01 +0700 Subject: [PATCH 089/113] chore: fix lint --- packages/persistent-merkle-tree/src/tree.ts | 1 - yarn.lock | 21 +++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/persistent-merkle-tree/src/tree.ts b/packages/persistent-merkle-tree/src/tree.ts index aee817e0..33fbe57a 100644 --- a/packages/persistent-merkle-tree/src/tree.ts +++ b/packages/persistent-merkle-tree/src/tree.ts @@ -362,7 +362,6 @@ export function setNodesAtDepth( if (nodesDepth === 0) { return nodes.length > 0 ? nodes[0] : rootNode; } - const offset = hcOffset; /** * Contiguous filled stack of parent nodes.
It get filled in the first descent diff --git a/yarn.lock b/yarn.lock index 6d4c7fbe..e6d96d22 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1244,6 +1244,11 @@ "@babel/helper-validator-identifier" "^7.24.5" to-fast-properties "^2.0.0" +"@chainsafe/as-sha256@^0.4.1", "@chainsafe/as-sha256@^0.4.2": + version "0.4.2" + resolved "https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.4.2.tgz#21ad1371e2245e430c1a554a05f10d333c6f42cc" + integrity sha512-HJ8GZBRjLeWtRsAXf3EbNsNzmTGpzTFjfpSf4yHkLYC+E52DhT6hwz+7qpj6I/EmFzSUm5tYYvT9K8GZokLQCQ== + "@chainsafe/babel-plugin-inline-binary-import@^1.0.3": version "1.0.3" resolved "https://registry.yarnpkg.com/@chainsafe/babel-plugin-inline-binary-import/-/babel-plugin-inline-binary-import-1.0.3.tgz#08dde20d91cf5d18f2c253edf32547943a16e409" @@ -1276,6 +1281,22 @@ "@chainsafe/hashtree-linux-arm64-gnu" "1.0.1" "@chainsafe/hashtree-linux-x64-gnu" "1.0.1" +"@chainsafe/persistent-merkle-tree@^0.7.1", "@chainsafe/persistent-merkle-tree@^0.7.2": + version "0.7.2" + resolved "https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.7.2.tgz#f0ef91daf36752f827432333cbc965f4bf6e750e" + integrity sha512-BUAqrmSUmy6bZhXxnhpR+aYoEDdCeS1dQvq/aje0CDEB14ZHF9UVN2mL9MolOD0ANUiP1OaPG3KfVBxvuW8aTg== + dependencies: + "@chainsafe/as-sha256" "^0.4.2" + "@noble/hashes" "^1.3.0" + +"@chainsafe/ssz@0.16.0": + version "0.16.0" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.16.0.tgz#262c491ac037777a16e8d8db479da2ba27539b8d" + integrity sha512-CgTDyrkbAKvrKwHxPT5rerXAHP3NB+uOvpnN9Gn8aJ/4TGOKhOboj4131bSFUZ679uPJ6pu6391cvInuOdrglw== + dependencies: + "@chainsafe/as-sha256" "^0.4.2" + "@chainsafe/persistent-merkle-tree" "^0.7.2" + "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" From aa7ebfb23caa4abe740cf36793558cd537b9177f Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 7 Aug 2024 15:06:12 +0700 Subject: [PATCH 090/113] feat: new batchHashTreeRoot() method for ViewDU --- packages/persistent-merkle-tree/package.json | 5 -- .../persistent-merkle-tree/src/subtree.ts | 1 - packages/ssz/src/viewDU/abstract.ts | 13 ++- .../ssz/test/perf/eth2/beaconState.test.ts | 20 ++--- .../ssz/test/perf/eth2/hashTreeRoot.test.ts | 12 ++- packages/ssz/test/spec/runValidTest.ts | 14 +++ .../test/unit/byType/bitArray/tree.test.ts | 6 +- .../test/unit/byType/bitVector/tree.test.ts | 8 +- .../test/unit/byType/container/tree.test.ts | 24 +++--- .../test/unit/byType/listBasic/tree.test.ts | 20 ++--- .../unit/byType/listComposite/tree.test.ts | 32 +++---- .../test/unit/byType/runViewTestMutation.ts | 85 ++++++++++--------- .../ssz/test/unit/eth2/beaconState.test.ts | 55 ++++++------ .../ssz/test/unit/eth2/validators.test.ts | 60 +------------ .../phase0/listValidator.test.ts | 6 +- .../lodestarTypes/phase0/validator.test.ts | 2 + packages/ssz/test/unit/regressions.test.ts | 2 + .../ssz/test/unit/unchangedViewDUs.test.ts | 16 ++-- 18 files changed, 179 insertions(+), 202 deletions(-) diff --git a/packages/persistent-merkle-tree/package.json b/packages/persistent-merkle-tree/package.json index 4f518628..10570bb4 100644 --- a/packages/persistent-merkle-tree/package.json +++ b/packages/persistent-merkle-tree/package.json @@ -49,10 +49,5 @@ "@chainsafe/as-sha256": "0.5.0", "@chainsafe/hashtree": "1.0.1", "@noble/hashes": "^1.3.0" - }, - "peerDependencies": { - "@chainsafe/hashtree-linux-x64-gnu": "1.0.1", - "@chainsafe/hashtree-linux-arm64-gnu": 
"1.0.1", - "@chainsafe/hashtree-darwin-arm64": "1.0.1" } } diff --git a/packages/persistent-merkle-tree/src/subtree.ts b/packages/persistent-merkle-tree/src/subtree.ts index 65ea3f51..44dc7987 100644 --- a/packages/persistent-merkle-tree/src/subtree.ts +++ b/packages/persistent-merkle-tree/src/subtree.ts @@ -38,7 +38,6 @@ export function subtreeFillToLength(bottom: Node, depth: number, length: number) /** * WARNING: Mutates the provided nodes array. - * @param hashCompRootNode is a hacky way from ssz to set `dest` of HashComputation for BranchNodeStruct * TODO: Don't mutate the nodes array. * hcByLevel is an output parameter that will be filled with the hash computations if exists. */ diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts index 08bd8551..4ef1124d 100644 --- a/packages/ssz/src/viewDU/abstract.ts +++ b/packages/ssz/src/viewDU/abstract.ts @@ -58,10 +58,17 @@ export abstract class TreeViewDU createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { @@ -37,7 +35,7 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM }, }); - itBench({ + itBench.skip({ id: `BeaconState ViewDU validator tree creation vc=${numModified}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); @@ -54,17 +52,17 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM const hc = new HashComputationGroup(); itBench({ - id: `BeaconState ViewDU hashTreeRoot vc=${vc}`, + id: `BeaconState ViewDU batchHashTreeRoot vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { // commit() step is inside hashTreeRoot(), reuse HashComputationGroup - if (toHexString(state.hashTreeRoot(hc)) !== expectedRoot) { - throw new Error("hashTreeRoot does not match expectedRoot"); + if (toHexString(state.batchHashTreeRoot(hc)) !== expectedRoot) { + throw new Error("batchHashTreeRoot does not match expectedRoot"); } }, }); - itBench({ + itBench.skip({ id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { @@ -72,7 +70,7 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM }, }); - itBench({ + itBench.skip({ id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); diff --git a/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts b/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts index f94cc6ec..1ae6da27 100644 --- a/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts +++ b/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts @@ -1,5 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import {hasher, uint8ArrayToHashObject} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationGroup, hasher, uint8ArrayToHashObject} from "@chainsafe/persistent-merkle-tree"; import * as sszPhase0 from "../../lodestarTypes/phase0/sszTypes"; import * as sszAltair from "../../lodestarTypes/altair/sszTypes"; import { @@ -68,6 +68,16 @@ describe("HashTreeRoot frequent eth2 objects", () => { }, }); + const hc = new HashComputationGroup(); + itBench, Uint8Array>({ + id: `BeaconState vc ${validatorCount} - batchHashTreeRoot tree`, + before: () => getStateViewDU().serialize(), + beforeEach: (bytes) => sszAltair.BeaconState.deserializeToViewDU(bytes), + fn: (state) => { + state.batchHashTreeRoot(hc); + }, + }); + for (const {fieldName, fieldType} of sszAltair.BeaconState.fieldsEntries) { // Only 
benchmark big data structures if (fieldType.maxSize < 10e6 || !isCompositeType(fieldType)) { diff --git a/packages/ssz/test/spec/runValidTest.ts b/packages/ssz/test/spec/runValidTest.ts index 0d12a4b6..5ea219eb 100644 --- a/packages/ssz/test/spec/runValidTest.ts +++ b/packages/ssz/test/spec/runValidTest.ts @@ -114,6 +114,20 @@ export function runValidSszTest(type: Type, testData: ValidTestCaseData assertRoot(root, "type.hashTreeRoot()"); } + if (isCompositeType(type)) { + // batchHashTreeRoot() + const root = wrapErr(() => { + const node = type.value_toTree(testDataValue); + const viewDU = type.getViewDU(node); + if (viewDU instanceof TreeViewDU) { + return viewDU.batchHashTreeRoot(); + } else { + return type.hashTreeRoot(testDataValue); + } + }, "type.hashTreeRoot()"); + assertRoot(root, "ViewDU.batchHashTreeRoot()"); + } + // value -> tree - value_toTree() const node = wrapErr(() => type.value_toTree(testDataValue), "type.value_toTree()"); assertNode(node, "type.value_toTree()"); diff --git a/packages/ssz/test/unit/byType/bitArray/tree.test.ts b/packages/ssz/test/unit/byType/bitArray/tree.test.ts index b6adf522..456243c5 100644 --- a/packages/ssz/test/unit/byType/bitArray/tree.test.ts +++ b/packages/ssz/test/unit/byType/bitArray/tree.test.ts @@ -51,19 +51,19 @@ for (const type of [new BitVectorType(4), new BitListType(4)]) { }); } -describe("BitArray batchHash", () => { +describe("BitArray batchHashTreeRoot", () => { const sszType = new BitListType(4); const value = fromNum(4, 0b0010); const expectedRoot = sszType.toView(value).hashTreeRoot(); it("fresh ViewDU", () => { - expect(sszType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(sszType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("set then hashTreeRoot", () => { const viewDU = sszType.toViewDU(fromNum(4, 0b0011)); viewDU.set(0, false); - expect(sszType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(sszType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); diff --git a/packages/ssz/test/unit/byType/bitVector/tree.test.ts b/packages/ssz/test/unit/byType/bitVector/tree.test.ts index c1a48290..04f2ee14 100644 --- a/packages/ssz/test/unit/byType/bitVector/tree.test.ts +++ b/packages/ssz/test/unit/byType/bitVector/tree.test.ts @@ -49,19 +49,19 @@ runViewTestMutation({ ], }); -describe("BitVector batchHash", () => { +describe("BitVector batchHashTreeRoot", () => { const sszType = new BitVectorType(4); const value = fromNum(4, 0b0010); const expectedRoot = sszType.toView(value).hashTreeRoot(); it("fresh ViewDU", () => { - expect(sszType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(sszType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); - it("set then hashTreeRoot", () => { + it("set then batchHashTreeRoot", () => { const viewDU = sszType.toViewDU(fromNum(4, 0b0011)); viewDU.set(0, false); - expect(sszType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(sszType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); diff --git a/packages/ssz/test/unit/byType/container/tree.test.ts b/packages/ssz/test/unit/byType/container/tree.test.ts index bca2fd4f..dea2f7e2 100644 --- a/packages/ssz/test/unit/byType/container/tree.test.ts +++ b/packages/ssz/test/unit/byType/container/tree.test.ts @@ -221,7 +221,7 @@ runViewTestMutation({ ], }); -describe("ContainerViewDU batchHash", function () { +describe("ContainerViewDU batchHashTreeRoot", function 
() { const childContainerType = new ContainerType({b0: uint64NumInfType, b1: uint64NumInfType}); const parentContainerType = new ContainerType({ // a basic type @@ -233,29 +233,29 @@ describe("ContainerViewDU batchHash", function () { const expectedRoot = parentContainerType.toView(value).hashTreeRoot(); it("fresh ViewDU", () => { - expect(parentContainerType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(parentContainerType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify basic type", () => { const viewDU = parentContainerType.toViewDU({a: 9, b: {b0: 100, b1: 101}}); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.a += 1; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify full child container", () => { const viewDU = parentContainerType.toViewDU({a: 10, b: {b0: 99, b1: 999}}); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.b = childContainerType.toViewDU({b0: 100, b1: 101}); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify partial child container", () => { const viewDU = parentContainerType.toViewDU({a: 10, b: {b0: 99, b1: 999}}); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.b.b0 = 100; viewDU.b.b1 = 101; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); @@ -286,14 +286,14 @@ describe("ContainerNodeStruct batchHash", function () { const expectedRoot = containerType.toView(value).hashTreeRoot(); it("fresh ViewDU", () => { - expect(containerType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(containerType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify basic type", () => { const viewDU = containerType.toViewDU({...value, exitEpoch: 3}); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.exitEpoch *= 1_000_000; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("modify basic type", () => { @@ -304,6 +304,6 @@ describe("ContainerNodeStruct batchHash", function () { }); viewDU.exitEpoch -= 1; viewDU.withdrawableEpoch -= 1; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); diff --git a/packages/ssz/test/unit/byType/listBasic/tree.test.ts b/packages/ssz/test/unit/byType/listBasic/tree.test.ts index 3924564c..3036ae33 100644 --- a/packages/ssz/test/unit/byType/listBasic/tree.test.ts +++ b/packages/ssz/test/unit/byType/listBasic/tree.test.ts @@ -241,31 +241,31 @@ describe("ListBasicType.sliceTo", () => { } }); -describe("ListBasicType batchHash", function () { +describe("ListBasicType batchHashTreeRoot", function () { const value = [1, 2, 3, 4]; const expectedRoot = ListN64Uint64NumberType.toView(value).hashTreeRoot(); it("fresh ViewDU", () => { - expect(ListN64Uint64NumberType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(ListN64Uint64NumberType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); - it("push then hashTreeRoot()", () => { + it("push then batchHashTreeRoot()", () => { const viewDU = ListN64Uint64NumberType.defaultViewDU(); viewDU.push(1); 
viewDU.push(2); viewDU.push(3); viewDU.push(4); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); - it("push then modify then hashTreeRoot()", () => { + it("push then modify then batchHashTreeRoot()", () => { const viewDU = ListN64Uint64NumberType.defaultViewDU(); viewDU.push(1); viewDU.push(2); viewDU.push(3); viewDU.push(44); viewDU.set(3, 4); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify", () => { @@ -274,10 +274,10 @@ describe("ListBasicType batchHash", function () { viewDU.push(2); viewDU.push(33); viewDU.push(44); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.set(2, 3); viewDU.set(3, 4); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); // similar to a fresh ViewDU but it's good to test @@ -288,7 +288,7 @@ describe("ListBasicType batchHash", function () { viewDU.push(3); viewDU.push(4); viewDU.push(5); - viewDU.hashTreeRoot(); - expect(viewDU.sliceTo(3).hashTreeRoot()).to.be.deep.equal(expectedRoot); + viewDU.batchHashTreeRoot(); + expect(viewDU.sliceTo(3).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts index 5b738c88..2b0a6b91 100644 --- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts +++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts @@ -232,7 +232,7 @@ describe("ListCompositeType.sliceFrom", () => { }); }); -describe("ListCompositeType batchHash", () => { +describe("ListCompositeType batchHashTreeRoot", () => { const value = [ {a: 1, b: 2}, {a: 3, b: 4}, @@ -240,56 +240,56 @@ describe("ListCompositeType batchHash", () => { const expectedRoot = listOfContainersType.toView(value).hashTreeRoot(); it("fresh ViewDU", () => { - expect(listOfContainersType.toViewDU(value).hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(listOfContainersType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); - it("push then hashTreeRoot()", () => { + it("push then batchHashTreeRoot()", () => { const viewDU = listOfContainersType.defaultViewDU(); viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); viewDU.push(containerUintsType.toViewDU({a: 3, b: 4})); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify full non-hashed child element", () => { const viewDU = listOfContainersType.defaultViewDU(); viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify partially hashed child element", () => { const viewDU = listOfContainersType.defaultViewDU(); viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); const item1 = containerUintsType.toViewDU({a: 3, b: 44}); - item1.hashTreeRoot(); + item1.batchHashTreeRoot(); item1.b = 4; viewDU.set(1, item1); - 
expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify full hashed child element", () => { const viewDU = listOfContainersType.defaultViewDU(); viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); const item1 = containerUintsType.toViewDU({a: 3, b: 4}); - item1.hashTreeRoot(); + item1.batchHashTreeRoot(); viewDU.set(1, item1); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); it("full hash then modify partial child element", () => { const viewDU = listOfContainersType.defaultViewDU(); viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.hashTreeRoot(); + viewDU.batchHashTreeRoot(); viewDU.get(1).a = 3; viewDU.get(1).b = 4; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); // similar to a fresh ViewDU but it's good to test @@ -298,7 +298,7 @@ describe("ListCompositeType batchHash", () => { viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); viewDU.push(containerUintsType.toViewDU({a: 3, b: 4})); viewDU.push(containerUintsType.toViewDU({a: 5, b: 6})); - viewDU.hashTreeRoot(); - expect(viewDU.sliceTo(1).hashTreeRoot()).to.be.deep.equal(expectedRoot); + viewDU.batchHashTreeRoot(); + expect(viewDU.sliceTo(1).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); }); }); diff --git a/packages/ssz/test/unit/byType/runViewTestMutation.ts b/packages/ssz/test/unit/byType/runViewTestMutation.ts index d6cae2f5..85298774 100644 --- a/packages/ssz/test/unit/byType/runViewTestMutation.ts +++ b/packages/ssz/test/unit/byType/runViewTestMutation.ts @@ -32,15 +32,22 @@ const runViewTestMutationFn = function runViewTestMutation, value: ValueOf, message: string): void { + function assertValidView(view: TreeViewDU, value: ValueOf, message: string, batchHash: boolean): void { expect(type.toJson(view.toValue())).to.deep.equal(type.toJson(value), `Wrong json - ${message}`); expect(toHexString(view.serialize())).to.equal(toHexString(type.serialize(value)), `Wrong serialized - ${message}`); - expect(toHexString(view.hashTreeRoot())).to.equal( - toHexString(type.hashTreeRoot(value)), - `Wrong hashTreeRoot - ${message}` - ); + if (batchHash) { + expect(toHexString(view.batchHashTreeRoot())).to.equal( + toHexString(type.hashTreeRoot(value)), + `Wrong batchHashTreeRoot - ${message}` + ); + } else { + expect(toHexString(view.hashTreeRoot())).to.equal( + toHexString(type.hashTreeRoot(value)), + `Wrong hashTreeRoot - ${message}` + ); + } } // eslint-disable-next-line no-only-tests/no-only-tests @@ -61,46 +68,48 @@ const runViewTestMutationFn = function runViewTestMutation) ?? tvBefore; - assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation"); + assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation", false); if (assertFn) assertFn(tvAfter as CompositeViewDU); }); } - const treeViewDUId = `${id} - TreeViewDU`; - if ((!onlyId || treeViewDUId.includes(onlyId)) && !skipTreeViewDU) { - it(treeViewDUId, () => { - const tvBefore = type.toViewDU(valueBefore) as TreeViewDU; - - // Set to mutable, and edit - const tvAfter = (fn(tvBefore as CompositeViewDU) ?? 
tvBefore) as CompositeViewDU; - - if (treeViewToStruct) { - const tvAfterStruct = treeViewToStruct(tvAfter); - expect(type.toJson(tvAfterStruct)).to.deep.equal( - type.toJson(valueAfter), - "Wrong value after mutation before commit" - ); - } - - if (assertFn) assertFn(tvAfter as CompositeViewDU); + for (const batchHash of [false, true]) { + const treeViewDUId = `${id} - TreeViewDU, batchHash = ${batchHash}`; + if ((!onlyId || treeViewDUId.includes(onlyId)) && !skipTreeViewDU) { + it(treeViewDUId, () => { + const tvBefore = type.toViewDU(valueBefore) as TreeViewDU; - type.commitViewDU(tvAfter); - assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation"); - - if (assertFn) assertFn(tvAfter as CompositeViewDU); - - if (!skipCloneMutabilityViewDU) { - // Ensure correct mutability of clone and caches // Set to mutable, and edit - const tvBefore2 = type.toViewDU(valueBefore) as TreeViewDU; - const tvAfter2 = (fn(tvBefore2 as CompositeViewDU) ?? tvBefore2) as CompositeViewDU; - // Drop changes - (tvAfter2 as TreeViewDU).clone(); - // Assert same value as before - assertValidView(tvAfter2 as TreeViewDU, valueBefore, "dropped mutation"); - } - }); + const tvAfter = (fn(tvBefore as CompositeViewDU) ?? tvBefore) as CompositeViewDU; + + if (treeViewToStruct) { + const tvAfterStruct = treeViewToStruct(tvAfter); + expect(type.toJson(tvAfterStruct)).to.deep.equal( + type.toJson(valueAfter), + "Wrong value after mutation before commit" + ); + } + + if (assertFn) assertFn(tvAfter as CompositeViewDU); + + type.commitViewDU(tvAfter); + assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation", batchHash); + + if (assertFn) assertFn(tvAfter as CompositeViewDU); + + if (!skipCloneMutabilityViewDU) { + // Ensure correct mutability of clone and caches + // Set to mutable, and edit + const tvBefore2 = type.toViewDU(valueBefore) as TreeViewDU; + const tvAfter2 = (fn(tvBefore2 as CompositeViewDU) ?? tvBefore2) as CompositeViewDU; + // Drop changes + (tvAfter2 as TreeViewDU).clone(); + // Assert same value as before + assertValidView(tvAfter2 as TreeViewDU, valueBefore, "dropped mutation", batchHash); + } + }); + } } } }); diff --git a/packages/ssz/test/unit/eth2/beaconState.test.ts b/packages/ssz/test/unit/eth2/beaconState.test.ts index 5c41d0ab..9b886f82 100644 --- a/packages/ssz/test/unit/eth2/beaconState.test.ts +++ b/packages/ssz/test/unit/eth2/beaconState.test.ts @@ -7,19 +7,18 @@ import {BitArray, fromHexString} from "../../../src"; const VALIDATOR_REGISTRY_LIMIT = 1099511627776; export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT); -// TODO - batch: mix the commit() or hashTreeRoot()? 
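Both root APIs must agree on the result; a sketch of the contract the tests below pin down, reusing a HashComputationGroup the same way the perf suites above do:

const hc = new HashComputationGroup();
const viewDU = BeaconState.defaultViewDU();
const root1 = viewDU.hashTreeRoot();        // commits, then hashes dirty nodes one by one
const root2 = viewDU.batchHashTreeRoot(hc); // commits, collects computations by level, hashes in batch
// root1 and root2 are expected to be identical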
-describe("BeaconState ViewDU batch hash", function () { +describe("BeaconState ViewDU batchHashTreeRoot", function () { const view = BeaconState.defaultView(); const viewDU = BeaconState.defaultViewDU(); it("BeaconState ViewDU should have same hashTreeRoot() to View", () => { // genesisTime viewDU.genesisTime = view.genesisTime = 1e9; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // genesisValidatorsRoot viewDU.genesisValidatorsRoot = view.genesisValidatorsRoot = Buffer.alloc(32, 1); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // fork const fork: phase0.Fork = { @@ -29,7 +28,7 @@ describe("BeaconState ViewDU batch hash", function () { }; view.fork = BeaconState.fields.fork.toView(fork); viewDU.fork = BeaconState.fields.fork.toViewDU(fork); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // latestBlockHeader const latestBlockHeader: phase0.BeaconBlockHeader = { @@ -41,21 +40,21 @@ describe("BeaconState ViewDU batch hash", function () { }; view.latestBlockHeader = BeaconState.fields.latestBlockHeader.toView(latestBlockHeader); viewDU.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU(latestBlockHeader); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // blockRoots const blockRoots = ssz.phase0.HistoricalBlockRoots.defaultValue(); blockRoots[0] = fromHexString("0x1234"); view.blockRoots = ssz.phase0.HistoricalBlockRoots.toView(blockRoots); viewDU.blockRoots = ssz.phase0.HistoricalBlockRoots.toViewDU(blockRoots); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // stateRoots const stateRoots = ssz.phase0.HistoricalStateRoots.defaultValue(); stateRoots[0] = fromHexString("0x5678"); view.stateRoots = ssz.phase0.HistoricalStateRoots.toView(stateRoots); viewDU.stateRoots = ssz.phase0.HistoricalStateRoots.toViewDU(stateRoots); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // historical_roots Frozen in Capella, replaced by historical_summaries // Eth1 @@ -66,19 +65,19 @@ describe("BeaconState ViewDU batch hash", function () { }; view.eth1Data = BeaconState.fields.eth1Data.toView(eth1Data); viewDU.eth1Data = BeaconState.fields.eth1Data.toViewDU(eth1Data); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // Eth1DataVotes const eth1DataVotes = ssz.phase0.Eth1DataVotes.defaultValue(); eth1DataVotes[0] = eth1Data; view.eth1DataVotes = ssz.phase0.Eth1DataVotes.toView(eth1DataVotes); viewDU.eth1DataVotes = ssz.phase0.Eth1DataVotes.toViewDU(eth1DataVotes); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // Eth1DepositIndex view.eth1DepositIndex = 1000; viewDU.eth1DepositIndex = 1000; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // validators const validator = { @@ -93,34 +92,34 @@ describe("BeaconState ViewDU 
batch hash", function () { }; view.validators = BeaconState.fields.validators.toView([validator]); viewDU.validators = BeaconState.fields.validators.toViewDU([validator]); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // balances view.balances = BeaconState.fields.balances.toView([1000, 2000, 3000]); viewDU.balances = Balances.toViewDU([1000, 2000, 3000]); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // randaoMixes const randaoMixes = ssz.phase0.RandaoMixes.defaultValue(); randaoMixes[0] = fromHexString("0x1234"); view.randaoMixes = ssz.phase0.RandaoMixes.toView(randaoMixes); viewDU.randaoMixes = ssz.phase0.RandaoMixes.toViewDU(randaoMixes); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // slashings view.slashings = BeaconState.fields.slashings.toView(Array.from({length: 64}, () => BigInt(1000))); viewDU.slashings = BeaconState.fields.slashings.toViewDU(Array.from({length: 64}, () => BigInt(1000))); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // previousEpochAttestations view.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toView([1, 2, 3]); viewDU.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toViewDU([1, 2, 3]); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // currentEpochAttestations view.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toView([1, 2, 3]); viewDU.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toViewDU([1, 2, 3]); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // justificationBits view.justificationBits = BeaconState.fields.justificationBits.toView( @@ -129,7 +128,7 @@ describe("BeaconState ViewDU batch hash", function () { viewDU.justificationBits = BeaconState.fields.justificationBits.toViewDU( BitArray.fromBoolArray([true, false, true, true]) ); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // previousJustifiedCheckpoint const checkpoint: phase0.Checkpoint = { @@ -138,22 +137,22 @@ describe("BeaconState ViewDU batch hash", function () { }; view.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toView(checkpoint); viewDU.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toViewDU(checkpoint); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // currentJustifiedCheckpoint view.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toView(checkpoint); viewDU.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toViewDU(checkpoint); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // finalizedCheckpoint view.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toView(checkpoint); 
viewDU.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toViewDU(checkpoint); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // inactivityScores view.inactivityScores = BeaconState.fields.inactivityScores.toView([1, 2, 3]); viewDU.inactivityScores = BeaconState.fields.inactivityScores.toViewDU([1, 2, 3]); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // currentSyncCommittee const syncCommittee: altair.SyncCommittee = { @@ -162,12 +161,12 @@ describe("BeaconState ViewDU batch hash", function () { }; view.currentSyncCommittee = BeaconState.fields.currentSyncCommittee.toView(syncCommittee); viewDU.currentSyncCommittee = BeaconState.fields.currentSyncCommittee.toViewDU(syncCommittee); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // nextSyncCommittee view.nextSyncCommittee = BeaconState.fields.nextSyncCommittee.toView(syncCommittee); viewDU.nextSyncCommittee = BeaconState.fields.nextSyncCommittee.toViewDU(syncCommittee); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // latestExecutionPayloadHeader const latestExecutionPayloadHeader = BeaconState.fields.latestExecutionPayloadHeader.defaultValue(); @@ -179,15 +178,15 @@ describe("BeaconState ViewDU batch hash", function () { BeaconState.fields.latestExecutionPayloadHeader.toView(latestExecutionPayloadHeader); viewDU.latestExecutionPayloadHeader = BeaconState.fields.latestExecutionPayloadHeader.toViewDU(latestExecutionPayloadHeader); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // nextWithdrawalIndex viewDU.nextWithdrawalIndex = view.nextWithdrawalIndex = 1000; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // nextWithdrawalValidatorIndex viewDU.nextWithdrawalValidatorIndex = view.nextWithdrawalValidatorIndex = 1000; - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); // historicalSummaries const historicalSummaries = { @@ -196,6 +195,6 @@ describe("BeaconState ViewDU batch hash", function () { }; view.historicalSummaries = BeaconState.fields.historicalSummaries.toView([historicalSummaries]); viewDU.historicalSummaries = BeaconState.fields.historicalSummaries.toViewDU([historicalSummaries]); - expect(viewDU.hashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); }); }); diff --git a/packages/ssz/test/unit/eth2/validators.test.ts b/packages/ssz/test/unit/eth2/validators.test.ts index 4d877428..1cccd691 100644 --- a/packages/ssz/test/unit/eth2/validators.test.ts +++ b/packages/ssz/test/unit/eth2/validators.test.ts @@ -2,7 +2,6 @@ import {expect} from "chai"; import {describe, it} from "mocha"; import {toHexString, ListCompositeType, ValueOf, CompositeViewDU} from "../../../src"; import {ValidatorContainer, ValidatorNodeStruct} from "../../lodestarTypes/phase0/sszTypes"; -import {HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; type Validator = ValueOf; const 
validator: Validator = { @@ -34,6 +33,7 @@ describe("Container with BranchNodeStruct", function () { getExitEpoch: (treeBacked) => treeBacked.exitEpoch, getPubkey: (treeBacked) => toHexString(treeBacked.pubkey), hashTreeRoot: (treeBacked) => treeBacked.hashTreeRoot(), + batchHashTreeRoot: (treeBacked) => treeBacked.batchHashTreeRoot(), getProof: (treeBacked) => treeBacked.createProof(validatorProofJsonPaths), serialize: (treeBacked) => treeBacked.serialize(), }; @@ -105,61 +105,3 @@ describe("Container with BranchNodeStruct", function () { }); }); }); - -/** - * modifying any fields should result in the whole tree being recomputed - * 0 root - * / \ - * 1 10 11 - * / \ / \ - * 2 20 21 22 23 - * / \ / \ / \ / \ - * 3 pub with eff sla act act exit with - * / \ - * 4 pub0 pub1 - * This does not suport batch hash - **/ -describe.skip("getHashComputations BranchNodeStruct", function () { - const testCases: {name: string; fn: (validator: ValueOf) => void}[] = [ - {name: "modify pubkey", fn: (validator) => (validator.pubkey = Buffer.alloc(48, 0x01))}, - { - name: "modify withdrawalCredentials", - fn: (validator) => (validator.withdrawalCredentials = Buffer.alloc(32, 0x01)), - }, - {name: "modify effectiveBalance", fn: (validator) => (validator.effectiveBalance += 1e9)}, - {name: "modify slashed", fn: (validator) => (validator.slashed = true)}, - {name: "modify activationEligibilityEpoch", fn: (validator) => (validator.activationEligibilityEpoch += 1e6)}, - {name: "modify activationEpoch", fn: (validator) => (validator.activationEpoch += 1e6)}, - {name: "modify exitEpoch", fn: (validator) => (validator.exitEpoch += 1e6)}, - {name: "modify withdrawableEpoch", fn: (validator) => (validator.withdrawableEpoch += 1e6)}, - { - name: "modify all", - fn: (validator) => { - validator.pubkey = Buffer.alloc(48, 0x01); - validator.withdrawalCredentials = Buffer.alloc(32, 0x01); - validator.effectiveBalance += 1e9; - validator.slashed = true; - validator.activationEligibilityEpoch += 1e6; - validator.activationEpoch += 1e6; - validator.exitEpoch += 1e6; - validator.withdrawableEpoch += 1e6; - }, - }, - ]; - - for (const {name, fn} of testCases) { - it(name, () => { - const hcByLevel: HashComputationLevel[] = []; - const validatorViewDU = ValidatorNodeStruct.toViewDU(validator); - // cache all roots - validatorViewDU.hashTreeRoot(); - fn(validatorViewDU); - validatorViewDU.commit(0, hcByLevel); - expect(hcByLevel.length).to.be.equal(4); - expect(hcByLevel[0].length).to.be.equal(1); - expect(hcByLevel[1].length).to.be.equal(2); - expect(hcByLevel[2].length).to.be.equal(4); - expect(hcByLevel[3].length).to.be.equal(1); - }); - } -}); diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts index 6602afed..ff3ae2a2 100644 --- a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts +++ b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts @@ -35,7 +35,7 @@ describe("ListValidator ssz type", function () { oldViewDU.get(i).activationEpoch = 2024; newViewDU.get(i).activationEpoch = 2024; } - expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot()); + expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.batchHashTreeRoot()); expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize()); }); } @@ -54,7 +54,7 @@ describe("ListValidator ssz type", function () { oldViewDU.get(index).activationEpoch = 2024; newViewDU.get(index).activationEpoch = 2024; } - 
expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot());
+      expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.batchHashTreeRoot());
       expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
     });
   }
@@ -78,7 +78,7 @@ describe("ListValidator ssz type", function () {
       newViewDU.push(ssz.phase0.Validator.toViewDU(validator));
     }
     oldViewDU.commit();
-    expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.node.root);
+    expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.node.root);
     expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
     const allValidators = newViewDU.getAllReadonlyValues();
     for (let i = 0; i < numPush; i++) {
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
index cf01c2f1..6725a92b 100644
--- a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
+++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
@@ -35,8 +35,10 @@ describe("Validator ssz types", function () {
       const root = ValidatorContainer.hashTreeRoot(validator);
       const root2 = ssz.phase0.Validator.hashTreeRoot(validator);
       const root3 = ssz.phase0.Validator.toViewDU(validator).hashTreeRoot();
+      const root4 = ssz.phase0.Validator.toViewDU(validator).batchHashTreeRoot();
       expect(root2).to.be.deep.equal(root);
       expect(root3).to.be.deep.equal(root);
+      expect(root4).to.be.deep.equal(root);
     }
   });
 });
diff --git a/packages/ssz/test/unit/regressions.test.ts b/packages/ssz/test/unit/regressions.test.ts
index 4f5ecaf9..6dc0c22f 100644
--- a/packages/ssz/test/unit/regressions.test.ts
+++ b/packages/ssz/test/unit/regressions.test.ts
@@ -32,6 +32,8 @@ describe("Regressions / known issues", () => {
     const bytes = SyncCommitteeBits.serialize(bitArray);
     const rootByTreeBacked = SyncCommitteeBits.deserializeToViewDU(bytes).hashTreeRoot();
     expect(toHexString(rootByStruct)).to.be.equal(toHexString(rootByTreeBacked), "Inconsistent hashTreeRoot");
+    const rootByBatch = SyncCommitteeBits.deserializeToViewDU(bytes).batchHashTreeRoot();
+    expect(toHexString(rootByStruct)).to.be.equal(toHexString(rootByBatch), "Inconsistent batchHashTreeRoot");
   });
 
   it("converts bit arrays to tree", function () {
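The unit tests above and below all assert one invariant: whatever the mutation pattern, ViewDU.batchHashTreeRoot() must return exactly the same root as hashTreeRoot() computed through the immutable View or the plain value. That contract could be captured in a tiny helper; the following is an illustrative sketch only (assertBatchRoot is not part of the package):

import {expect} from "chai";

// Illustrative helper (not part of ssz): after any sequence of ViewDU
// mutations, the batched root must equal an independently computed expected
// root, and a second call must be served from the cached root.
function assertBatchRoot(viewDU: {batchHashTreeRoot(): Uint8Array}, expectedRoot: Uint8Array): void {
  expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
  // second call must hit the cache and still return the same bytes
  expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
}

diff --git a/packages/ssz/test/unit/unchangedViewDUs.test.ts b/packages/ssz/test/unit/unchangedViewDUs.test.ts
index 98e696c9..aa61dfe2 100644
--- a/packages/ssz/test/unit/unchangedViewDUs.test.ts
+++ b/packages/ssz/test/unit/unchangedViewDUs.test.ts
@@ -5,25 +5,25 @@ import {getRandomState} from "../utils/generateEth2Objs";
 
 describe("Unchanged ViewDUs", () => {
   const state = sszAltair.BeaconState.toViewDU(getRandomState(100));
 
-  it("should not recompute hashTreeRoot() when no fields is changed", () => {
-    const root = state.hashTreeRoot();
+  it("should not recompute batchHashTreeRoot() when no fields are changed", () => {
+    const root = state.batchHashTreeRoot();
     // this causes viewsChanged inside BeaconState container
     state.validators.length;
     state.balances.length;
     // but we should not recompute root, should get from cache instead
-    const root2 = state.hashTreeRoot();
-    expect(root2).to.equal(root, "should not recompute hashTreeRoot() when no fields are changed");
+    const root2 = state.batchHashTreeRoot();
+    expect(root2).to.equal(root, "should not recompute batchHashTreeRoot() when no fields are changed");
   });
 
-  it("handle childViewDU.hashTreeRoot()", () => {
+  it("handle childViewDU.batchHashTreeRoot()", () => {
     const state2 = state.clone();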
state2.latestBlockHeader.stateRoot = Buffer.alloc(32, 3); - const root2 = state2.hashTreeRoot(); + const root2 = state2.batchHashTreeRoot(); const state3 = state.clone(); state3.latestBlockHeader.stateRoot = Buffer.alloc(32, 3); - // hashTreeRoot() also does the commit() + // batchHashTreeRoot() also does the commit() state3.latestBlockHeader.commit(); - const root3 = state3.hashTreeRoot(); + const root3 = state3.batchHashTreeRoot(); expect(root3).to.be.deep.equal(root2); }); }); From a6191d46b6d9de24eb28f206e750b5710a0f3793 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 9 Aug 2024 13:39:29 +0700 Subject: [PATCH 091/113] feat: implement ViewDU.batchHashTreeRoot() --- packages/ssz/src/type/arrayBasic.ts | 18 +- packages/ssz/src/type/bitArray.ts | 6 +- packages/ssz/src/type/byteArray.ts | 19 +- packages/ssz/src/type/composite.ts | 3 +- packages/ssz/src/type/container.ts | 9 +- packages/ssz/src/type/listBasic.ts | 25 +- packages/ssz/src/type/listComposite.ts | 25 +- packages/ssz/src/type/optional.ts | 18 +- packages/ssz/src/type/union.ts | 18 +- packages/ssz/src/type/vectorBasic.ts | 14 +- packages/ssz/src/type/vectorComposite.ts | 14 +- packages/ssz/src/view/arrayBasic.ts | 12 +- packages/ssz/src/view/arrayComposite.ts | 12 +- packages/ssz/src/viewDU/abstract.ts | 39 +- packages/ssz/src/viewDU/arrayBasic.ts | 34 +- packages/ssz/src/viewDU/arrayComposite.ts | 40 +- packages/ssz/src/viewDU/bitArray.ts | 8 +- packages/ssz/src/viewDU/container.ts | 41 +- .../ssz/src/viewDU/containerNodeStruct.ts | 32 +- .../ssz/test/perf/eth2/hashTreeRoot.test.ts | 12 +- packages/ssz/test/spec/runValidTest.ts | 14 + .../test/unit/byType/bitArray/tree.test.ts | 17 + .../test/unit/byType/bitVector/tree.test.ts | 17 + .../test/unit/byType/container/tree.test.ts | 417 ++++++++++++++++++ .../test/unit/byType/listBasic/tree.test.ts | 75 ++++ .../unit/byType/listComposite/tree.test.ts | 124 +++++- .../test/unit/byType/optional/tree.test.ts | 4 +- .../test/unit/byType/runViewTestMutation.ts | 85 ++-- .../test/unit/byType/vectorBasic/tree.test.ts | 67 +++ .../unit/byType/vectorComposite/tree.test.ts | 121 +++++ .../ssz/test/unit/eth2/beaconState.test.ts | 200 +++++++++ .../ssz/test/unit/eth2/validators.test.ts | 26 +- packages/ssz/test/unit/regressions.test.ts | 2 + .../ssz/test/unit/unchangedViewDUs.test.ts | 29 ++ yarn.lock | 21 + 35 files changed, 1496 insertions(+), 122 deletions(-) create mode 100644 packages/ssz/test/unit/byType/vectorBasic/tree.test.ts create mode 100644 packages/ssz/test/unit/byType/vectorComposite/tree.test.ts create mode 100644 packages/ssz/test/unit/eth2/beaconState.test.ts create mode 100644 packages/ssz/test/unit/unchangedViewDUs.test.ts diff --git a/packages/ssz/src/type/arrayBasic.ts b/packages/ssz/src/type/arrayBasic.ts index c731e02a..2ad5d3c3 100644 --- a/packages/ssz/src/type/arrayBasic.ts +++ b/packages/ssz/src/type/arrayBasic.ts @@ -5,6 +5,8 @@ import { getNodesAtDepth, packedNodeRootsToBytes, packedRootsBytesToNode, + HashComputationLevel, + levelAtIndex, } from "@chainsafe/persistent-merkle-tree"; import {Type, ValueOf, ByteViews} from "./abstract"; import {BasicType} from "./basic"; @@ -39,14 +41,24 @@ export function addLengthNode(chunksNode: Node, length: number): Node { return new BranchNode(chunksNode, LeafNode.fromUint32(length)); } -export function setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node { +export function setChunksNode( + rootNode: Node, + chunksNode: Node, + newLength: number | null, + hcOffset = 0, + hcByLevel: 
HashComputationLevel[] | null = null +): Node { const lengthNode = - newLength !== undefined + newLength !== null ? // If newLength is set, create a new node for length LeafNode.fromUint32(newLength) : // else re-use existing node (rootNode.right as LeafNode); - return new BranchNode(chunksNode, lengthNode); + const branchNode = new BranchNode(chunksNode, lengthNode); + if (hcByLevel !== null) { + levelAtIndex(hcByLevel, hcOffset).push(chunksNode, lengthNode, branchNode); + } + return branchNode; } export type ArrayProps = {isList: true; limit: number} | {isList: false; length: number}; diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts index 5351286f..5071550c 100644 --- a/packages/ssz/src/type/bitArray.ts +++ b/packages/ssz/src/type/bitArray.ts @@ -1,4 +1,4 @@ -import {concatGindices, Gindex, Node, toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; +import {concatGindices, Gindex, Node, toGindex, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; import {splitIntoRootChunks} from "../util/merkleize"; import {CompositeType, LENGTH_GINDEX} from "./composite"; @@ -29,8 +29,8 @@ export abstract class BitArrayType extends CompositeType extends Type { /** INTERNAL METHOD: Given a Tree View, returns a `Node` with all its updated data */ abstract commitView(view: TV): Node; /** INTERNAL METHOD: Given a Deferred Update Tree View returns a `Node` with all its updated data */ - abstract commitViewDU(view: TVDU): Node; + abstract commitViewDU(view: TVDU, hcOffset?: number, hcByLevel?: HashComputationLevel[] | null): Node; /** INTERNAL METHOD: Return the cache of a Deferred Update Tree View. May return `undefined` if this ViewDU has no cache */ abstract cacheOfViewDU(view: TVDU): unknown; diff --git a/packages/ssz/src/type/container.ts b/packages/ssz/src/type/container.ts index 97b10aa1..daa1911d 100644 --- a/packages/ssz/src/type/container.ts +++ b/packages/ssz/src/type/container.ts @@ -7,6 +7,7 @@ import { toGindex, concatGindices, getNode, + HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; @@ -162,8 +163,12 @@ export class ContainerType>> extends return view.node; } - commitViewDU(view: ContainerTreeViewDUType): Node { - view.commit(); + commitViewDU( + view: ContainerTreeViewDUType, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index 53ae8783..c9e397e6 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -1,4 +1,4 @@ -import {LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {LeafNode, Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "./abstract"; import {BasicType} from "./basic"; import {ByteViews} from "./composite"; @@ -93,8 +93,12 @@ export class ListBasicType> return view.node; } - commitViewDU(view: ListBasicTreeViewDU): Node { - view.commit(); + commitViewDU( + view: ListBasicTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } @@ -144,8 +148,19 @@ export class ListBasicType> return node.left; } - tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node { - 
return setChunksNode(rootNode, chunksNode, newLength); + tree_chunksNodeOffset(): number { + // one more level for length, see setChunksNode below + return 1; + } + + tree_setChunksNode( + rootNode: Node, + chunksNode: Node, + newLength: number | null, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + return setChunksNode(rootNode, chunksNode, newLength, hcOffset, hcByLevel); } // Merkleization diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index 48fc37df..dad8e77c 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,4 +1,4 @@ -import {Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import { mixInLength, maxChunksToDepth, @@ -97,8 +97,12 @@ export class ListCompositeType< return view.node; } - commitViewDU(view: ListCompositeTreeViewDU): Node { - view.commit(); + commitViewDU( + view: ListCompositeTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } @@ -150,8 +154,19 @@ export class ListCompositeType< return node.left; } - tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node { - return setChunksNode(rootNode, chunksNode, newLength); + tree_chunksNodeOffset(): number { + // one more level for length, see setChunksNode below + return 1; + } + + tree_setChunksNode( + rootNode: Node, + chunksNode: Node, + newLength: number | null, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + return setChunksNode(rootNode, chunksNode, newLength, hcOffset, hcByLevel); } // Merkleization diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index 59c38d6b..5ae7e2bb 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -1,4 +1,12 @@ -import {concatGindices, Gindex, Node, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; +import { + concatGindices, + Gindex, + Node, + Tree, + zeroNode, + HashComputationLevel, + getHashComputations, +} from "@chainsafe/persistent-merkle-tree"; import {mixInLength} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -75,8 +83,12 @@ export class OptionalType> extends CompositeTy } // TODO add an OptionalViewDU - commitViewDU(view: ValueOfType): Node { - return this.value_toTree(view); + commitViewDU(view: ValueOfType, hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): Node { + const node = this.value_toTree(view); + if (hcByLevel !== null && node.h0 === null) { + getHashComputations(node, hcOffset, hcByLevel); + } + return node; } // TODO add an OptionalViewDU diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index c6fea712..fbd7f97a 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -1,4 +1,12 @@ -import {concatGindices, getNode, Gindex, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import { + concatGindices, + getNode, + Gindex, + Node, + Tree, + HashComputationLevel, + getHashComputations, +} from "@chainsafe/persistent-merkle-tree"; import {mixInLength} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -106,8 +114,12 @@ export class UnionType[]> extends CompositeType< return this.value_toTree(view); } - commitViewDU(view: ValueOfTypes): Node { - 
return this.value_toTree(view); + commitViewDU(view: ValueOfTypes, hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): Node { + const node = this.value_toTree(view); + if (hcByLevel !== null && node.h0 === null) { + getHashComputations(node, hcOffset, hcByLevel); + } + return node; } value_serializedSize(value: ValueOfTypes): number { diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts index 061008e0..d52a9405 100644 --- a/packages/ssz/src/type/vectorBasic.ts +++ b/packages/ssz/src/type/vectorBasic.ts @@ -1,4 +1,4 @@ -import {Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth, splitIntoRootChunks} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -83,8 +83,12 @@ export class VectorBasicType> return view.node; } - commitViewDU(view: ArrayBasicTreeViewDU): Node { - view.commit(); + commitViewDU( + view: ArrayBasicTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } @@ -132,6 +136,10 @@ export class VectorBasicType> return node; } + tree_chunksNodeOffset(): number { + return 0; + } + tree_setChunksNode(rootNode: Node, chunksNode: Node): Node { return chunksNode; } diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts index 68455bb1..e1af8dd4 100644 --- a/packages/ssz/src/type/vectorComposite.ts +++ b/packages/ssz/src/type/vectorComposite.ts @@ -1,4 +1,4 @@ -import {Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -90,8 +90,12 @@ export class VectorCompositeType< return view.node; } - commitViewDU(view: ArrayCompositeTreeViewDU): Node { - view.commit(); + commitViewDU( + view: ArrayCompositeTreeViewDU, + hcOffset = 0, + hcByLevel: HashComputationLevel[] | null = null + ): Node { + view.commit(hcOffset, hcByLevel); return view.node; } @@ -139,6 +143,10 @@ export class VectorCompositeType< return node; } + tree_chunksNodeOffset(): number { + return 0; + } + tree_setChunksNode(rootNode: Node, chunksNode: Node): Node { return chunksNode; } diff --git a/packages/ssz/src/view/arrayBasic.ts b/packages/ssz/src/view/arrayBasic.ts index e96ce1d1..3b58051b 100644 --- a/packages/ssz/src/view/arrayBasic.ts +++ b/packages/ssz/src/view/arrayBasic.ts @@ -1,4 +1,4 @@ -import {getNodesAtDepth, LeafNode, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {getNodesAtDepth, LeafNode, Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {BasicType} from "../type/basic"; import {CompositeType} from "../type/composite"; @@ -21,8 +21,16 @@ export type ArrayBasicType> = CompositeTy tree_setLength(tree: Tree, length: number): void; /** INTERNAL METHOD: Return the chunks node from a root node */ tree_getChunksNode(rootNode: Node): Node; + /** INTERNAL METHOD: Return the offset from root for HashComputation */ + tree_chunksNodeOffset(): number; /** INTERNAL METHOD: Return a new root node with changed chunks node and length */ - tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node; + tree_setChunksNode( + rootNode: Node, + chunksNode: Node, + 
newLength: number | null, + hcOffset?: number, + hcByLevel?: HashComputationLevel[] | null + ): Node; }; export class ArrayBasicTreeView> extends TreeView> { diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts index 252a3587..4bac64e0 100644 --- a/packages/ssz/src/view/arrayComposite.ts +++ b/packages/ssz/src/view/arrayComposite.ts @@ -1,4 +1,4 @@ -import {getNodesAtDepth, Node, toGindexBitstring, Tree} from "@chainsafe/persistent-merkle-tree"; +import {getNodesAtDepth, Node, toGindexBitstring, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {TreeView} from "./abstract"; @@ -16,8 +16,16 @@ export type ArrayCompositeType< tree_setLength(tree: Tree, length: number): void; /** INTERNAL METHOD: Return the chunks node from a root node */ tree_getChunksNode(rootNode: Node): Node; + /** INTERNAL METHOD: Return the offset from root for HashComputation */ + tree_chunksNodeOffset(): number; /** INTERNAL METHOD: Return a new root node with changed chunks node and length */ - tree_setChunksNode(rootNode: Node, chunksNode: Node, newLength?: number): Node; + tree_setChunksNode( + rootNode: Node, + chunksNode: Node, + newLength: number | null, + hcOffset?: number, + hcByLevel?: HashComputationLevel[] | null + ): Node; }; export class ArrayCompositeTreeView< diff --git a/packages/ssz/src/viewDU/abstract.ts b/packages/ssz/src/viewDU/abstract.ts index 29878637..144268f5 100644 --- a/packages/ssz/src/viewDU/abstract.ts +++ b/packages/ssz/src/viewDU/abstract.ts @@ -1,6 +1,17 @@ +import {HashComputationLevel, executeHashComputations, HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; import {ByteViews, CompositeType} from "../type/composite"; import {TreeView} from "../view/abstract"; +/** + * Always allocating a new HashComputationGroup for each hashTreeRoot() is not great for gc + * because a lot of ViewDUs are not changed and computed root already. + */ +const symbolCachedTreeRoot = Symbol("ssz_cached_tree_root"); + +export type NodeWithCachedTreeRoot = { + [symbolCachedTreeRoot]?: Uint8Array; +}; + /* eslint-disable @typescript-eslint/member-ordering */ /** @@ -19,7 +30,7 @@ export abstract class TreeViewDU> extend return values; } - commit(): void { + /** + * When we need to compute HashComputations (hcByLevel != null): + * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel + * - if old _rootNode is not hashed, need to traverse and put to hcByLevel + */ + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { + const isOldRootHashed = this._rootNode.h0 !== null; if (this.nodesChanged.size === 0) { + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); + } return; } @@ -164,15 +181,22 @@ export class ArrayBasicTreeViewDU> extend } const chunksNode = this.type.tree_getChunksNode(this._rootNode); - // TODO: Ensure fast setNodesAtDepth() method is correct - const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes); + const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); this._rootNode = this.type.tree_setChunksNode( this._rootNode, newChunksNode, - this.dirtyLength ? 
this._length : undefined + this.dirtyLength ? this._length : null, + hcOffset, + isOldRootHashed ? hcByLevel : null ); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); + } + this.nodesChanged.clear(); this.dirtyLength = false; } diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 81ca9e02..44c50375 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -1,4 +1,11 @@ -import {getNodeAtDepth, getNodesAtDepth, Node, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import { + getHashComputations, + getNodeAtDepth, + getNodesAtDepth, + HashComputationLevel, + Node, + setNodesAtDepth, +} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {ArrayCompositeType} from "../view/arrayComposite"; @@ -163,15 +170,29 @@ export class ArrayCompositeTreeViewDU< return values; } - commit(): void { + /** + * When we need to compute HashComputations (hcByLevel != null): + * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel + * - if old _rootNode is not hashed, need to traverse and put to hcByLevel + */ + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { + const isOldRootHashed = this._rootNode.h0 !== null; if (this.viewsChanged.size === 0) { + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); + } return; } + // each view may mutate hcByLevel at offset + depth + const offsetView = hcOffset + this.type.depth; + // Depth includes the extra level for the length node + const byLevelView = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const nodesChanged: {index: number; node: Node}[] = []; for (const [index, view] of this.viewsChanged) { - const node = this.type.elementType.commitViewDU(view); + const node = this.type.elementType.commitViewDU(view, offsetView, byLevelView); // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal this.nodes[index] = node; nodesChanged.push({index, node}); @@ -187,15 +208,22 @@ export class ArrayCompositeTreeViewDU< const nodes = nodesChangedSorted.map((entry) => entry.node); const chunksNode = this.type.tree_getChunksNode(this._rootNode); - // TODO: Ensure fast setNodesAtDepth() method is correct - const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes); + const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); this._rootNode = this.type.tree_setChunksNode( this._rootNode, newChunksNode, - this.dirtyLength ? this._length : undefined + this.dirtyLength ? 
this._length : null, + hcOffset, + hcByLevel ); + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); + } + this.viewsChanged.clear(); this.dirtyLength = false; } diff --git a/packages/ssz/src/viewDU/bitArray.ts b/packages/ssz/src/viewDU/bitArray.ts index 1c3a5421..b9c12d14 100644 --- a/packages/ssz/src/viewDU/bitArray.ts +++ b/packages/ssz/src/viewDU/bitArray.ts @@ -1,4 +1,4 @@ -import {Node} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, getHashComputations} from "@chainsafe/persistent-merkle-tree"; import {BitArray} from "../value/bitArray"; import {CompositeType} from "../type/composite"; import {TreeViewDU} from "./abstract"; @@ -22,10 +22,14 @@ export class BitArrayTreeViewDU extends TreeViewDU>> extends }; } - commit(): void { + /** + * When we need to compute HashComputations (hcByLevel != null): + * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel + * - if old _rootNode is not hashed, need to traverse and put to hcByLevel + */ + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { + const isOldRootHashed = this._rootNode.h0 !== null; if (this.nodesChanged.size === 0 && this.viewsChanged.size === 0) { + if (!isOldRootHashed && hcByLevel !== null) { + getHashComputations(this._rootNode, hcOffset, hcByLevel); + } return; } + // each view may mutate hcByLevel at offset + depth + const offsetView = hcOffset + this.type.depth; + // if old root is not hashed, no need to pass hcByLevel to child view bc we need to do full traversal here + const byLevelView = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const nodesChanged: {index: number; node: Node}[] = []; for (const [index, view] of this.viewsChanged) { const fieldType = this.type.fieldsEntries[index].fieldType as unknown as CompositeTypeAny; - const node = fieldType.commitViewDU(view); + const node = fieldType.commitViewDU(view, offsetView, byLevelView); // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal this.nodes[index] = node; nodesChanged.push({index, node}); @@ -96,7 +117,19 @@ class ContainerTreeViewDU>> extends const indexes = nodesChangedSorted.map((entry) => entry.index); const nodes = nodesChangedSorted.map((entry) => entry.node); - this._rootNode = setNodesAtDepth(this._rootNode, this.type.depth, indexes, nodes); + this._rootNode = setNodesAtDepth( + this._rootNode, + this.type.depth, + indexes, + nodes, + hcOffset, + isOldRootHashed ? 
hcByLevel : null
+    );
+
+    // old root is not hashed, need to traverse
+    if (!isOldRootHashed && hcByLevel !== null) {
+      getHashComputations(this._rootNode, hcOffset, hcByLevel);
+    }
 
     this.nodesChanged.clear();
     this.viewsChanged.clear();
diff --git a/packages/ssz/src/viewDU/containerNodeStruct.ts b/packages/ssz/src/viewDU/containerNodeStruct.ts
index c69cd45a..9aa45ed7 100644
--- a/packages/ssz/src/viewDU/containerNodeStruct.ts
+++ b/packages/ssz/src/viewDU/containerNodeStruct.ts
@@ -1,4 +1,4 @@
-import {Node} from "@chainsafe/persistent-merkle-tree";
+import {Node, HashComputationLevel} from "@chainsafe/persistent-merkle-tree";
 import {Type, ValueOf} from "../type/abstract";
 import {isCompositeType} from "../type/composite";
 import {BranchNodeStruct} from "../branchNodeStruct";
@@ -8,7 +8,7 @@ import {TreeViewDU} from "./abstract";
 
 /* eslint-disable @typescript-eslint/member-ordering */
 
-class ContainerTreeViewDU<Fields extends Record<string, Type<unknown>>> extends TreeViewDU<
+export class ContainerNodeStructTreeViewDU<Fields extends Record<string, Type<unknown>>> extends TreeViewDU<
   ContainerTypeGeneric<Fields>
 > {
   protected valueChanged: ValueOfFields<Fields> | null = null;
@@ -27,15 +27,27 @@ class ContainerTreeViewDU<Fields extends Record<string, Type<unknown>>> extends
     return;
   }
 
-  commit(): void {
-    if (this.valueChanged === null) {
-      return;
-    }
+  get value(): ValueOfFields<Fields> {
+    return this.valueChanged ?? this._rootNode.value;
+  }
 
-    const value = this.valueChanged;
-    this.valueChanged = null;
+  /**
+   * There are 2 cases:
+   * - normal commit() or hashTreeRoot(): hcByLevel is null, no need to compute root
+   * - batchHashTreeRoot(): hcByLevel is not null, need to compute root because this type does not support HashComputation
+   */
+  commit(_?: number, hcByLevel: HashComputationLevel[] | null = null): void {
+    if (this.valueChanged !== null) {
+      const value = this.valueChanged;
+      this.valueChanged = null;
+
+      this._rootNode = this.type.value_toTree(value) as BranchNodeStruct<ValueOfFields<Fields>>;
+    }
 
-    this._rootNode = this.type.value_toTree(value) as BranchNodeStruct<ValueOfFields<Fields>>;
+    if (this._rootNode.h0 === null && hcByLevel !== null) {
+      // consumer is batchHashTreeRoot(): compute the root eagerly
+      this._rootNode.rootHashObject;
+    }
   }
 
   protected clearCache(): void {
@@ -46,7 +58,7 @@ export function getContainerTreeViewDUClass<Fields extends Record<string, Type<unknown>>>(
   type: ContainerTypeGeneric<Fields>
 ): ContainerTreeViewDUTypeConstructor<Fields> {
-  class CustomContainerTreeViewDU extends ContainerTreeViewDU<Fields> {}
+  class CustomContainerTreeViewDU extends ContainerNodeStructTreeViewDU<Fields> {}
 
   // Dynamically define prototype methods
   for (let index = 0; index < type.fieldsEntries.length; index++) {
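The benchmark below reuses a single HashComputationGroup across iterations, which is the GC-friendly pattern described in the viewDU/abstract.ts comment earlier in this patch. A hedged usage sketch of that pattern (`states` is a placeholder collection; the optional hcGroup parameter of batchHashTreeRoot() is inferred from the benchmark itself):

import {HashComputationGroup} from "@chainsafe/persistent-merkle-tree";

// Sharing one group across calls recycles its per-level arrays between root
// computations instead of reallocating them on every call.
const hc = new HashComputationGroup();

// placeholder collection of ViewDUs to hash; not part of the package
declare const states: Array<{batchHashTreeRoot(hcGroup?: HashComputationGroup): Uint8Array}>;

for (const state of states) {
  const root = state.batchHashTreeRoot(hc); // same group, recycled arrays
  console.log(root.length); // 32-byte root
}

diff --git a/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts b/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts
index f94cc6ec..1ae6da27 100644
--- a/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts
+++ b/packages/ssz/test/perf/eth2/hashTreeRoot.test.ts
@@ -1,5 +1,5 @@
 import {itBench} from "@dapplion/benchmark";
-import {hasher, uint8ArrayToHashObject} from "@chainsafe/persistent-merkle-tree";
+import {HashComputationGroup, hasher, uint8ArrayToHashObject} from "@chainsafe/persistent-merkle-tree";
 import * as sszPhase0 from "../../lodestarTypes/phase0/sszTypes";
 import * as sszAltair from "../../lodestarTypes/altair/sszTypes";
 import {
@@ -68,6 +68,16 @@ describe("HashTreeRoot frequent eth2 objects", () => {
     },
   });
 
+  const hc = new HashComputationGroup();
+  itBench<CompositeViewDU<typeof sszAltair.BeaconState>, Uint8Array>({
+    id: `BeaconState vc ${validatorCount} - batchHashTreeRoot tree`,
+    before: () => getStateViewDU().serialize(),
+    beforeEach: (bytes) => sszAltair.BeaconState.deserializeToViewDU(bytes),
+    fn: (state) => {
+      state.batchHashTreeRoot(hc);
+    },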
+  });
+
   for (const {fieldName, fieldType} of sszAltair.BeaconState.fieldsEntries) {
     // Only benchmark big data structures
     if (fieldType.maxSize < 10e6 || !isCompositeType(fieldType)) {
diff --git a/packages/ssz/test/spec/runValidTest.ts b/packages/ssz/test/spec/runValidTest.ts
index 56307baf..1bac7760 100644
--- a/packages/ssz/test/spec/runValidTest.ts
+++ b/packages/ssz/test/spec/runValidTest.ts
@@ -117,6 +117,20 @@ export function runValidSszTest(type: Type<unknown>, testData: ValidTestCaseData
     assertRoot(root, "type.hashTreeRoot()");
   }
 
+  if (isCompositeType(type)) {
+    // batchHashTreeRoot()
+    const root = wrapErr(() => {
+      const node = type.value_toTree(testDataValue);
+      const viewDU = type.getViewDU(node);
+      if (viewDU instanceof TreeViewDU) {
+        return viewDU.batchHashTreeRoot();
+      } else {
+        return type.hashTreeRoot(testDataValue);
+      }
+    }, "ViewDU.batchHashTreeRoot()");
+    assertRoot(root, "ViewDU.batchHashTreeRoot()");
+  }
+
   // value -> tree - value_toTree()
   const node = wrapErr(() => type.value_toTree(testDataValue), "type.value_toTree()");
   assertNode(node, "type.value_toTree()");
diff --git a/packages/ssz/test/unit/byType/bitArray/tree.test.ts b/packages/ssz/test/unit/byType/bitArray/tree.test.ts
index 8d33314b..456243c5 100644
--- a/packages/ssz/test/unit/byType/bitArray/tree.test.ts
+++ b/packages/ssz/test/unit/byType/bitArray/tree.test.ts
@@ -1,3 +1,4 @@
+import {expect} from "chai";
 import {BitVectorType, BitListType, BitArray} from "../../../../src";
 import {runViewTestMutation} from "../runViewTestMutation";
 
@@ -50,6 +51,22 @@ for (const type of [new BitVectorType(4), new BitListType(4)]) {
   });
 }
 
+describe("BitArray batchHashTreeRoot", () => {
+  const sszType = new BitListType(4);
+  const value = fromNum(4, 0b0010);
+  const expectedRoot = sszType.toView(value).hashTreeRoot();
+
+  it("fresh ViewDU", () => {
+    expect(sszType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+
+  it("set then batchHashTreeRoot", () => {
+    const viewDU = sszType.toViewDU(fromNum(4, 0b0011));
+    viewDU.set(0, false);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+});
+
 function fromNum(bitLen: number, num: number): BitArray {
   const bitArray = BitArray.fromBitLen(bitLen);
   for (let i = 0; i < bitLen; i++) {
diff --git a/packages/ssz/test/unit/byType/bitVector/tree.test.ts b/packages/ssz/test/unit/byType/bitVector/tree.test.ts
index 8dbe47ef..04f2ee14 100644
--- a/packages/ssz/test/unit/byType/bitVector/tree.test.ts
+++ b/packages/ssz/test/unit/byType/bitVector/tree.test.ts
@@ -1,3 +1,4 @@
+import {expect} from "chai";
 import {BitVectorType, BitArray} from "../../../../src";
 import {runViewTestMutation} from "../runViewTestMutation";
 
@@ -48,6 +49,22 @@ runViewTestMutation({
   ],
 });
 
+describe("BitVector batchHashTreeRoot", () => {
+  const sszType = new BitVectorType(4);
+  const value = fromNum(4, 0b0010);
+  const expectedRoot = sszType.toView(value).hashTreeRoot();
+
+  it("fresh ViewDU", () => {
+    expect(sszType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+
+  it("set then batchHashTreeRoot", () => {
+    const viewDU = sszType.toViewDU(fromNum(4, 0b0011));
+    viewDU.set(0, false);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
+  });
+});
+
 function fromNum(bitLen: number, num: number): BitArray {
   const bitArray = BitArray.fromBitLen(bitLen);
   for (let i = 0; i < bitLen; i++) {
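Before the long container test suite that follows, here is a brief sketch of what batchHashTreeRoot() does end to end, pieced together from the commit() signatures introduced in this patch. It is a minimal sketch, assuming HashComputationGroup exposes its per-level arrays as byLevel; the shipped implementation may differ in details:

import {
  executeHashComputations,
  HashComputationGroup,
  HashComputationLevel,
  Node,
} from "@chainsafe/persistent-merkle-tree";

// Shape of any ViewDU after this patch: commit() optionally records its
// pending hash work instead of hashing eagerly.
interface CommittableViewDU {
  readonly node: Node;
  commit(hcOffset?: number, hcByLevel?: HashComputationLevel[] | null): void;
}

function batchHashTreeRootSketch(view: CommittableViewDU, hcGroup = new HashComputationGroup()): Uint8Array {
  // commit() walks only the dirty subtrees and records (left, right, parent)
  // hash jobs into the level arrays, indexed by distance from the root
  view.commit(0, hcGroup.byLevel);
  // hash the deepest level first so every parent reads fresh child hashes
  executeHashComputations(hcGroup.byLevel);
  // the root hash is now cached on the node
  return view.node.root;
}

diff --git a/packages/ssz/test/unit/byType/container/tree.test.ts 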
b/packages/ssz/test/unit/byType/container/tree.test.ts index 91a68c4a..6b545792 100644 --- a/packages/ssz/test/unit/byType/container/tree.test.ts +++ b/packages/ssz/test/unit/byType/container/tree.test.ts @@ -1,5 +1,10 @@ import {expect} from "chai"; import { + BitArray, + BitListType, + BitVectorType, + BooleanType, + ByteListType, ByteVectorType, ContainerNodeStructType, ContainerType, @@ -7,8 +12,11 @@ import { ListCompositeType, NoneType, toHexString, + UintNumberType, UnionType, ValueOf, + VectorBasicType, + VectorCompositeType, } from "../../../../src"; import {uint64NumInfType, uint64NumType} from "../../../utils/primitiveTypes"; import {runViewTestMutation} from "../runViewTestMutation"; @@ -218,3 +226,412 @@ runViewTestMutation({ }, ], }); + +describe("ContainerViewDU batchHashTreeRoot", function () { + const childContainerType = new ContainerType({f0: uint64NumInfType, f1: uint64NumInfType}); + const unionType = new UnionType([new NoneType(), uint64NumType]); + const listBasicType = new ListBasicType(uint64NumType, 10); + const vectorBasicType = new VectorBasicType(uint64NumType, 2); + const listCompositeType = new ListCompositeType(childContainerType, 10); + const vectorCompositeType = new VectorCompositeType(childContainerType, 1); + const bitVectorType = new BitVectorType(64); + const bitListType = new BitListType(4); + const childContainerStruct = new ContainerNodeStructType({g0: uint64NumInfType, g1: uint64NumInfType}); + const parentContainerType = new ContainerType({ + a: uint64NumType, + b: new BooleanType(), + c: unionType, + d: new ByteListType(64), + e: new ByteVectorType(64), + // a child container type + f: childContainerType, + g: childContainerStruct, + h: listBasicType, + i: vectorBasicType, + j: listCompositeType, + k: vectorCompositeType, + l: bitVectorType, + m: bitListType, + // TODO: add more tests when OptionalType is implemented + }); + + const value: ValueOf = { + a: 10, + b: true, + c: {selector: 1, value: 100}, + d: Buffer.alloc(64, 2), + e: Buffer.alloc(64, 1), + f: {f0: 100, f1: 101}, + g: {g0: 100, g1: 101}, + h: [1, 2], + i: [1, 2], + j: [{f0: 1, f1: 2}], + k: [{f0: 1, f1: 2}], + l: BitArray.fromSingleBit(64, 5), + m: BitArray.fromSingleBit(4, 1), + }; + const expectedRoot = parentContainerType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(parentContainerType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify Number type", () => { + const viewDU = parentContainerType.toViewDU({...value, a: 9}); + viewDU.batchHashTreeRoot(); + viewDU.a += 1; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.a = 10; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify BooleanType", () => { + const viewDU = parentContainerType.toViewDU({...value, b: false}); + viewDU.batchHashTreeRoot(); + viewDU.b = true; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.b = true; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify UnionType", () => { + const viewDU = parentContainerType.toViewDU({...value, c: {selector: 1, value: 101}}); + viewDU.batchHashTreeRoot(); + viewDU.c = unionType.toViewDU({selector: 1, value: 100}); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + 
// assign again but commit before batchHashTreeRoot() + viewDU.c = unionType.toViewDU({selector: 1, value: 100}); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify ByteVectorType", () => { + const viewDU = parentContainerType.toViewDU(value); + viewDU.batchHashTreeRoot(); + // this takes more than 1 chunk so the resulting node is a branch node + viewDU.e = viewDU.e.slice(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.e = viewDU.e.slice(); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify ByteListType", () => { + const viewDU = parentContainerType.toViewDU(value); + viewDU.batchHashTreeRoot(); + // this takes more than 1 chunk so the resulting node is a branch node + viewDU.d = viewDU.d.slice(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.d = viewDU.d.slice(); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify full child container", () => { + const viewDU = parentContainerType.toViewDU({...value, f: {f0: 99, f1: 999}}); + viewDU.batchHashTreeRoot(); + viewDU.f = childContainerType.toViewDU({f0: 100, f1: 101}); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.f = childContainerType.toViewDU({f0: 100, f1: 101}); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify partial child container", () => { + const viewDU = parentContainerType.toViewDU({...value, f: {f0: 99, f1: 999}}); + viewDU.batchHashTreeRoot(); + viewDU.f.f0 = 100; + viewDU.f.f1 = 101; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.f.f0 = 100; + viewDU.f.f1 = 101; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify ContainerNodeStructType", () => { + const viewDU = parentContainerType.toViewDU({...value, g: {g0: 99, g1: 999}}); + viewDU.batchHashTreeRoot(); + viewDU.g = childContainerStruct.toViewDU({g0: 100, g1: 101}); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.g = childContainerStruct.toViewDU({g0: 100, g1: 101}); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify partial ContainerNodeStructType", () => { + const viewDU = parentContainerType.toViewDU({...value, g: {g0: 99, g1: 999}}); + viewDU.batchHashTreeRoot(); + viewDU.g.g0 = 100; + viewDU.g.g1 = 101; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.g.g0 = 100; + viewDU.g.g1 = 101; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify ListBasicType", () => { + const viewDU = parentContainerType.toViewDU({...value, h: []}); + viewDU.batchHashTreeRoot(); + viewDU.h = listBasicType.toViewDU([1, 2]); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.h = 
listBasicType.toViewDU([1, 2]); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then push 1 item to ListBasicType", () => { + const viewDU = parentContainerType.toViewDU({...value, h: [1]}); + viewDU.batchHashTreeRoot(); + viewDU.h.push(2); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.h = listBasicType.toViewDU([1, 2]); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify 1 item of ListBasicType", () => { + const viewDU = parentContainerType.toViewDU({...value, h: [1, 3]}); + viewDU.batchHashTreeRoot(); + viewDU.h.set(1, 2); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.h.set(1, 2); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify VectorBasicType", () => { + const viewDU = parentContainerType.toViewDU({...value, i: []}); + viewDU.batchHashTreeRoot(); + viewDU.i = vectorBasicType.toViewDU([1, 2]); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.i = vectorBasicType.toViewDU([1, 2]); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify 1 item of VectorBasicType", () => { + const viewDU = parentContainerType.toViewDU({...value, i: [1, 3]}); + viewDU.batchHashTreeRoot(); + viewDU.i.set(1, 2); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.i.set(1, 2); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify ListCompositeType", () => { + const viewDU = parentContainerType.toViewDU({...value, j: []}); + viewDU.batchHashTreeRoot(); + viewDU.j = listCompositeType.toViewDU([{f0: 1, f1: 2}]); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.j = listCompositeType.toViewDU([{f0: 1, f1: 2}]); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then push 1 item to ListCompositeType", () => { + const viewDU = parentContainerType.toViewDU({...value, j: []}); + viewDU.batchHashTreeRoot(); + viewDU.j.push(childContainerType.toViewDU({f0: 1, f1: 2})); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.j = listCompositeType.toViewDU([{f0: 1, f1: 2}]); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify 1 item of ListCompositeType", () => { + const viewDU = parentContainerType.toViewDU({...value, j: [{f0: 1, f1: 3}]}); + viewDU.batchHashTreeRoot(); + viewDU.j.set(0, childContainerType.toViewDU({f0: 1, f1: 2})); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.j.set(0, childContainerType.toViewDU({f0: 1, f1: 2})); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify 1 field of 1 item of ListCompositeType", () => { + const viewDU = 
parentContainerType.toViewDU({...value, j: [{f0: 1, f1: 3}]}); + viewDU.batchHashTreeRoot(); + viewDU.j.get(0).f1 = 2; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.j.get(0).f1 = 2; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify VectorCompositeType", () => { + const viewDU = parentContainerType.toViewDU({...value, k: [{f0: 9, f1: 9}]}); + viewDU.batchHashTreeRoot(); + viewDU.k = vectorCompositeType.toViewDU([{f0: 1, f1: 2}]); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.k = vectorCompositeType.toViewDU([{f0: 1, f1: 2}]); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify 1 item of VectorCompositeType", () => { + const viewDU = parentContainerType.toViewDU({...value, k: [{f0: 1, f1: 3}]}); + viewDU.batchHashTreeRoot(); + viewDU.k.set(0, childContainerType.toViewDU({f0: 1, f1: 2})); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.k.set(0, childContainerType.toViewDU({f0: 1, f1: 2})); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify 1 field 1 item of VectorCompositeType", () => { + const viewDU = parentContainerType.toViewDU({...value, k: [{f0: 1, f1: 3}]}); + viewDU.batchHashTreeRoot(); + viewDU.k.get(0).f1 = 2; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.k.get(0).f1 = 2; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify BitVectorType", () => { + const viewDU = parentContainerType.toViewDU({...value, l: BitArray.fromSingleBit(64, 4)}); + viewDU.batchHashTreeRoot(); + viewDU.l = bitVectorType.toViewDU(BitArray.fromSingleBit(64, 5)); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.l = bitVectorType.toViewDU(BitArray.fromSingleBit(64, 5)); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify BitVectorType bit", () => { + const viewDU = parentContainerType.toViewDU({...value, l: BitArray.fromSingleBit(64, 4)}); + viewDU.batchHashTreeRoot(); + viewDU.l.set(4, false); + viewDU.l.set(5, true); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.l.set(4, false); + viewDU.l.set(5, true); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify BitListType", () => { + const viewDU = parentContainerType.toViewDU({...value, m: BitArray.fromSingleBit(4, 0)}); + viewDU.batchHashTreeRoot(); + viewDU.m = bitListType.toViewDU(BitArray.fromSingleBit(4, 1)); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.m = bitListType.toViewDU(BitArray.fromSingleBit(4, 1)); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify BitListType bit", () => { + const viewDU = parentContainerType.toViewDU({...value, m: 
BitArray.fromSingleBit(4, 0)}); + viewDU.batchHashTreeRoot(); + viewDU.m.set(0, false); + viewDU.m.set(1, true); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again but commit before batchHashTreeRoot() + viewDU.m.set(0, false); + viewDU.m.set(1, true); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); + +describe("ContainerNodeStruct batchHashTreeRoot", function () { + const EpochInf = new UintNumberType(8, {clipInfinity: true}); + + // Ethereum consensus validator type + const containerType = new ContainerNodeStructType({ + pubkey: new ByteVectorType(48), + withdrawalCredentials: new ByteVectorType(32), + effectiveBalance: new UintNumberType(8), + slashed: new BooleanType(), + activationEligibilityEpoch: EpochInf, + activationEpoch: EpochInf, + exitEpoch: EpochInf, + withdrawableEpoch: EpochInf, + }); + const value = { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, + }; + const expectedRoot = containerType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(containerType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify basic type", () => { + const viewDU = containerType.toViewDU({...value, exitEpoch: 3}); + viewDU.batchHashTreeRoot(); + viewDU.exitEpoch *= 1_000_000; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("modify basic type", () => { + const viewDU = containerType.toViewDU({ + ...value, + exitEpoch: value.exitEpoch + 1, + withdrawableEpoch: value.withdrawableEpoch + 1, + }); + viewDU.exitEpoch -= 1; + viewDU.withdrawableEpoch -= 1; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); diff --git a/packages/ssz/test/unit/byType/listBasic/tree.test.ts b/packages/ssz/test/unit/byType/listBasic/tree.test.ts index d7f56b4d..22b5f50d 100644 --- a/packages/ssz/test/unit/byType/listBasic/tree.test.ts +++ b/packages/ssz/test/unit/byType/listBasic/tree.test.ts @@ -240,3 +240,78 @@ describe("ListBasicType.sliceTo", () => { }); } }); + +describe("ListBasicType batchHashTreeRoot", function () { + const value = [1, 2, 3, 4]; + const expectedRoot = ListN64Uint64NumberType.toView(value).hashTreeRoot(); + + it("fresh ViewDU", () => { + expect(ListN64Uint64NumberType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("push then batchHashTreeRoot()", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(3); + viewDU.push(4); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + viewDU.set(0, 1); + viewDU.set(1, 2); + viewDU.set(2, 3); + viewDU.set(3, 4); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("push then modify then batchHashTreeRoot()", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(3); + viewDU.push(44); + viewDU.set(3, 4); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + viewDU.set(3, 44); + viewDU.set(3, 4); + viewDU.commit(); + 
expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(33); + viewDU.push(44); + viewDU.batchHashTreeRoot(); + viewDU.set(2, 3); + viewDU.set(3, 4); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + viewDU.set(2, 33); + viewDU.set(3, 44); + viewDU.commit(); + viewDU.set(2, 3); + viewDU.set(3, 4); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + // similar to a fresh ViewDU but it's good to test + it("sliceTo()", () => { + const viewDU = ListN64Uint64NumberType.defaultViewDU(); + viewDU.push(1); + viewDU.push(2); + viewDU.push(3); + viewDU.push(4); + viewDU.push(5); + viewDU.batchHashTreeRoot(); + expect(viewDU.sliceTo(3).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts index 21fab6f1..f1de130e 100644 --- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts +++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts @@ -1,5 +1,13 @@ import {expect} from "chai"; -import {CompositeView, ContainerType, ListCompositeType, toHexString, UintNumberType, ValueOf} from "../../../../src"; +import { + CompositeView, + ContainerNodeStructType, + ContainerType, + ListCompositeType, + toHexString, + UintNumberType, + ValueOf, +} from "../../../../src"; import {ArrayCompositeTreeViewDU} from "../../../../src/viewDU/arrayComposite"; import {ssz} from "../../../lodestarTypes/primitive"; import {runViewTestMutation} from "../runViewTestMutation"; @@ -9,7 +17,7 @@ const containerUintsType = new ContainerType( {a: uint64NumInfType, b: uint64NumInfType}, {typeName: "Container(uint64)"} ); -const listOfContainersType = new ListCompositeType(containerUintsType, 4); +const listOfContainersType = new ListCompositeType(containerUintsType, 4, {typeName: "ListCompositeType(Container)"}); runViewTestMutation({ type: listOfContainersType, @@ -213,3 +221,115 @@ describe("ListCompositeType.sliceFrom", () => { } }); }); + +describe("ListCompositeType batchHashTreeRoot", () => { + const value = [ + {a: 1, b: 2}, + {a: 3, b: 4}, + ]; + const containerStructUintsType = new ContainerNodeStructType( + {a: uint64NumInfType, b: uint64NumInfType}, + {typeName: "ContainerNodeStruct(uint64)"} + ); + const listOfContainersType2 = new ListCompositeType(containerStructUintsType, 4, { + typeName: "ListCompositeType(ContainerNodeStructType)", + }); + + for (const list of [listOfContainersType, listOfContainersType2]) { + const typeName = list.typeName; + const expectedRoot = list.toView(value).hashTreeRoot(); + + it(`${typeName} - fresh ViewDU`, () => { + expect(listOfContainersType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - push then batchHashTreeRoot()`, () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 3, b: 4})); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign again, commit() then batchHashTreeRoot() + viewDU.set(0, containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); + viewDU.commit(); + 
expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - full hash then modify full non-hashed child element`, () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.batchHashTreeRoot(); + viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - full hash then modify partially hashed child element`, () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.batchHashTreeRoot(); + const item1 = containerUintsType.toViewDU({a: 3, b: 44}); + item1.batchHashTreeRoot(); + item1.b = 4; + viewDU.set(1, item1); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + const item2 = viewDU.get(1); + item2.a = 3; + item2.b = 4; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - full hash then modify full hashed child element`, () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.batchHashTreeRoot(); + const item1 = containerUintsType.toViewDU({a: 3, b: 4}); + item1.batchHashTreeRoot(); + viewDU.set(1, item1); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + const newItem = containerUintsType.toViewDU({a: 3, b: 4}); + viewDU.set(1, newItem); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - full hash then modify partial child element`, () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); + viewDU.batchHashTreeRoot(); + viewDU.get(1).a = 3; + viewDU.get(1).b = 4; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + viewDU.get(1).a = 3; + viewDU.get(1).b = 4; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + // similar to a fresh ViewDU but it's good to test + it(`${typeName} - sliceTo()`, () => { + const viewDU = listOfContainersType.defaultViewDU(); + viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); + viewDU.push(containerUintsType.toViewDU({a: 3, b: 4})); + viewDU.push(containerUintsType.toViewDU({a: 5, b: 6})); + viewDU.batchHashTreeRoot(); + expect(viewDU.sliceTo(1).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + } +}); diff --git a/packages/ssz/test/unit/byType/optional/tree.test.ts b/packages/ssz/test/unit/byType/optional/tree.test.ts index c61b9478..5b69d17a 100644 --- a/packages/ssz/test/unit/byType/optional/tree.test.ts +++ b/packages/ssz/test/unit/byType/optional/tree.test.ts @@ -8,7 +8,7 @@ const SimpleObject = new ContainerType({ }); describe("Optional view tests", () 
=> { - // unimplemented + // TODO: implement // eslint-disable-next-line @typescript-eslint/no-unsafe-call it.skip("optional simple type", () => { const type = new OptionalType(byteType); @@ -22,7 +22,7 @@ describe("Optional view tests", () => { expect(toHexString(type.commitViewDU(viewDU).root)).equals(toHexString(root)); }); - // unimplemented + // TODO: implement // eslint-disable-next-line @typescript-eslint/no-unsafe-call it.skip("optional composite type", () => { const type = new OptionalType(SimpleObject); diff --git a/packages/ssz/test/unit/byType/runViewTestMutation.ts b/packages/ssz/test/unit/byType/runViewTestMutation.ts index d6cae2f5..85298774 100644 --- a/packages/ssz/test/unit/byType/runViewTestMutation.ts +++ b/packages/ssz/test/unit/byType/runViewTestMutation.ts @@ -32,15 +32,22 @@ const runViewTestMutationFn = function runViewTestMutation, value: ValueOf, message: string): void { + function assertValidView(view: TreeViewDU, value: ValueOf, message: string, batchHash: boolean): void { expect(type.toJson(view.toValue())).to.deep.equal(type.toJson(value), `Wrong json - ${message}`); expect(toHexString(view.serialize())).to.equal(toHexString(type.serialize(value)), `Wrong serialized - ${message}`); - expect(toHexString(view.hashTreeRoot())).to.equal( - toHexString(type.hashTreeRoot(value)), - `Wrong hashTreeRoot - ${message}` - ); + if (batchHash) { + expect(toHexString(view.batchHashTreeRoot())).to.equal( + toHexString(type.hashTreeRoot(value)), + `Wrong batchHashTreeRoot - ${message}` + ); + } else { + expect(toHexString(view.hashTreeRoot())).to.equal( + toHexString(type.hashTreeRoot(value)), + `Wrong hashTreeRoot - ${message}` + ); + } } // eslint-disable-next-line no-only-tests/no-only-tests @@ -61,46 +68,48 @@ const runViewTestMutationFn = function runViewTestMutation) ?? tvBefore; - assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation"); + assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation", false); if (assertFn) assertFn(tvAfter as CompositeViewDU); }); } - const treeViewDUId = `${id} - TreeViewDU`; - if ((!onlyId || treeViewDUId.includes(onlyId)) && !skipTreeViewDU) { - it(treeViewDUId, () => { - const tvBefore = type.toViewDU(valueBefore) as TreeViewDU; - - // Set to mutable, and edit - const tvAfter = (fn(tvBefore as CompositeViewDU) ?? tvBefore) as CompositeViewDU; - - if (treeViewToStruct) { - const tvAfterStruct = treeViewToStruct(tvAfter); - expect(type.toJson(tvAfterStruct)).to.deep.equal( - type.toJson(valueAfter), - "Wrong value after mutation before commit" - ); - } - - if (assertFn) assertFn(tvAfter as CompositeViewDU); + for (const batchHash of [false, true]) { + const treeViewDUId = `${id} - TreeViewDU, batchHash = ${batchHash}`; + if ((!onlyId || treeViewDUId.includes(onlyId)) && !skipTreeViewDU) { + it(treeViewDUId, () => { + const tvBefore = type.toViewDU(valueBefore) as TreeViewDU; - type.commitViewDU(tvAfter); - assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation"); - - if (assertFn) assertFn(tvAfter as CompositeViewDU); - - if (!skipCloneMutabilityViewDU) { - // Ensure correct mutability of clone and caches // Set to mutable, and edit - const tvBefore2 = type.toViewDU(valueBefore) as TreeViewDU; - const tvAfter2 = (fn(tvBefore2 as CompositeViewDU) ?? tvBefore2) as CompositeViewDU; - // Drop changes - (tvAfter2 as TreeViewDU).clone(); - // Assert same value as before - assertValidView(tvAfter2 as TreeViewDU, valueBefore, "dropped mutation"); - } - }); + const tvAfter = (fn(tvBefore as CompositeViewDU) ?? 
tvBefore) as CompositeViewDU; + + if (treeViewToStruct) { + const tvAfterStruct = treeViewToStruct(tvAfter); + expect(type.toJson(tvAfterStruct)).to.deep.equal( + type.toJson(valueAfter), + "Wrong value after mutation before commit" + ); + } + + if (assertFn) assertFn(tvAfter as CompositeViewDU); + + type.commitViewDU(tvAfter); + assertValidView(tvAfter as TreeViewDU, valueAfter, "after mutation", batchHash); + + if (assertFn) assertFn(tvAfter as CompositeViewDU); + + if (!skipCloneMutabilityViewDU) { + // Ensure correct mutability of clone and caches + // Set to mutable, and edit + const tvBefore2 = type.toViewDU(valueBefore) as TreeViewDU; + const tvAfter2 = (fn(tvBefore2 as CompositeViewDU) ?? tvBefore2) as CompositeViewDU; + // Drop changes + (tvAfter2 as TreeViewDU).clone(); + // Assert same value as before + assertValidView(tvAfter2 as TreeViewDU, valueBefore, "dropped mutation", batchHash); + } + }); + } } } }); diff --git a/packages/ssz/test/unit/byType/vectorBasic/tree.test.ts b/packages/ssz/test/unit/byType/vectorBasic/tree.test.ts new file mode 100644 index 00000000..69cebbd8 --- /dev/null +++ b/packages/ssz/test/unit/byType/vectorBasic/tree.test.ts @@ -0,0 +1,67 @@ +import {expect} from "chai"; +import {UintNumberType, VectorBasicType} from "../../../../src"; +import {runViewTestMutation} from "../runViewTestMutation"; + +const uint64NumInf = new UintNumberType(8, {clipInfinity: true}); +const vectorType = new VectorBasicType(uint64NumInf, 8); + +runViewTestMutation({ + type: vectorType, + mutations: [ + { + id: "set basic", + valueBefore: [1, 2, 3, 4, 5, 6, 7, 8], + valueAfter: [0, 1, 2, 3, 4, 5, 6, 7], + fn: (tv) => { + tv.set(0, 0); + tv.set(1, 1); + tv.set(2, 2); + tv.set(3, 3); + tv.set(4, 4); + tv.set(5, 5); + tv.set(6, 6); + tv.set(7, 7); + }, + }, + { + id: "swap two indices", + valueBefore: [1, 2, 3, 4, 5, 6, 7, 8], + valueAfter: [8, 2, 3, 4, 5, 6, 7, 1], + fn: (tv) => { + const i0 = tv.get(0); + const i7 = tv.get(7); + tv.set(0, i7); + tv.set(7, i0); + }, + }, + ], +}); + +describe("VectorBasicType batchHashTreeRoot", () => { + const value = [0, 1, 2, 3, 4, 5, 6, 7, 8]; + const expectedRoot = vectorType.hashTreeRoot(value); + + it("fresh ViewDU", () => { + expect(vectorType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it("full hash then modify", () => { + const viewDU = vectorType.defaultViewDU(); + viewDU.hashTreeRoot(); + viewDU.set(0, 0); + viewDU.set(1, 1); + viewDU.set(2, 2); + viewDU.set(3, 3); + viewDU.set(4, 4); + viewDU.set(5, 5); + viewDU.set(6, 6); + viewDU.set(7, 7); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot() + viewDU.set(0, 0); + viewDU.set(7, 7); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); +}); diff --git a/packages/ssz/test/unit/byType/vectorComposite/tree.test.ts b/packages/ssz/test/unit/byType/vectorComposite/tree.test.ts new file mode 100644 index 00000000..b013cae5 --- /dev/null +++ b/packages/ssz/test/unit/byType/vectorComposite/tree.test.ts @@ -0,0 +1,121 @@ +import {expect} from "chai"; +import {ContainerNodeStructType, ContainerType, UintNumberType, ValueOf, VectorCompositeType} from "../../../../src"; +import {runViewTestMutation} from "../runViewTestMutation"; + +const uint64NumInfType = new UintNumberType(8, {clipInfinity: true}); +const containerUintsType = new ContainerType( + {a: uint64NumInfType, b: uint64NumInfType}, + {typeName: "Container(uint64)"} +); +const 
vectorOfContainersType = new VectorCompositeType(containerUintsType, 2, {typeName: "VectorComposite(Container)"}); + +runViewTestMutation({ + type: vectorOfContainersType, + treeViewToStruct: (tv) => { + const arr: ValueOf = []; + for (let i = 0; i < tv.length; i++) { + const item = tv.get(i); + arr.push({a: item.a, b: item.b}); + } + return arr; + }, + mutations: [ + { + id: "set", + valueBefore: [ + {a: 1, b: 2}, + {a: 3, b: 4}, + ], + valueAfter: [ + {a: 5, b: 6}, + {a: 7, b: 8}, + ], + fn: (tv) => { + tv.set(0, containerUintsType.toViewDU({a: 5, b: 6})); + tv.set(1, containerUintsType.toViewDU({a: 7, b: 8})); + }, + }, + { + id: "set child properties", + valueBefore: [ + {a: 1, b: 2}, + {a: 3, b: 4}, + ], + valueAfter: [ + {a: 5, b: 2}, + {a: 3, b: 8}, + ], + fn: (tv) => { + tv.get(0).a = 5; + tv.get(1).b = 8; + }, + }, + { + id: "swap indices", + valueBefore: [ + {a: 1, b: 2}, + {a: 3, b: 4}, + ], + valueAfter: [ + {a: 3, b: 4}, + {a: 1, b: 2}, + ], + fn: (tv) => { + const item0 = tv.get(0); + const item1 = tv.get(1); + tv.set(0, item1); + tv.set(1, item0); + }, + }, + ], +}); + +describe("VectorCompositeType batchHashTreeRoot", () => { + const value = [ + {a: 1, b: 2}, + {a: 3, b: 4}, + ]; + const containerUintsType = new ContainerNodeStructType( + {a: uint64NumInfType, b: uint64NumInfType}, + {typeName: "ContainerNodeStruct(uint64)"} + ); + const vectorOfContainersType2 = new VectorCompositeType(containerUintsType, 2, { + typeName: "VectorComposite(ContainerNodeStruct)", + }); + for (const vector of [vectorOfContainersType, vectorOfContainersType2]) { + const typeName = vector.typeName; + const expectedRoot = vectorOfContainersType.toView(value).hashTreeRoot(); + + it(`${typeName} - fresh ViewDU`, () => { + expect(vectorOfContainersType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - modify 1 full element`, () => { + const viewDU = vectorOfContainersType.toViewDU([ + {a: 1, b: 2}, + {a: 0, b: 0}, + ]); + viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot(); + viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + + it(`${typeName} - modify 1 property of 1 element`, () => { + const viewDU = vectorOfContainersType.toViewDU([ + {a: 1, b: 2}, + {a: 3, b: 0}, + ]); + viewDU.get(1).b = 4; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + + // assign the same value again, commit() then batchHashTreeRoot(); + viewDU.get(1).b = 4; + viewDU.commit(); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); + }); + } +}); diff --git a/packages/ssz/test/unit/eth2/beaconState.test.ts b/packages/ssz/test/unit/eth2/beaconState.test.ts new file mode 100644 index 00000000..9b886f82 --- /dev/null +++ b/packages/ssz/test/unit/eth2/beaconState.test.ts @@ -0,0 +1,200 @@ +import {expect} from "chai"; +import {BeaconState} from "../../lodestarTypes/deneb/sszTypes"; +import {ListUintNum64Type} from "../../../src/type/listUintNum64"; +import {altair, phase0, ssz} from "../../lodestarTypes"; +import {BitArray, fromHexString} from "../../../src"; + +const VALIDATOR_REGISTRY_LIMIT = 1099511627776; +export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT); + +describe("BeaconState ViewDU batchHashTreeRoot", function () { + const view = BeaconState.defaultView(); + const viewDU 
= BeaconState.defaultViewDU(); + + it("BeaconState ViewDU should have same hashTreeRoot() to View", () => { + // genesisTime + viewDU.genesisTime = view.genesisTime = 1e9; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // genesisValidatorsRoot + viewDU.genesisValidatorsRoot = view.genesisValidatorsRoot = Buffer.alloc(32, 1); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // fork + const fork: phase0.Fork = { + epoch: 1000, + previousVersion: fromHexString("0x03001020"), + currentVersion: fromHexString("0x04001020"), + }; + view.fork = BeaconState.fields.fork.toView(fork); + viewDU.fork = BeaconState.fields.fork.toViewDU(fork); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // latestBlockHeader + const latestBlockHeader: phase0.BeaconBlockHeader = { + slot: 1000, + proposerIndex: 1, + parentRoot: fromHexString("0xac80c66f413218e2c9c7bcb2408ccdceacf3bcd7e7df58474e0c6aa9d7f328a0"), + stateRoot: fromHexString("0xed29eed3dbee72caf3b13df84d01ebda1482dbd0ce084e1ce8862b4acb740ed8"), + bodyRoot: fromHexString("0x32c644ca1b5d1583d445e9d41c81b3e98465fefad4f0db16084cbce7f1b7b849"), + }; + view.latestBlockHeader = BeaconState.fields.latestBlockHeader.toView(latestBlockHeader); + viewDU.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU(latestBlockHeader); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // blockRoots + const blockRoots = ssz.phase0.HistoricalBlockRoots.defaultValue(); + blockRoots[0] = fromHexString("0x1234"); + view.blockRoots = ssz.phase0.HistoricalBlockRoots.toView(blockRoots); + viewDU.blockRoots = ssz.phase0.HistoricalBlockRoots.toViewDU(blockRoots); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // stateRoots + const stateRoots = ssz.phase0.HistoricalStateRoots.defaultValue(); + stateRoots[0] = fromHexString("0x5678"); + view.stateRoots = ssz.phase0.HistoricalStateRoots.toView(stateRoots); + viewDU.stateRoots = ssz.phase0.HistoricalStateRoots.toViewDU(stateRoots); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // historical_roots Frozen in Capella, replaced by historical_summaries + // Eth1 + const eth1Data: phase0.Eth1Data = { + depositRoot: fromHexString("0x1234"), + depositCount: 1000, + blockHash: fromHexString("0x5678"), + }; + view.eth1Data = BeaconState.fields.eth1Data.toView(eth1Data); + viewDU.eth1Data = BeaconState.fields.eth1Data.toViewDU(eth1Data); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // Eth1DataVotes + const eth1DataVotes = ssz.phase0.Eth1DataVotes.defaultValue(); + eth1DataVotes[0] = eth1Data; + view.eth1DataVotes = ssz.phase0.Eth1DataVotes.toView(eth1DataVotes); + viewDU.eth1DataVotes = ssz.phase0.Eth1DataVotes.toViewDU(eth1DataVotes); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // Eth1DepositIndex + view.eth1DepositIndex = 1000; + viewDU.eth1DepositIndex = 1000; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // validators + const validator = { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, + }; + view.validators = BeaconState.fields.validators.toView([validator]); + viewDU.validators = BeaconState.fields.validators.toViewDU([validator]); + 
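+    // Note: in these test lodestarTypes, `validators` is expected to be backed by the
+    // batch-hash ListValidatorTreeViewDU, so this assertion also covers the batch flow.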
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // balances
+    view.balances = BeaconState.fields.balances.toView([1000, 2000, 3000]);
+    viewDU.balances = Balances.toViewDU([1000, 2000, 3000]);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // randaoMixes
+    const randaoMixes = ssz.phase0.RandaoMixes.defaultValue();
+    randaoMixes[0] = fromHexString("0x1234");
+    view.randaoMixes = ssz.phase0.RandaoMixes.toView(randaoMixes);
+    viewDU.randaoMixes = ssz.phase0.RandaoMixes.toViewDU(randaoMixes);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // slashings
+    view.slashings = BeaconState.fields.slashings.toView(Array.from({length: 64}, () => BigInt(1000)));
+    viewDU.slashings = BeaconState.fields.slashings.toViewDU(Array.from({length: 64}, () => BigInt(1000)));
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // previousEpochParticipation
+    view.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toView([1, 2, 3]);
+    viewDU.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toViewDU([1, 2, 3]);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // currentEpochParticipation
+    view.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toView([1, 2, 3]);
+    viewDU.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toViewDU([1, 2, 3]);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // justificationBits
+    view.justificationBits = BeaconState.fields.justificationBits.toView(
+      BitArray.fromBoolArray([true, false, true, true])
+    );
+    viewDU.justificationBits = BeaconState.fields.justificationBits.toViewDU(
+      BitArray.fromBoolArray([true, false, true, true])
+    );
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // previousJustifiedCheckpoint
+    const checkpoint: phase0.Checkpoint = {
+      epoch: 1000,
+      root: fromHexString("0x1234"),
+    };
+    view.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toView(checkpoint);
+    viewDU.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toViewDU(checkpoint);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // currentJustifiedCheckpoint
+    view.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toView(checkpoint);
+    viewDU.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toViewDU(checkpoint);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // finalizedCheckpoint
+    view.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toView(checkpoint);
+    viewDU.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toViewDU(checkpoint);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // inactivityScores
+    view.inactivityScores = BeaconState.fields.inactivityScores.toView([1, 2, 3]);
+    viewDU.inactivityScores = BeaconState.fields.inactivityScores.toViewDU([1, 2, 3]);
+    expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot());
+
+    // currentSyncCommittee
+    const syncCommittee: altair.SyncCommittee = {
+      pubkeys: Array.from({length: 32}, () => Buffer.alloc(48, 0xaa)),
+      aggregatePubkey: fromHexString("0x1234"),
+    };
+    view.currentSyncCommittee = BeaconState.fields.currentSyncCommittee.toView(syncCommittee);
+    viewDU.currentSyncCommittee = 
BeaconState.fields.currentSyncCommittee.toViewDU(syncCommittee); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // nextSyncCommittee + view.nextSyncCommittee = BeaconState.fields.nextSyncCommittee.toView(syncCommittee); + viewDU.nextSyncCommittee = BeaconState.fields.nextSyncCommittee.toViewDU(syncCommittee); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // latestExecutionPayloadHeader + const latestExecutionPayloadHeader = BeaconState.fields.latestExecutionPayloadHeader.defaultValue(); + latestExecutionPayloadHeader.blockNumber = 1000; + latestExecutionPayloadHeader.parentHash = fromHexString( + "0xac80c66f413218e2c9c7bcb2408ccdceacf3bcd7e7df58474e0c6aa9d7f328a0" + ); + view.latestExecutionPayloadHeader = + BeaconState.fields.latestExecutionPayloadHeader.toView(latestExecutionPayloadHeader); + viewDU.latestExecutionPayloadHeader = + BeaconState.fields.latestExecutionPayloadHeader.toViewDU(latestExecutionPayloadHeader); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // nextWithdrawalIndex + viewDU.nextWithdrawalIndex = view.nextWithdrawalIndex = 1000; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // nextWithdrawalValidatorIndex + viewDU.nextWithdrawalValidatorIndex = view.nextWithdrawalValidatorIndex = 1000; + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + + // historicalSummaries + const historicalSummaries = { + blockSummaryRoot: fromHexString("0xac80c66f413218e2c9c7bcb2408ccdceacf3bcd7e7df58474e0c6aa9d7f328a0"), + stateSummaryRoot: fromHexString("0x32c644ca1b5d1583d445e9d41c81b3e98465fefad4f0db16084cbce7f1b7b849"), + }; + view.historicalSummaries = BeaconState.fields.historicalSummaries.toView([historicalSummaries]); + viewDU.historicalSummaries = BeaconState.fields.historicalSummaries.toViewDU([historicalSummaries]); + expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(view.hashTreeRoot()); + }); +}); diff --git a/packages/ssz/test/unit/eth2/validators.test.ts b/packages/ssz/test/unit/eth2/validators.test.ts index a0878f9c..1cccd691 100644 --- a/packages/ssz/test/unit/eth2/validators.test.ts +++ b/packages/ssz/test/unit/eth2/validators.test.ts @@ -3,22 +3,21 @@ import {describe, it} from "mocha"; import {toHexString, ListCompositeType, ValueOf, CompositeViewDU} from "../../../src"; import {ValidatorContainer, ValidatorNodeStruct} from "../../lodestarTypes/phase0/sszTypes"; +type Validator = ValueOf; +const validator: Validator = { + pubkey: Buffer.alloc(48, 0xaa), + withdrawalCredentials: Buffer.alloc(32, 0xbb), + effectiveBalance: 32e9, + slashed: false, + activationEligibilityEpoch: 1_000_000, + activationEpoch: 2_000_000, + exitEpoch: 3_000_000, + withdrawableEpoch: 4_000_000, +}; + describe("Container with BranchNodeStruct", function () { this.timeout(0); - type Validator = ValueOf; - - const validator: Validator = { - pubkey: Buffer.alloc(48, 0xaa), - withdrawalCredentials: Buffer.alloc(32, 0xbb), - effectiveBalance: 32e9, - slashed: false, - activationEligibilityEpoch: 1_000_000, - activationEpoch: 2_000_000, - exitEpoch: 3_000_000, - withdrawableEpoch: 4_000_000, - }; - const validatorViewDU = ValidatorContainer.toViewDU(validator); const validatorNodeStructViewDU = ValidatorNodeStruct.toViewDU(validator); @@ -34,6 +33,7 @@ describe("Container with BranchNodeStruct", function () { getExitEpoch: (treeBacked) => treeBacked.exitEpoch, getPubkey: (treeBacked) => toHexString(treeBacked.pubkey), hashTreeRoot: (treeBacked) 
=> treeBacked.hashTreeRoot(),
+    batchHashTreeRoot: (treeBacked) => treeBacked.batchHashTreeRoot(),
     getProof: (treeBacked) => treeBacked.createProof(validatorProofJsonPaths),
     serialize: (treeBacked) => treeBacked.serialize(),
   };
diff --git a/packages/ssz/test/unit/regressions.test.ts b/packages/ssz/test/unit/regressions.test.ts
index 4f5ecaf9..6dc0c22f 100644
--- a/packages/ssz/test/unit/regressions.test.ts
+++ b/packages/ssz/test/unit/regressions.test.ts
@@ -32,6 +32,8 @@ describe("Regressions / known issues", () => {
     const bytes = SyncCommitteeBits.serialize(bitArray);
     const rootByTreeBacked = SyncCommitteeBits.deserializeToViewDU(bytes).hashTreeRoot();
     expect(toHexString(rootByStruct)).to.be.equal(toHexString(rootByTreeBacked), "Inconsistent hashTreeRoot");
+    const rootByBatch = SyncCommitteeBits.deserializeToViewDU(bytes).batchHashTreeRoot();
+    expect(toHexString(rootByStruct)).to.be.equal(toHexString(rootByBatch), "Inconsistent batchHashTreeRoot");
   });
 
   it("converts bit arrays to tree", function () {
diff --git a/packages/ssz/test/unit/unchangedViewDUs.test.ts b/packages/ssz/test/unit/unchangedViewDUs.test.ts
new file mode 100644
index 00000000..f1e57a36
--- /dev/null
+++ b/packages/ssz/test/unit/unchangedViewDUs.test.ts
@@ -0,0 +1,29 @@
+import {expect} from "chai";
+import * as sszAltair from "../lodestarTypes/altair/sszTypes";
+import {getRandomState} from "../utils/generateEth2Objs";
+
+describe("Unchanged ViewDUs", () => {
+  const state = sszAltair.BeaconState.toViewDU(getRandomState(100));
+
+  it.skip("should not recompute batchHashTreeRoot() when no fields are changed", () => {
+    const root = state.batchHashTreeRoot();
+    // this causes viewsChanged inside the BeaconState container
+    state.validators.length;
+    state.balances.length;
+    // but we should not recompute the root, we should get it from the cache instead
+    const root2 = state.batchHashTreeRoot();
+    expect(root2).to.equal(root, "should not recompute batchHashTreeRoot() when no fields are changed");
+  });
+
+  it("handle childViewDU.batchHashTreeRoot()", () => {
+    const state2 = state.clone();
+    state2.latestBlockHeader.stateRoot = Buffer.alloc(32, 3);
+    const root2 = state2.batchHashTreeRoot();
+    const state3 = state.clone();
+    state3.latestBlockHeader.stateRoot = Buffer.alloc(32, 3);
+    // batchHashTreeRoot() also does the commit()
+    state3.latestBlockHeader.commit();
+    const root3 = state3.batchHashTreeRoot();
+    expect(root3).to.be.deep.equal(root2);
+  });
+});
diff --git a/yarn.lock b/yarn.lock
index 7e92c77c..fef8f425 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1244,6 +1244,11 @@
     "@babel/helper-validator-identifier" "^7.24.5"
     to-fast-properties "^2.0.0"
 
+"@chainsafe/as-sha256@^0.4.1", "@chainsafe/as-sha256@^0.4.2":
+  version "0.4.2"
+  resolved "https://registry.yarnpkg.com/@chainsafe/as-sha256/-/as-sha256-0.4.2.tgz#21ad1371e2245e430c1a554a05f10d333c6f42cc"
+  integrity sha512-HJ8GZBRjLeWtRsAXf3EbNsNzmTGpzTFjfpSf4yHkLYC+E52DhT6hwz+7qpj6I/EmFzSUm5tYYvT9K8GZokLQCQ==
+
 "@chainsafe/babel-plugin-inline-binary-import@^1.0.3":
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/@chainsafe/babel-plugin-inline-binary-import/-/babel-plugin-inline-binary-import-1.0.3.tgz#08dde20d91cf5d18f2c253edf32547943a16e409"
@@ -1276,6 +1281,22 @@
   "@chainsafe/hashtree-linux-arm64-gnu" "1.0.1"
   "@chainsafe/hashtree-linux-x64-gnu" "1.0.1"
 
+"@chainsafe/persistent-merkle-tree@^0.7.1", "@chainsafe/persistent-merkle-tree@^0.7.2":
+  version "0.7.2"
+  resolved 
"https://registry.yarnpkg.com/@chainsafe/persistent-merkle-tree/-/persistent-merkle-tree-0.7.2.tgz#f0ef91daf36752f827432333cbc965f4bf6e750e" + integrity sha512-BUAqrmSUmy6bZhXxnhpR+aYoEDdCeS1dQvq/aje0CDEB14ZHF9UVN2mL9MolOD0ANUiP1OaPG3KfVBxvuW8aTg== + dependencies: + "@chainsafe/as-sha256" "^0.4.2" + "@noble/hashes" "^1.3.0" + +"@chainsafe/ssz@0.16.0": + version "0.16.0" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.16.0.tgz#262c491ac037777a16e8d8db479da2ba27539b8d" + integrity sha512-CgTDyrkbAKvrKwHxPT5rerXAHP3NB+uOvpnN9Gn8aJ/4TGOKhOboj4131bSFUZ679uPJ6pu6391cvInuOdrglw== + dependencies: + "@chainsafe/as-sha256" "^0.4.2" + "@chainsafe/persistent-merkle-tree" "^0.7.2" + "@chainsafe/ssz@^0.15.1": version "0.15.1" resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.15.1.tgz#008a711c3bcdc0d207cd4be15108870b0b1c60c0" From 6aaa7bd37bfbed3038badceeb9b08a611ec14dda Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 9 Aug 2024 15:47:05 +0700 Subject: [PATCH 092/113] chore: add benchmark --- .../test/perf/hasher.test.ts | 43 +++- .../ssz/test/perf/eth2/beaconState.test.ts | 214 ++++++++++++++++++ 2 files changed, 254 insertions(+), 3 deletions(-) create mode 100644 packages/ssz/test/perf/eth2/beaconState.test.ts diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index bb8c3ecf..ac14b161 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -1,10 +1,10 @@ import {itBench} from "@dapplion/benchmark"; -import {HashObject, uint8ArrayToHashObject} from "../../src/hasher"; +import {HashObject, setHasher, uint8ArrayToHashObject} from "../../src/hasher"; import {hasher as asShaHasher} from "../../src/hasher/as-sha256"; import {hasher as nobleHasher} from "../../src/hasher/noble"; import {hasher as hashtreeHasher} from "../../src/hasher/hashtree"; import {buildComparisonTrees} from "../utils/tree"; -import { HashComputationLevel, getHashComputations } from "../../src"; +import {HashComputationLevel, getHashComputations} from "../../src"; describe("hasher", function () { this.timeout(0); @@ -65,4 +65,41 @@ describe("hasher", function () { } }); -// TODO - batch: test more methods +describe.only("hashtree", function () { + itBench({ + id: `getHashComputations`, + beforeEach: () => { + const [tree] = buildComparisonTrees(16); + return tree; + }, + fn: (tree) => { + const hcByLevel: HashComputationLevel[] = []; + getHashComputations(tree, 0, hcByLevel); + }, + }); + + itBench({ + id: `executeHashComputations - hashtree`, + beforeEach: () => { + const [tree] = buildComparisonTrees(16); + return tree; + }, + fn: (tree) => { + const hcByLevel: HashComputationLevel[] = []; + getHashComputations(tree, 0, hcByLevel); + hashtreeHasher.executeHashComputations(hcByLevel); + }, + }); + + itBench({ + id: `root - hashtree`, + beforeEach: () => { + const [tree] = buildComparisonTrees(16); + setHasher(hashtreeHasher); + return tree; + }, + fn: (tree) => { + tree.root; + }, + }); +}); diff --git a/packages/ssz/test/perf/eth2/beaconState.test.ts b/packages/ssz/test/perf/eth2/beaconState.test.ts new file mode 100644 index 00000000..32d8b031 --- /dev/null +++ b/packages/ssz/test/perf/eth2/beaconState.test.ts @@ -0,0 +1,214 @@ +import {itBench, setBenchOpts} from "@dapplion/benchmark"; +import {HashComputationLevel, executeHashComputations, HashComputationGroup} from "@chainsafe/persistent-merkle-tree"; +import {BeaconState} from 
"../../lodestarTypes/altair/sszTypes"; +import {BitArray, CompositeViewDU, toHexString} from "../../../src"; +import {preset} from "../../lodestarTypes/params"; +const {SLOTS_PER_HISTORICAL_ROOT, EPOCHS_PER_ETH1_VOTING_PERIOD, SLOTS_PER_EPOCH} = preset; + +const vc = 200_000; +const numModified = vc / 20; +// every we increase vc, need to change this value from "recursive hash" test +const expectedRoot = "0x759d635af161ac1e4f4af11aa7721fd4996253af50f8a81e5003bbb4cbcaae42"; + +/** + * This simulates a BeaconState being modified after an epoch transition in lodestar + * The fresh tree batch hash bechmark is in packages/persistent-merkle-tree/test/perf/node.test.ts + * Note that this benchmark is not very stable because we cannot apply runsFactor as once commit() we + * cannot compute HashComputationGroup again. + * Increasing number of validators could be OOM since we have to create BeaconState every time + */ +describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numModified}`, function () { + setBenchOpts({ + minMs: 20_000, + }); + + itBench({ + id: `BeaconState ViewDU hashTreeRoot() vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + state.hashTreeRoot(); + if (toHexString(state.node.root) !== expectedRoot) { + throw new Error("hashTreeRoot does not match expectedRoot"); + } + }, + }); + + itBench({ + id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + state.commit(); + }, + }); + + itBench({ + id: `BeaconState ViewDU validator tree creation vc=${numModified}`, + beforeEach: () => { + const state = createPartiallyModifiedDenebState(); + state.commit(); + return state; + }, + fn: (state: CompositeViewDU) => { + const validators = state.validators; + for (let i = 0; i < numModified; i++) { + validators.getReadonly(i).node.left; + } + }, + }); + + const hc = new HashComputationGroup(); + itBench({ + id: `BeaconState ViewDU batchHashTreeRoot vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + // commit() step is inside hashTreeRoot(), reuse HashComputationGroup + if (toHexString(state.batchHashTreeRoot(hc)) !== expectedRoot) { + throw new Error("batchHashTreeRoot does not match expectedRoot"); + } + state.batchHashTreeRoot(hc); + }, + }); + + itBench({ + id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, + beforeEach: () => createPartiallyModifiedDenebState(), + fn: (state: CompositeViewDU) => { + state.commit(0, []); + }, + }); + + itBench({ + id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, + beforeEach: () => { + const state = createPartiallyModifiedDenebState(); + const hcByLevel: HashComputationLevel[] = []; + state.commit(0, hcByLevel); + return hcByLevel; + }, + fn: (hcByLevel) => { + executeHashComputations(hcByLevel); + }, + }); +}); + +let originalState: CompositeViewDU | null = null; +function createPartiallyModifiedDenebState(): CompositeViewDU { + if (originalState === null) { + originalState = createDenebState(vc); + // cache all roots + // the original state is huge, do not call hashTreeRoot() here + originalState.commit(); + originalState.node.root; + } + + const state = originalState.clone(); + state.slot++; + state.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU({ + slot: 1000, + proposerIndex: 1, + parentRoot: Buffer.alloc(32, 0xac), + stateRoot: Buffer.alloc(32, 0xed), + bodyRoot: Buffer.alloc(32, 
+
+let originalState: CompositeViewDU<typeof BeaconState> | null = null;
+function createPartiallyModifiedDenebState(): CompositeViewDU<typeof BeaconState> {
+  if (originalState === null) {
+    originalState = createDenebState(vc);
+    // cache all roots
+    // the original state is huge, do not call hashTreeRoot() here
+    originalState.commit();
+    originalState.node.root;
+  }
+
+  const state = originalState.clone();
+  state.slot++;
+  state.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU({
+    slot: 1000,
+    proposerIndex: 1,
+    parentRoot: Buffer.alloc(32, 0xac),
+    stateRoot: Buffer.alloc(32, 0xed),
+    bodyRoot: Buffer.alloc(32, 0x32),
+  });
+  state.blockRoots.set(0, Buffer.alloc(32, 0x01));
+  state.stateRoots.set(0, Buffer.alloc(32, 0x01));
+  state.historicalRoots.set(0, Buffer.alloc(32, 0x01));
+  for (let i = 0; i < numModified; i++) {
+    state.validators.get(i).effectiveBalance += 1e9;
+  }
+  state.balances = BeaconState.fields.balances.toViewDU(Array.from({length: vc}, () => 32e9));
+
+  state.eth1Data = BeaconState.fields.eth1Data.toViewDU({
+    depositRoot: Buffer.alloc(32, 0x02),
+    depositCount: 1000,
+    blockHash: Buffer.alloc(32, 0x03),
+  });
+  state.eth1DataVotes.set(0, state.eth1Data);
+  state.eth1DepositIndex++;
+  state.randaoMixes.set(0, Buffer.alloc(32, 0x02));
+  state.slashings.set(0, BigInt(1e9));
+
+  state.justificationBits = BeaconState.fields.justificationBits.toViewDU(
+    BitArray.fromBoolArray([true, false, true, true])
+  );
+  state.previousJustifiedCheckpoint = BeaconState.fields.previousJustifiedCheckpoint.toViewDU({
+    epoch: 1000,
+    root: Buffer.alloc(32, 0x01),
+  });
+  state.currentJustifiedCheckpoint = BeaconState.fields.currentJustifiedCheckpoint.toViewDU({
+    epoch: 1000,
+    root: Buffer.alloc(32, 0x01),
+  });
+  state.finalizedCheckpoint = BeaconState.fields.finalizedCheckpoint.toViewDU({
+    epoch: 1000,
+    root: Buffer.alloc(32, 0x01),
+  });
+  return state;
+}
+
+function createDenebState(vc: number): CompositeViewDU<typeof BeaconState> {
+  const state = BeaconState.defaultViewDU();
+  state.genesisTime = 1e9;
+  state.genesisValidatorsRoot = Buffer.alloc(32, 1);
+  state.slot = 1_000_000;
+  state.fork = BeaconState.fields.fork.toViewDU({
+    epoch: 1000,
+    previousVersion: Buffer.alloc(4, 0x03),
+    currentVersion: Buffer.alloc(4, 0x04),
+  });
+  state.latestBlockHeader = BeaconState.fields.latestBlockHeader.toViewDU({
+    slot: 1000,
+    proposerIndex: 1,
+    parentRoot: Buffer.alloc(32, 0xac),
+    stateRoot: Buffer.alloc(32, 0xed),
+    bodyRoot: Buffer.alloc(32, 0x32),
+  });
+  state.blockRoots = BeaconState.fields.blockRoots.toViewDU(
+    Array.from({length: 1_000_000}, () => Buffer.alloc(32, 0x01))
+  );
+  state.stateRoots = BeaconState.fields.stateRoots.toViewDU(
+    Array.from({length: 1_000_000}, () => Buffer.alloc(32, 0x01))
+  );
+  state.historicalRoots = BeaconState.fields.historicalRoots.toViewDU(
+    Array.from({length: 1_000_000}, () => Buffer.alloc(32, 0x01))
+  );
+  state.eth1DataVotes = BeaconState.fields.eth1DataVotes.toViewDU(
+    Array.from({length: EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH}, () => {
+      return {
+        depositRoot: Buffer.alloc(32, 0x04),
+        depositCount: 1000,
+        blockHash: Buffer.alloc(32, 0x05),
+      };
+    })
+  );
+  state.eth1DepositIndex = 1000;
+  const validators = Array.from({length: vc}, () => {
+    return {
+      pubkey: Buffer.alloc(48, 0xaa),
+      withdrawalCredentials: Buffer.alloc(32, 0xbb),
+      effectiveBalance: 32e9,
+      slashed: false,
+      activationEligibilityEpoch: 1_000_000,
+      activationEpoch: 2_000_000,
+      exitEpoch: 3_000_000,
+      withdrawableEpoch: 4_000_000,
+    };
+  });
+  state.validators = BeaconState.fields.validators.toViewDU(validators);
+  state.balances = BeaconState.fields.balances.toViewDU(Array.from({length: vc}, () => 32e9));
+  // randaoMixes
+  state.randaoMixes = BeaconState.fields.randaoMixes.toViewDU(
+    Array.from({length: SLOTS_PER_HISTORICAL_ROOT}, () => Buffer.alloc(32, 0x01))
+  );
+  // slashings
+  state.slashings = BeaconState.fields.slashings.toViewDU(
+    Array.from({length: SLOTS_PER_HISTORICAL_ROOT}, () => BigInt(1e9))
+  );
+  state.previousEpochParticipation = BeaconState.fields.previousEpochParticipation.toViewDU(
+    Array.from({length: vc}, () => 7)
+  );
+  state.currentEpochParticipation = BeaconState.fields.currentEpochParticipation.toViewDU(
+    Array.from({length: vc}, () => 7)
+  );
+  return state;
+}

From 0ee377b828b6daf59c67f1339bbd61ee555e8f92 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Fri, 9 Aug 2024 16:44:37 +0700
Subject: [PATCH 093/113] chore: remove describe.only

---
 packages/persistent-merkle-tree/test/perf/hasher.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
index ac14b161..8e61634f 100644
--- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts
@@ -65,7 +65,7 @@ describe("hasher", function () {
   }
 });
 
-describe.only("hashtree", function () {
+describe("hashtree", function () {
   itBench({
     id: `getHashComputations`,
     beforeEach: () => {

From 04ed55362cc6ab83be72fe7406588162e8395889 Mon Sep 17 00:00:00 2001
From: twoeths
Date: Fri, 16 Aug 2024 15:16:10 +0700
Subject: [PATCH 094/113] fix: do not change hashTreeRoot() (#393)

* chore: increase modified validators to 100k in beaconState.test.ts perf test

* feat: make hashTreeRoot() unchanged

* chore: switch test order in beaconState.test.ts perf test

---
 packages/ssz/src/branchNodeStruct.ts          |  9 +--
 packages/ssz/src/type/containerNodeStruct.ts  | 16 +----
 packages/ssz/src/view/containerNodeStruct.ts  |  4 +-
 .../phase0/viewDU/listValidator.ts            | 11 +++-
 .../ssz/test/perf/eth2/beaconState.test.ts    | 60 ++++++++++---------
 5 files changed, 49 insertions(+), 51 deletions(-)

diff --git a/packages/ssz/src/branchNodeStruct.ts b/packages/ssz/src/branchNodeStruct.ts
index c99779a4..471716c4 100644
--- a/packages/ssz/src/branchNodeStruct.ts
+++ b/packages/ssz/src/branchNodeStruct.ts
@@ -9,18 +9,15 @@ import {hashObjectToUint8Array, Node} from "@chainsafe/persistent-merkle-tree";
 * expensive because the tree has to be recreated every time. 
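 *
 * A rough usage sketch (the value and helper names here are illustrative, not part of this API):
 *
 *   const node = new BranchNodeStruct(valueToNode, validatorValue);
 *   node.rootHashObject; // first access: valueToNode() builds the tree and the hash is cached
 *   node.rootHashObject; // later accesses reuse the cached hash (h0 is already set)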
*/ export class BranchNodeStruct extends Node { - constructor( - private readonly valueToNode: (value: T) => Node, - private readonly hashTreeRootInto: (value: T, node: Node) => void, - readonly value: T - ) { + constructor(private readonly valueToNode: (value: T) => Node, readonly value: T) { // First null value is to save an extra variable to check if a node has a root or not super(null as unknown as number, 0, 0, 0, 0, 0, 0, 0); } get rootHashObject(): HashObject { if (this.h0 === null) { - this.hashTreeRootInto(this.value, this); + const node = this.valueToNode(this.value); + super.applyHash(node.rootHashObject); } return this; } diff --git a/packages/ssz/src/type/containerNodeStruct.ts b/packages/ssz/src/type/containerNodeStruct.ts index c884eea9..86aa0ee4 100644 --- a/packages/ssz/src/type/containerNodeStruct.ts +++ b/packages/ssz/src/type/containerNodeStruct.ts @@ -1,5 +1,4 @@ import {Node} from "@chainsafe/persistent-merkle-tree"; -import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import {Type, ByteViews} from "./abstract"; import {isCompositeType} from "./composite"; import {ContainerType, ContainerOptions, renderContainerTypeName} from "./container"; @@ -74,7 +73,7 @@ export class ContainerNodeStructType tree_deserializeFromBytes(data: ByteViews, start: number, end: number): Node { const value = this.value_deserializeFromBytes(data, start, end); - return new BranchNodeStruct(this.valueToTree.bind(this), this.computeRootInto.bind(this), value); + return new BranchNodeStruct(this.valueToTree.bind(this), value); } // Proofs @@ -95,7 +94,7 @@ export class ContainerNodeStructType super.tree_serializeToBytes({uint8Array, dataView}, 0, node); const value = this.value_deserializeFromBytes({uint8Array, dataView}, 0, uint8Array.length); return { - node: new BranchNodeStruct(this.valueToTree.bind(this), this.computeRootInto.bind(this), value), + node: new BranchNodeStruct(this.valueToTree.bind(this), value), done: true, }; } @@ -107,7 +106,7 @@ export class ContainerNodeStructType } value_toTree(value: ValueOfFields): Node { - return new BranchNodeStruct(this.valueToTree.bind(this), this.computeRootInto.bind(this), value); + return new BranchNodeStruct(this.valueToTree.bind(this), value); } private valueToTree(value: ValueOfFields): Node { @@ -116,13 +115,4 @@ export class ContainerNodeStructType this.value_serializeToBytes({uint8Array, dataView}, 0, value); return super.tree_deserializeFromBytes({uint8Array, dataView}, 0, uint8Array.length); } - - private computeRootInto(value: ValueOfFields, node: Node): void { - if (node.h0 !== null) { - return; - } - - this.hashTreeRootInto(value, this.temporaryRoot, 0); - byteArrayIntoHashObject(this.temporaryRoot, 0, node); - } } diff --git a/packages/ssz/src/view/containerNodeStruct.ts b/packages/ssz/src/view/containerNodeStruct.ts index c45372bc..da83f4d8 100644 --- a/packages/ssz/src/view/containerNodeStruct.ts +++ b/packages/ssz/src/view/containerNodeStruct.ts @@ -60,7 +60,7 @@ export function getContainerTreeViewClass; - this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], node["hashTreeRootInto"], newNodeValue); + this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], newNodeValue); }, }); } @@ -86,7 +86,7 @@ export function getContainerTreeViewClass; - this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], node["hashTreeRootInto"], newNodeValue); + this.tree.rootNode = new BranchNodeStruct(node["valueToNode"], newNodeValue); }, }); } diff --git 
a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index de3fcf38..d7a85284 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -38,6 +38,7 @@ const validatorRoots: Uint8Array[] = []; for (let i = 0; i < PARALLEL_FACTOR; i++) { validatorRoots.push(batchLevel3Bytes.subarray(i * 32, (i + 1) * 32)); } +const validatorRoot = new Uint8Array(32); export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { constructor( @@ -49,6 +50,11 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.hashTreeRoot(); - if (toHexString(state.node.root) !== expectedRoot) { - throw new Error("hashTreeRoot does not match expectedRoot"); + // commit() step is inside hashTreeRoot(), reuse HashComputationGroup + if (toHexString(state.batchHashTreeRoot(hc)) !== expectedRoot) { + throw new Error( + `batchHashTreeRoot ${toHexString(state.batchHashTreeRoot(hc))} does not match expectedRoot ${expectedRoot}` + ); } + state.batchHashTreeRoot(hc); }, }); itBench({ - id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`, + id: `BeaconState ViewDU batchHashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.commit(); + state.commit(0, []); }, }); itBench({ - id: `BeaconState ViewDU validator tree creation vc=${numModified}`, + id: `BeaconState ViewDU batchHashTreeRoot - hash step vc=${vc}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); - state.commit(); - return state; + const hcByLevel: HashComputationLevel[] = []; + state.commit(0, hcByLevel); + return hcByLevel; }, - fn: (state: CompositeViewDU) => { - const validators = state.validators; - for (let i = 0; i < numModified; i++) { - validators.getReadonly(i).node.left; - } + fn: (hcByLevel) => { + executeHashComputations(hcByLevel); }, }); - const hc = new HashComputationGroup(); itBench({ - id: `BeaconState ViewDU batchHashTreeRoot vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot() vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - // commit() step is inside hashTreeRoot(), reuse HashComputationGroup - if (toHexString(state.batchHashTreeRoot(hc)) !== expectedRoot) { - throw new Error("batchHashTreeRoot does not match expectedRoot"); + state.hashTreeRoot(); + if (toHexString(state.node.root) !== expectedRoot) { + throw new Error(`hashTreeRoot ${toHexString(state.node.root)} does not match expectedRoot ${expectedRoot}`); } - state.batchHashTreeRoot(hc); }, }); @@ -73,20 +73,22 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.commit(0, []); + state.commit(); }, }); itBench({ - id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot - validator tree creation vc=${numModified}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); - const hcByLevel: HashComputationLevel[] = []; - state.commit(0, hcByLevel); - return hcByLevel; + state.commit(); + return state; }, - fn: (hcByLevel) => { - executeHashComputations(hcByLevel); + fn: (state: CompositeViewDU) => { + const validators = 
state.validators; + for (let i = 0; i < numModified; i++) { + validators.getReadonly(i).node.left; + } }, }); }); From ad686e6f59cf2f3cfd0247090e3418bd0d417da4 Mon Sep 17 00:00:00 2001 From: twoeths Date: Thu, 22 Aug 2024 16:08:30 +0700 Subject: [PATCH 095/113] fix: handle unmodified validators (#397) * fix: handle unmodified validators for batch hash flow * chore: more comments --- .../phase0/viewDU/listValidator.ts | 33 +++++++++++++------ .../ssz/test/perf/eth2/beaconState.test.ts | 5 +++ 2 files changed, 28 insertions(+), 10 deletions(-) diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index d7a85284..72fbfdb9 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -40,6 +40,11 @@ for (let i = 0; i < PARALLEL_FACTOR; i++) { } const validatorRoot = new Uint8Array(32); +/** + * Similar to ListCompositeTreeViewDU with some differences: + * - if called without params, it's from hashTreeRoot() api call, no need to compute root + * - otherwise it's from batchHashTreeRoot() call, compute validator roots in batch + */ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { constructor( readonly type: ListCompositeType, @@ -69,8 +74,20 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU >; - const indicesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b); - const endBatch = indicesChanged.length - (indicesChanged.length % PARALLEL_FACTOR); + + const indicesChanged: number[] = []; + for (const [index, viewChanged] of viewsChanged) { + // should not have any params here in order not to compute root + viewChanged.commit(); + // `validators.get(i)` was called but it may not modify any property, do not need to compute root + if (viewChanged.node.h0 === null) { + indicesChanged.push(index); + } + } + + // these validators don't have roots, we compute roots in batch + const sortedIndicesChanged = indicesChanged.sort((a, b) => a - b); + const endBatch = sortedIndicesChanged.length - (sortedIndicesChanged.length % PARALLEL_FACTOR); // nodesChanged is sorted by index const nodesChanged: {index: number; node: Node}[] = []; // commit every 16 validators in batch @@ -80,7 +97,7 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU= 0; j--) { - const viewIndex = indicesChanged[i - j]; + const viewIndex = sortedIndicesChanged[i - j]; const indexInBatch = (i - j) % PARALLEL_FACTOR; const viewChanged = viewsChanged.get(viewIndex); if (viewChanged) { - // should not have any params here in order not to compute root - viewChanged.commit(); const branchNodeStruct = viewChanged.node; byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct); nodesChanged.push({index: viewIndex, node: viewChanged.node}); @@ -114,12 +129,10 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU 32e9)); + // remaining validators are accessed with no modification + for (let i = numModified; i < vc; i++) { + state.validators.get(i); + } state.eth1Data = BeaconState.fields.eth1Data.toViewDU({ depositRoot: Buffer.alloc(32, 0x02), From 0b3ba85d59d6a000647cfff24d00b56de6c7e94c Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 26 Aug 2024 15:59:27 +0700 Subject: [PATCH 096/113] fix: separate batch hash and commit logic in listValidator ViewDU --- .../phase0/viewDU/listValidator.ts | 115 ++++++++++-------- 1 file changed, 62 insertions(+), 53 
deletions(-) diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts index 72fbfdb9..c9e0cb73 100644 --- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -6,6 +6,7 @@ import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite"; import {ValidatorNodeStructType, ValidatorType, validatorToChunkBytes} from "../validator"; import {ByteViews} from "../../../../src"; import {ContainerNodeStructTreeViewDU} from "../../../../src/viewDU/containerNodeStruct"; +import {ValidatorIndex} from "../../primitive/types"; /** * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks @@ -79,6 +80,8 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU a - b); - const endBatch = sortedIndicesChanged.length - (sortedIndicesChanged.length % PARALLEL_FACTOR); - // nodesChanged is sorted by index - const nodesChanged: {index: number; node: Node}[] = []; - // commit every 16 validators in batch - for (let i = 0; i < endBatch; i++) { - if (i % PARALLEL_FACTOR === 0) { - batchLevel3Bytes.fill(0); - batchLevel4Bytes.fill(0); - } - const indexInBatch = i % PARALLEL_FACTOR; - const viewIndex = sortedIndicesChanged[i]; - const viewChanged = viewsChanged.get(viewIndex); - if (viewChanged) { - validatorToChunkBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch], viewChanged.value); - } - - if (indexInBatch === PARALLEL_FACTOR - 1) { - // hash level 4, this is populated to pubkeyRoots - digestNLevel(batchLevel4Bytes, 1); - for (let j = 0; j < PARALLEL_FACTOR; j++) { - level3ByteViewsArr[j].uint8Array.set(pubkeyRoots[j], 0); - } - // hash level 3, this is populated to validatorRoots - digestNLevel(batchLevel3Bytes, 3); - // commit all validators in this batch - for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) { - const viewIndex = sortedIndicesChanged[i - j]; - const indexInBatch = (i - j) % PARALLEL_FACTOR; - const viewChanged = viewsChanged.get(viewIndex); - if (viewChanged) { - const branchNodeStruct = viewChanged.node; - byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct); - nodesChanged.push({index: viewIndex, node: viewChanged.node}); - // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal - this.nodes[viewIndex] = viewChanged.node; - } - } - } - } - - // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views - // it's not much different to commit one by one - for (let i = endBatch; i < sortedIndicesChanged.length; i++) { - const viewIndex = sortedIndicesChanged[i]; - const viewChanged = viewsChanged.get(viewIndex); - if (viewChanged) { - // compute root for each validator - viewChanged.type.hashTreeRootInto(viewChanged.value, validatorRoot, 0); - byteArrayIntoHashObject(validatorRoot, 0, viewChanged.node); - nodesChanged.push({index: viewIndex, node: viewChanged.node}); - // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal - this.nodes[viewIndex] = viewChanged.node; - } + const nodesChanged: {index: ValidatorIndex; node: Node}[] = new Array<{index: ValidatorIndex; node: Node}>( + sortedIndicesChanged.length + ); + for (const [i, validatorIndex] of sortedIndicesChanged.entries()) { + nodesChanged[i] = {index: validatorIndex, node: this.nodes[validatorIndex]}; } + doBatchHashTreeRootValidators(sortedIndicesChanged, viewsChanged); // do the 
remaining commit step the same to parent (ArrayCompositeTreeViewDU) const indexes = nodesChanged.map((entry) => entry.index); @@ -168,3 +124,56 @@ export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU> +): void { + const endBatch = indices.length - (indices.length % PARALLEL_FACTOR); + + // commit every 16 validators in batch + for (let i = 0; i < endBatch; i++) { + if (i % PARALLEL_FACTOR === 0) { + batchLevel3Bytes.fill(0); + batchLevel4Bytes.fill(0); + } + const indexInBatch = i % PARALLEL_FACTOR; + const viewIndex = indices[i]; + const validator = validators.get(viewIndex); + if (validator) { + validatorToChunkBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch], validator.value); + } + + if (indexInBatch === PARALLEL_FACTOR - 1) { + // hash level 4, this is populated to pubkeyRoots + digestNLevel(batchLevel4Bytes, 1); + for (let j = 0; j < PARALLEL_FACTOR; j++) { + level3ByteViewsArr[j].uint8Array.set(pubkeyRoots[j], 0); + } + // hash level 3, this is populated to validatorRoots + digestNLevel(batchLevel3Bytes, 3); + // commit all validators in this batch + for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) { + const viewIndex = indices[i - j]; + const indexInBatch = (i - j) % PARALLEL_FACTOR; + const viewChanged = validators.get(viewIndex); + if (viewChanged) { + const branchNodeStruct = viewChanged.node; + byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct); + } + } + } + } + + // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views + // it's not much different to commit one by one + for (let i = endBatch; i < indices.length; i++) { + const viewIndex = indices[i]; + const viewChanged = validators.get(viewIndex); + if (viewChanged) { + // compute root for each validator + viewChanged.type.hashTreeRootInto(viewChanged.value, validatorRoot, 0); + byteArrayIntoHashObject(validatorRoot, 0, viewChanged.node); + } + } +} From ed1b08a8d68f5745d2f448e83232a77c06094a53 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 26 Aug 2024 16:09:36 +0700 Subject: [PATCH 097/113] fix: do not throw error if commit ViewDU without tracking changes --- packages/ssz/src/viewDU/container.ts | 6 +----- packages/ssz/src/viewDU/listBasic.ts | 15 ++++----------- packages/ssz/src/viewDU/listComposite.ts | 13 +++---------- 3 files changed, 8 insertions(+), 26 deletions(-) diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts index 130db3c7..96620b97 100644 --- a/packages/ssz/src/viewDU/container.ts +++ b/packages/ssz/src/viewDU/container.ts @@ -156,11 +156,7 @@ class ContainerTreeViewDU>> extends * Same method to `type/container.ts` that call ViewDU.serializeToBytes() of internal fields. 
*/ serializeToBytes(output: ByteViews, offset: number): number { - // it's the responsibility of consumer to call commit() before calling this method - // if we do the commit() here, it'll lose all HashComputations that we want to batch - if (this.nodesChanged.size !== 0 || this.viewsChanged.size !== 0) { - throw Error(`Must commit changes before serializeToBytes(Uint8Array(${output.uint8Array.length}, ${offset})`); - } + this.commit(); let fixedIndex = offset; let variableIndex = offset + this.type.fixedEnd; diff --git a/packages/ssz/src/viewDU/listBasic.ts b/packages/ssz/src/viewDU/listBasic.ts index d26a5c6f..549168a0 100644 --- a/packages/ssz/src/viewDU/listBasic.ts +++ b/packages/ssz/src/viewDU/listBasic.ts @@ -46,14 +46,11 @@ export class ListBasicTreeViewDU> extends */ sliceTo(index: number): this { if (index < 0) { - throw Error(`Does not support sliceTo() with negative index ${index}`); + throw new Error(`Does not support sliceTo() with negative index ${index}`); } - // it's the responsibility of consumer to call commit() before calling this method - // if we do the commit() here, it'll lose all HashComputations that we want to batch - if (this.nodesChanged.size > 0) { - throw Error(`Must commit changes before sliceTo(${index})`); - } + // Commit before getting rootNode to ensure all pending data is in the rootNode + this.commit(); // All nodes beyond length are already zero if (index >= this._length - 1) { @@ -87,11 +84,7 @@ export class ListBasicTreeViewDU> extends * Same method to `type/listBasic.ts` leveraging cached nodes. */ serializeToBytes(output: ByteViews, offset: number): number { - // it's the responsibility of consumer to call commit() before calling this method - // if we do the commit() here, it'll lose all HashComputations that we want to batch - if (this.nodesChanged.size > 0) { - throw Error(`Must commit changes before serializeToBytes(Uint8Array(${output.uint8Array.length}), ${offset})`); - } + this.commit(); const {nodes, nodesPopulated} = this.cache; const chunksNode = this.type.tree_getChunksNode(this._rootNode); return tree_serializeToBytesArrayBasic( diff --git a/packages/ssz/src/viewDU/listComposite.ts b/packages/ssz/src/viewDU/listComposite.ts index 09993624..1eff31d5 100644 --- a/packages/ssz/src/viewDU/listComposite.ts +++ b/packages/ssz/src/viewDU/listComposite.ts @@ -44,11 +44,8 @@ export class ListCompositeTreeViewDU< * Note: Using index = -1, returns an empty list of length 0. */ sliceTo(index: number): this { - // it's the responsibility of consumer to call commit() before calling this method - // if we do the commit() here, it'll lose all HashComputations that we want to batch - if (this.viewsChanged.size > 0) { - throw Error(`Must commit changes before sliceTo(${index})`); - } + // Commit before getting rootNode to ensure all pending data is in the rootNode + this.commit(); const rootNode = this._rootNode; const length = this._length; @@ -114,11 +111,7 @@ export class ListCompositeTreeViewDU< * Same method to `type/listComposite.ts` leveraging cached nodes. 
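+ * Commits pending changes and populates all nodes before serializing.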
*/ serializeToBytes(output: ByteViews, offset: number): number { - // it's the responsibility of consumer to call commit() before calling this method - // if we do the commit() here, it'll lose all HashComputations that we want to batch - if (this.viewsChanged.size > 0) { - throw Error(`Must commit changes before serializeToBytes(Uint8Array(${output.uint8Array.length}, ${offset})`); - } + this.commit(); this.populateAllNodes(); const chunksNode = this.type.tree_getChunksNode(this._rootNode); return tree_serializeToBytesArrayComposite( From 1b6967688f6001b3c966fd9daf8bed40be1ff4bd Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Wed, 11 Sep 2024 14:03:28 +0700 Subject: [PATCH 098/113] feat: implement forEach() and forEachValue() for ArrayCompositeType --- packages/ssz/src/viewDU/arrayComposite.ts | 23 ++++---------- .../test/perf/byType/listComposite.test.ts | 30 +------------------ packages/ssz/test/perf/iterate.test.ts | 20 ++----------- .../unit/byType/listComposite/tree.test.ts | 11 ------- 4 files changed, 9 insertions(+), 75 deletions(-) diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 6856c63a..37868bb6 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -10,7 +10,6 @@ import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {ArrayCompositeType} from "../view/arrayComposite"; import {TreeViewDU} from "./abstract"; -import {ListIterator} from "../interface"; export type ArrayCompositeTreeViewDUCache = { nodes: Node[]; @@ -162,18 +161,13 @@ export class ArrayCompositeTreeViewDU< } /** - * Similar to getAllReadonly but support ListIterator interface. - * Use ReusableListIterator to reuse over multiple calls. + * Apply `fn` to each ViewDU in the array */ - getAllReadonlyIter(views?: ListIterator>): ListIterator> { + forEach(fn: (viewDU: CompositeViewDU, index: number) => void): void { this.populateAllNodes(); - - views = views ?? new Array>(); for (let i = 0; i < this._length; i++) { - const view = this.type.elementType.getViewDU(this.nodes[i], this.caches[i]); - views.push(view); + fn(this.type.elementType.getViewDU(this.nodes[i], this.caches[i]), i); } - return views; } /** @@ -193,18 +187,13 @@ export class ArrayCompositeTreeViewDU< } /** - * Similar to getAllReadonlyValues but support ListIterator interface. - * Use ReusableListIterator to reuse over multiple calls. + * Apply `fn` to each value in the array */ - getAllReadonlyValuesIter(values?: ListIterator>): ListIterator> { + forEachValue(fn: (value: ValueOf, index: number) => void): void { this.populateAllNodes(); - - values = values ?? 
new Array>(); for (let i = 0; i < this._length; i++) { - const value = this.type.elementType.tree_toValue(this.nodes[i]); - values.push(value); + fn(this.type.elementType.tree_toValue(this.nodes[i]), i); } - return values; } /** diff --git a/packages/ssz/test/perf/byType/listComposite.test.ts b/packages/ssz/test/perf/byType/listComposite.test.ts index 32762cfb..cef8ee6f 100644 --- a/packages/ssz/test/perf/byType/listComposite.test.ts +++ b/packages/ssz/test/perf/byType/listComposite.test.ts @@ -1,13 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import { - CompositeViewDU, - ContainerNodeStructType, - ContainerType, - ListCompositeType, - ReusableListIterator, - UintNumberType, - ValueOf, -} from "../../../src"; +import {ContainerNodeStructType, ContainerType, ListCompositeType, UintNumberType} from "../../../src"; const byteType = new UintNumberType(1); @@ -41,16 +33,6 @@ describe("ListCompositeType types", () => { } }); - const viewDUs = new ReusableListIterator>(); - itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyIter() + iterate`, () => { - viewDUs.reset(); - viewDU.getAllReadonlyIter(viewDUs); - viewDUs.clean(); - for (const viewDU of viewDUs) { - viewDU; - } - }); - itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyValues() + iterate`, () => { const values = viewDU.getAllReadonlyValues(); for (let i = 0; i < len; i++) { @@ -58,16 +40,6 @@ describe("ListCompositeType types", () => { } }); - const values = new ReusableListIterator>(); - itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyValuesIter() + iterate`, () => { - values.clean(); - viewDU.getAllReadonlyValuesIter(values); - values.reset(); - for (const value of values) { - value; - } - }); - itBench(`${listType.typeName} len ${len} ViewDU.get(i)`, () => { for (let i = 0; i < len; i++) { viewDU.get(i); diff --git a/packages/ssz/test/perf/iterate.test.ts b/packages/ssz/test/perf/iterate.test.ts index f585dffd..92b923e3 100644 --- a/packages/ssz/test/perf/iterate.test.ts +++ b/packages/ssz/test/perf/iterate.test.ts @@ -1,6 +1,6 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; -import {CompositeViewDU, ListBasicType, ReusableListIterator, UintNumberType} from "../../src"; -import {Validators, Validator} from "../lodestarTypes/phase0/sszTypes"; +import {ListBasicType, UintNumberType} from "../../src"; +import {Validators} from "../lodestarTypes/phase0/sszTypes"; describe("iterate", () => { setBenchOpts({noThreshold: true}); @@ -53,22 +53,6 @@ describe("readonly values - iterator vs array", () => { validatorsArray[i]; } }); - - const viewDUs = new ReusableListIterator>(); - itBench("compositeListValue.getAllReadonlyIter()", () => { - viewDUs.reset(); - validators.getAllReadonlyIter(viewDUs); - viewDUs.clean(); - }); - - itBench("compositeListValue.getAllReadonlyIter() + loop all", () => { - viewDUs.reset(); - validators.getAllReadonlyIter(viewDUs); - viewDUs.clean(); - for (const viewDU of viewDUs) { - viewDU; - } - }); }); // eslint-disable-next-line @typescript-eslint/explicit-function-return-type diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts index 9f5e5f66..f1de130e 100644 --- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts +++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts @@ -4,7 +4,6 @@ import { ContainerNodeStructType, ContainerType, ListCompositeType, - ReusableListIterator, toHexString, UintNumberType, ValueOf, @@ -121,21 +120,11 @@ describe("ListCompositeType tree 
reads", () => { // Only for viewDU if (view instanceof ArrayCompositeTreeViewDU) { expect(() => view.getAllReadonly()).to.throw("Must commit changes before reading all nodes"); - expect(() => view.getAllReadonlyIter()).to.throw("Must commit changes before reading all nodes"); view.commit(); } expect(view.getAllReadonly().map(elementToValue)).deep.equals(values, "Wrong getAllReadonly()"); - (view.getAllReadonlyIter() as CompositeView[]).map(elementToValue); expect(view.getAllReadonlyValues()).deep.equals(values, "Wrong getAllReadonlyValues()"); - const result = new ReusableListIterator>(); - view.getAllReadonlyValuesIter(result); - expect(result.toArray()).deep.equals(values, "Wrong getAllReadonlyValues()"); - // reuse ReusableListIterator - result.reset(); - view.getAllReadonlyValuesIter(result); - result.clean(); - expect(result.toArray()).deep.equals(values, "Wrong getAllReadonlyValues()"); // Only for viewDU if (view instanceof ArrayCompositeTreeViewDU) { From 91c735c49e52fcd19a9bad50141bcab914c2a5af Mon Sep 17 00:00:00 2001 From: twoeths Date: Fri, 13 Sep 2024 14:09:38 +0700 Subject: [PATCH 099/113] feat: ListUintNum64Type create ViewDU from existing tree (#402) * feat: create a ListUintNum64 viewdu from another viewdu * fix: publish ListBasicTreeViewDU * chore: more code comments --- .../persistent-merkle-tree/src/packedNode.ts | 11 +- packages/ssz/src/index.ts | 1 + packages/ssz/src/type/listUintNum64.ts | 108 ++++++++++++++++-- .../test/perf/byType/listUintNum64.test.ts | 25 ++++ .../byType/listBasic/listUintNum64.test.ts | 21 ++++ 5 files changed, 157 insertions(+), 9 deletions(-) create mode 100644 packages/ssz/test/perf/byType/listUintNum64.test.ts create mode 100644 packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts diff --git a/packages/persistent-merkle-tree/src/packedNode.ts b/packages/persistent-merkle-tree/src/packedNode.ts index 3c6413b8..459f6444 100644 --- a/packages/persistent-merkle-tree/src/packedNode.ts +++ b/packages/persistent-merkle-tree/src/packedNode.ts @@ -18,9 +18,16 @@ export function packedRootsBytesToNode(depth: number, dataView: DataView, start: * * h0 h1 h2 h3 h4 h5 h6 h7 * |------|------|------|------|------|------|------|------| + * + * @param values list of uint64 numbers + * @param leafNodes optional list of LeafNodes to reuse */ -export function packedUintNum64sToLeafNodes(values: number[]): LeafNode[] { - const leafNodes = new Array(Math.ceil(values.length / 4)); +export function packedUintNum64sToLeafNodes(values: number[], leafNodes?: LeafNode[]): LeafNode[] { + const nodeCount = Math.ceil(values.length / 4); + if (leafNodes && leafNodes.length !== nodeCount) { + throw new Error(`Invalid leafNode length: ${leafNodes.length} !== ${nodeCount}`); + } + leafNodes = leafNodes ?? new Array(Math.ceil(values.length / 4)); for (let i = 0; i < values.length; i++) { const nodeIndex = Math.floor(i / 4); const leafNode = leafNodes[nodeIndex] ?? 
new LeafNode(0, 0, 0, 0, 0, 0, 0, 0); diff --git a/packages/ssz/src/index.ts b/packages/ssz/src/index.ts index f2379e17..8c61fc8f 100644 --- a/packages/ssz/src/index.ts +++ b/packages/ssz/src/index.ts @@ -29,6 +29,7 @@ export {TreeView} from "./view/abstract"; export {ValueOfFields, ContainerTypeGeneric} from "./view/container"; export {TreeViewDU} from "./viewDU/abstract"; export {ListCompositeTreeViewDU} from "./viewDU/listComposite"; +export {ListBasicTreeViewDU} from "./viewDU/listBasic"; export {ArrayCompositeTreeViewDUCache} from "./viewDU/arrayComposite"; export {ContainerNodeStructTreeViewDU} from "./viewDU/containerNodeStruct"; diff --git a/packages/ssz/src/type/listUintNum64.ts b/packages/ssz/src/type/listUintNum64.ts index 663e637d..4ee4826b 100644 --- a/packages/ssz/src/type/listUintNum64.ts +++ b/packages/ssz/src/type/listUintNum64.ts @@ -1,24 +1,53 @@ -import {LeafNode, Node, packedUintNum64sToLeafNodes, subtreeFillToContents} from "@chainsafe/persistent-merkle-tree"; +import { + HashComputationGroup, + HashComputationLevel, + LeafNode, + Node, + executeHashComputations, + getNodesAtDepth, + levelAtIndex, + packedUintNum64sToLeafNodes, + setNodesAtDepth, + subtreeFillToContents, + zeroNode, +} from "@chainsafe/persistent-merkle-tree"; import {ListBasicTreeViewDU} from "../viewDU/listBasic"; import {ListBasicOpts, ListBasicType} from "./listBasic"; import {UintNumberType} from "./uint"; -import {addLengthNode} from "./arrayBasic"; +import {addLengthNode, getLengthFromRootNode} from "./arrayBasic"; /** * Specific implementation of ListBasicType for UintNumberType with some optimizations. */ export class ListUintNum64Type extends ListBasicType { + private hcGroup: HashComputationGroup | undefined; constructor(limit: number, opts?: ListBasicOpts) { super(new UintNumberType(8), limit, opts); } /** * Return a ListBasicTreeViewDU with nodes populated + * @param unusedViewDU optional, if provided we'll create ViewDU using the provided rootNode. Need to rehash the whole + * tree in this case to make it clean for consumers. 
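+ * The provided tree may only grow: packedUintNum64sToNode() throws if value is shorter than the old length.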
*/ toViewDU(value: number[], unusedViewDU?: ListBasicTreeViewDU<UintNumberType>): ListBasicTreeViewDU<UintNumberType> { // no need to serialize and deserialize like in the abstract class - const {treeNode, leafNodes} = this.packedUintNum64sToNode(value); + const {treeNode, leafNodes} = this.packedUintNum64sToNode(value, unusedViewDU?.node); + + if (unusedViewDU) { + const hcGroup = this.getHcGroup(); + hcGroup.reset(); + forceGetHashComputations(treeNode, this.chunkDepth + 1, 0, hcGroup.byLevel); + hcGroup.clean(); + + treeNode.h0 = null as unknown as number; + executeHashComputations(hcGroup.byLevel); + // This makes sure the root node is computed by batch + if (treeNode.h0 === null) { + throw Error("Root is not computed by batch"); + } + } // cache leaf nodes in the ViewDU return this.getViewDU(treeNode, { nodes: leafNodes, @@ -29,21 +58,86 @@ export class ListUintNum64Type extends ListBasicType<UintNumberType> { /** * No need to serialize and deserialize like in the abstract class + * This must conform to the parent's signature, so an `unusedViewDU` parameter cannot be provided here */ value_toTree(value: number[]): Node { const {treeNode} = this.packedUintNum64sToNode(value); return treeNode; } - private packedUintNum64sToNode(value: number[]): {treeNode: Node; leafNodes: LeafNode[]} { + private packedUintNum64sToNode(value: number[], unusedRootNode?: Node): {treeNode: Node; leafNodes: LeafNode[]} { if (value.length > this.limit) { throw new Error(`Exceeds limit: ${value.length} > ${this.limit}`); } + if (unusedRootNode) { + // create new tree from unusedRootNode + const oldLength = getLengthFromRootNode(unusedRootNode); + if (oldLength > value.length) { + throw new Error(`Cannot decrease length: ${oldLength} > ${value.length}`); + } + + const oldNodeCount = Math.ceil(oldLength / 4); + const oldChunksNode = unusedRootNode.left; + const oldLeafNodes = getNodesAtDepth(oldChunksNode, this.chunkDepth, 0, oldNodeCount) as LeafNode[]; + if (oldLeafNodes.length !== oldNodeCount) { + throw new Error(`oldLeafNodes.length ${oldLeafNodes.length} !== oldNodeCount ${oldNodeCount}`); + } + + const newNodeCount = Math.ceil(value.length / 4); + const count = newNodeCount - oldNodeCount; + const newLeafNodes = Array.from({length: count}, () => new LeafNode(0, 0, 0, 0, 0, 0, 0, 0)); + const leafNodes = [...oldLeafNodes, ...newLeafNodes]; + packedUintNum64sToLeafNodes(value, leafNodes); + + // middle nodes are not changed so consumer must recompute parent hashes + const newChunksNode = setNodesAtDepth( + oldChunksNode, + this.chunkDepth, + Array.from({length: count}, (_, i) => oldNodeCount + i), + newLeafNodes + ); + const treeNode = addLengthNode(newChunksNode, value.length); + + return {treeNode, leafNodes}; + } + + // create new tree from scratch const leafNodes = packedUintNum64sToLeafNodes(value); // subtreeFillToContents mutates the leafNodes array - const rootNode = subtreeFillToContents([...leafNodes], this.chunkDepth); - const treeNode = addLengthNode(rootNode, value.length); + const chunksNode = subtreeFillToContents([...leafNodes], this.chunkDepth); + const treeNode = addLengthNode(chunksNode, value.length); return {treeNode, leafNodes}; } + + private getHcGroup(): HashComputationGroup { + if (!this.hcGroup) { + this.hcGroup = new HashComputationGroup(); + } + return this.hcGroup; + } +} + +/** + * Consider moving this to persistent-merkle-tree. + * For now this is the only flow that needs to force-get hash computations.
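+ * Note: unlike getHashComputations(), this visits every branch node even when its cached root (h0) is still valid.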
+ */ +function forceGetHashComputations( + node: Node, + nodeDepth: number, + index: number, + hcByLevel: HashComputationLevel[] +): void { + // very important: never mutate zeroNode + if (node === zeroNode(nodeDepth) || node.isLeaf()) { + return; + } + + // if (node.h0 === null) { + const hashComputations = levelAtIndex(hcByLevel, index); + const {left, right} = node; + hashComputations.push(left, right, node); + // leaf nodes should have h0 to stop the recursion + forceGetHashComputations(left, nodeDepth - 1, index + 1, hcByLevel); + forceGetHashComputations(right, nodeDepth - 1, index + 1, hcByLevel); } diff --git a/packages/ssz/test/perf/byType/listUintNum64.test.ts b/packages/ssz/test/perf/byType/listUintNum64.test.ts new file mode 100644 index 00000000..19e8b7f7 --- /dev/null +++ b/packages/ssz/test/perf/byType/listUintNum64.test.ts @@ -0,0 +1,25 @@ +import {itBench} from "@dapplion/benchmark"; +import {ListUintNum64Type} from "../../../src/type/listUintNum64"; + +describe("ListUintNum64Type.toViewDU", () => { + const balancesType = new ListUintNum64Type(1099511627776); + const seedLength = 1_900_000; + const seedViewDU = balancesType.toViewDU(Array.from({length: seedLength}, () => 0)); + + const vc = 2_000_000; + const value = Array.from({length: vc}, (_, i) => 32 * 1e9 + i); + + itBench({ + id: `ListUintNum64Type.toViewDU ${seedLength} -> ${vc}`, + fn: () => { + balancesType.toViewDU(value, seedViewDU); + }, + }); + + itBench({ + id: "ListUintNum64Type.toViewDU()", + fn: () => { + balancesType.toViewDU(value); + }, + }); +}); diff --git a/packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts b/packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts new file mode 100644 index 00000000..ae867a5e --- /dev/null +++ b/packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts @@ -0,0 +1,21 @@ +import {expect} from "chai"; +import {ListUintNum64Type} from "../../../../src/type/listUintNum64"; + +describe("ListUintNum64Type.toViewDU", () => { + const type = new ListUintNum64Type(1024); + // seed ViewDU contains 16 leaf nodes = 64 uint64 + // but we test all cases + for (const seedLength of [61, 62, 63, 64]) { + const value = Array.from({length: seedLength}, (_, i) => i); + const unusedViewDU = type.toViewDU(value); + + it(`should create ViewDU from a seedViewDU with ${seedLength} uint64`, () => { + for (let i = seedLength; i < 1024; i++) { + const newValue = Array.from({length: i + 1}, (_, j) => j); + const expectedRoot = type.toViewDU(newValue).hashTreeRoot(); + const viewDUFromExistingTree = type.toViewDU(newValue, unusedViewDU); + expect(viewDUFromExistingTree.hashTreeRoot()).to.deep.equal(expectedRoot); + } + }); + } +}); From 806c4de13871f39eed9d08c39c9cd99dadf583b7 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 11 Oct 2024 14:13:03 +0700 Subject: [PATCH 100/113] fix: implement getChunkBytes() for StableContainer & Profile --- packages/ssz/src/type/composite.ts | 3 +- packages/ssz/src/type/profile.ts | 34 ++++++++----- packages/ssz/src/type/stableContainer.ts | 63 +++++++++++++----------- packages/ssz/src/util/merkleize.ts | 19 +------ packages/ssz/src/viewDU/container.ts | 6 +-- 5 files changed, 62 insertions(+), 63 deletions(-) diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index b35d80f7..eb370c7c 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -239,7 +239,8 @@ export abstract class CompositeType extends Type { const merkleBytes = this.getChunkBytes(value); 
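// merkleizeInto() hashes the chunk buffer, padded to maxChunkCount leaves, and writes the 32-byte root at `offset`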
merkleizeInto(merkleBytes, this.maxChunkCount, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.slice(offset, offset + 32); + const cachedRoot = Uint8Array.prototype.slice.call(output, offset, offset + 32); + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = cachedRoot; } } diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index f9469fe0..335154fe 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -6,6 +6,7 @@ import { Gindex, toGindex, concatGindices, + merkleizeInto, getNode, BranchNode, zeroHash, @@ -33,6 +34,7 @@ import {Case} from "../util/strings"; import {BitArray} from "../value/bitArray"; import {mixInActiveFields, setActiveFields} from "./stableContainer"; import {NonOptionalFields, isOptionalType, toNonOptionalType} from "./optional"; +import {Uint8} from "../../test/lodestarTypes/sszTypes"; /* eslint-disable @typescript-eslint/member-ordering */ type BytesRange = {start: number; end: number}; @@ -154,6 +156,9 @@ export class ProfileType<Fields extends Record<string, Type<unknown>>> extends C // Refactor this constructor to allow customization without polluting the options this.TreeView = opts?.getProfileTreeViewClass?.(this) ?? getProfileTreeViewClass(this); this.TreeViewDU = opts?.getProfileTreeViewDUClass?.(this) ?? getProfileTreeViewDUClass(this); + const fieldBytes = this.activeFields.bitLen * 32; + const chunkBytes = Math.ceil(fieldBytes / 64) * 64; + this.chunkBytesBuffer = new Uint8Array(chunkBytes); } static named<Fields extends Record<string, Type<unknown>>>( @@ -361,37 +366,40 @@ } // Merkleization - hashTreeRoot(value: ValueOfFields<Fields>): Uint8Array { + // hashTreeRoot is the same as the parent's since it calls hashTreeRootInto() + hashTreeRootInto(value: ValueOfFields<Fields>, output: Uint8Array, offset: number): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; if (cachedRoot) { - return cachedRoot; + output.set(cachedRoot, offset); + return; } } - const root = mixInActiveFields(super.hashTreeRoot(value), this.activeFields); + const merkleBytes = this.getChunkBytes(value); + const root = new Uint8Array(32); + merkleizeInto(merkleBytes, this.maxChunkCount, root, 0); + mixInActiveFields(root, this.activeFields, root, 0); + output.set(root, offset); if (this.cachePermanentRootStruct) { (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; } - - return root; } - protected getRoots(struct: ValueOfFields<Fields>): Uint8Array[] { - const roots = new Array<Uint8Array>(this.activeFields.bitLen).fill(zeroHash(0)); - - // already asserted that # of active fields in bitvector === # of fields + protected getChunkBytes(struct: ValueOfFields<Fields>): Uint8Array { + this.chunkBytesBuffer.fill(0); for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType, chunkIndex, optional} = this.fieldsEntries[i]; if (optional && struct[fieldName] == null) { - continue; + this.chunkBytesBuffer.set(zeroHash(0), chunkIndex * 32); + } else { + fieldType.hashTreeRootInto(struct[fieldName], this.chunkBytesBuffer, chunkIndex * 32); } - roots[chunkIndex] = fieldType.hashTreeRoot(struct[fieldName]); } - - return roots; + // remaining bytes are zeroed by the fill(0) above as we never write them + return this.chunkBytesBuffer; } // Proofs diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index bf8b94fa..50557cba 100644 ---
a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -11,19 +11,13 @@ import { getNode, zeroNode, zeroHash, + merkleizeInto, countToDepth, getNodeH, setNode, setNodeWithFn, } from "@chainsafe/persistent-merkle-tree"; -import { - ValueWithCachedPermanentRoot, - hash64, - maxChunksToDepth, - merkleize, - splitIntoRootChunks, - symbolCachedPermanentRoot, -} from "../util/merkleize"; +import {ValueWithCachedPermanentRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {JsonPath, Type, ValueOf} from "./abstract"; @@ -153,6 +147,9 @@ export class StableContainerType<Fields extends Record<string, Type<unknown>>> e // Refactor this constructor to allow customization without polluting the options this.TreeView = opts?.getContainerTreeViewClass?.(this) ?? getContainerTreeViewClass(this); this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? getContainerTreeViewDUClass(this); + const fieldBytes = this.fieldsEntries.length * 32; + const chunkBytes = Math.ceil(fieldBytes / 64) * 64; + this.chunkBytesBuffer = new Uint8Array(chunkBytes); } static named<Fields extends Record<string, Type<unknown>>>( @@ -341,43 +338,45 @@ } // Merkleization - hashTreeRoot(value: ValueOfFields<Fields>): Uint8Array { + // hashTreeRoot is the same as the parent's since it calls hashTreeRootInto() + hashTreeRootInto(value: ValueOfFields<Fields>, output: Uint8Array, offset: number): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; if (cachedRoot) { - return cachedRoot; + output.set(cachedRoot, offset); + return; } } + const merkleBytes = this.getChunkBytes(value); + const root = new Uint8Array(32); + merkleizeInto(merkleBytes, this.maxChunkCount, root, 0); // compute active field bitvector const activeFields = BitArray.fromBoolArray([ ...this.fieldsEntries.map(({fieldName}) => value[fieldName] != null), ...this.padActiveFields, ]); - const root = mixInActiveFields(super.hashTreeRoot(value), activeFields); + mixInActiveFields(root, activeFields, root, 0); + output.set(root, offset); if (this.cachePermanentRootStruct) { (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; } - - return root; } - protected getRoots(struct: ValueOfFields<Fields>): Uint8Array[] { - const roots = new Array<Uint8Array>(this.fieldsEntries.length); - + protected getChunkBytes(struct: ValueOfFields<Fields>): Uint8Array { + this.chunkBytesBuffer.fill(0); for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType, optional} = this.fieldsEntries[i]; if (optional && struct[fieldName] == null) { - roots[i] = zeroHash(0); - continue; + this.chunkBytesBuffer.set(zeroHash(0), i * 32); + } else { + fieldType.hashTreeRootInto(struct[fieldName], this.chunkBytesBuffer, i * 32); } - - roots[i] = fieldType.hashTreeRoot(struct[fieldName]); } - return roots; + return this.chunkBytesBuffer; } // Proofs @@ -815,15 +814,23 @@ export function setActiveField(rootNode: Node, bitLen: number, fieldIndex: numbe return new BranchNode(rootNode.left, newActiveFieldsNode); } -export function mixInActiveFields(root: Uint8Array, activeFields: BitArray): Uint8Array { +// This is a global buffer to avoid creating a new one for each call to mixInActiveFields +const mixInActiveFieldsChunkBytes = new Uint8Array(64); +const activeFieldsSingleChunk = mixInActiveFieldsChunkBytes.subarray(32); + +export function mixInActiveFields(root: Uint8Array, activeFields: BitArray, output:
Uint8Array, offset: number): void { // fast path for depth 1, the bitvector fits in one chunk + mixInActiveFieldsChunkBytes.set(root, 0); if (activeFields.bitLen <= 256) { - const activeFieldsChunk = new Uint8Array(32); - activeFieldsChunk.set(activeFields.uint8Array); - return hash64(root, activeFieldsChunk); + activeFieldsSingleChunk.fill(0); + activeFieldsSingleChunk.set(activeFields.uint8Array); + // 1 chunk for root, 1 chunk for activeFields + merkleizeInto(mixInActiveFieldsChunkBytes, 2, output, offset); + return; } - const activeFieldsChunks = splitIntoRootChunks(activeFields.uint8Array); - const activeFieldsRoot = merkleize(activeFieldsChunks, activeFieldsChunks.length); - return hash64(root, activeFieldsRoot); + const chunkCount = Math.ceil(activeFields.uint8Array.length / 32); + merkleizeInto(activeFields.uint8Array, chunkCount, activeFieldsSingleChunk, 0); + // 1 chunk for root, 1 chunk for activeFields + merkleizeInto(mixInActiveFieldsChunkBytes, 2, output, offset); } diff --git a/packages/ssz/src/util/merkleize.ts b/packages/ssz/src/util/merkleize.ts index 073dea5d..11301d92 100644 --- a/packages/ssz/src/util/merkleize.ts +++ b/packages/ssz/src/util/merkleize.ts @@ -1,5 +1,4 @@ -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index"; -import {zeroHash} from "@chainsafe/persistent-merkle-tree"; +import {hasher, zeroHash} from "@chainsafe/persistent-merkle-tree"; /** Dedicated property to cache hashTreeRoot of immutable CompositeType values */ export const symbolCachedPermanentRoot = Symbol("ssz_cached_permanent_root"); @@ -43,22 +42,6 @@ export function merkleize(chunks: Uint8Array[], padFor: number): Uint8Array { return chunks[0]; } -/** - * Split a long Uint8Array into Uint8Array of exactly 32 bytes - */ -export function splitIntoRootChunks(longChunk: Uint8Array): Uint8Array[] { - const chunkCount = Math.ceil(longChunk.length / 32); - const chunks = new Array(chunkCount); - - for (let i = 0; i < chunkCount; i++) { - const chunk = new Uint8Array(32); - chunk.set(longChunk.slice(i * 32, (i + 1) * 32)); - chunks[i] = chunk; - } - - return chunks; -} - /** @ignore */ export function mixInLength(root: Uint8Array, length: number): Uint8Array { const lengthBuf = Buffer.alloc(32); diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts index b112c4de..68b0a00b 100644 --- a/packages/ssz/src/viewDU/container.ts +++ b/packages/ssz/src/viewDU/container.ts @@ -101,12 +101,12 @@ export class BasicContainerTreeViewDU Date: Fri, 11 Oct 2024 14:20:07 +0700 Subject: [PATCH 101/113] fix: lint --- packages/ssz/src/type/profile.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index 335154fe..756a52ff 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -34,7 +34,6 @@ import {Case} from "../util/strings"; import {BitArray} from "../value/bitArray"; import {mixInActiveFields, setActiveFields} from "./stableContainer"; import {NonOptionalFields, isOptionalType, toNonOptionalType} from "./optional"; -import {Uint8} from "../../test/lodestarTypes/sszTypes"; /* eslint-disable @typescript-eslint/member-ordering */ type BytesRange = {start: number; end: number}; From 024538d4e343cb43b970bc776a3ff87ded4fc533 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 11 Oct 2024 14:59:54 +0700 Subject: [PATCH 102/113] feat: use allocUnsafe() for hashTreeRoot() --- packages/as-sha256/src/index.ts | 1 + packages/ssz/src/type/basic.ts | 1 + 
packages/ssz/src/type/bitList.ts | 3 ++- packages/ssz/src/type/byteList.ts | 4 ++-- packages/ssz/src/type/composite.ts | 3 ++- packages/ssz/src/type/containerNodeStruct.ts | 3 --- packages/ssz/src/type/listBasic.ts | 3 ++- packages/ssz/src/type/listComposite.ts | 3 ++- packages/ssz/src/type/optional.ts | 3 ++- packages/ssz/src/type/profile.ts | 3 ++- packages/ssz/src/type/stableContainer.ts | 15 ++++++++++----- packages/ssz/src/type/union.ts | 3 ++- 12 files changed, 28 insertions(+), 17 deletions(-) diff --git a/packages/as-sha256/src/index.ts b/packages/as-sha256/src/index.ts index 11f44cc3..46d60789 100644 --- a/packages/as-sha256/src/index.ts +++ b/packages/as-sha256/src/index.ts @@ -3,6 +3,7 @@ import {newInstance} from "./wasm"; import {HashObject, byteArrayIntoHashObject, byteArrayToHashObject, hashObjectToByteArray} from "./hashObject"; import SHA256 from "./sha256"; export {HashObject, byteArrayToHashObject, hashObjectToByteArray, byteArrayIntoHashObject, SHA256}; +export {allocUnsafe}; const ctx = newInstance(); const wasmInputValue = ctx.input.value; diff --git a/packages/ssz/src/type/basic.ts b/packages/ssz/src/type/basic.ts index add1d9f1..920c6d97 100644 --- a/packages/ssz/src/type/basic.ts +++ b/packages/ssz/src/type/basic.ts @@ -30,6 +30,7 @@ export abstract class BasicType extends Type { } hashTreeRoot(value: V): Uint8Array { + // cannot use allocUnsafe() here because hashTreeRootInto() may not fill the whole 32 bytes const root = new Uint8Array(32); this.hashTreeRootInto(value, root, 0); return root; diff --git a/packages/ssz/src/type/bitList.ts b/packages/ssz/src/type/bitList.ts index 8f2bb1b1..ba1c419a 100644 --- a/packages/ssz/src/type/bitList.ts +++ b/packages/ssz/src/type/bitList.ts @@ -1,3 +1,4 @@ +import {allocUnsafe} from "@chainsafe/as-sha256"; import { getNodesAtDepth, merkleizeInto, @@ -113,7 +114,7 @@ export class BitListType extends BitArrayType { // Merkleization: inherited from BitArrayType hashTreeRoot(value: BitArray): Uint8Array { - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); return root; } diff --git a/packages/ssz/src/type/byteList.ts b/packages/ssz/src/type/byteList.ts index a71a9c81..53c46d39 100644 --- a/packages/ssz/src/type/byteList.ts +++ b/packages/ssz/src/type/byteList.ts @@ -1,3 +1,4 @@ +import {allocUnsafe} from "@chainsafe/as-sha256"; import { getNodesAtDepth, Node, @@ -11,7 +12,6 @@ import {namedClass} from "../util/named"; import {addLengthNode, getChunksNodeFromRootNode, getLengthFromRootNode} from "./arrayBasic"; import {ByteViews} from "./composite"; import {ByteArrayType, ByteArray} from "./byteArray"; - /* eslint-disable @typescript-eslint/member-ordering */ export interface ByteListOptions { @@ -101,7 +101,7 @@ export class ByteListType extends ByteArrayType { // Merkleization: inherited from ByteArrayType hashTreeRoot(value: ByteArray): Uint8Array { - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); return root; } diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index eb370c7c..8ec9314d 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -1,3 +1,4 @@ +import {allocUnsafe} from "@chainsafe/as-sha256"; import { concatGindices, createProof, @@ -218,7 +219,7 @@ export abstract class CompositeType extends Type { } } - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); // hashTreeRootInto will cache 
the root if cachePermanentRootStruct is true diff --git a/packages/ssz/src/type/containerNodeStruct.ts b/packages/ssz/src/type/containerNodeStruct.ts index 86aa0ee4..76147f12 100644 --- a/packages/ssz/src/type/containerNodeStruct.ts +++ b/packages/ssz/src/type/containerNodeStruct.ts @@ -24,9 +24,6 @@ import {ValueOfFields} from "../view/container"; * This tradeoff is good for data that is read often, written rarely, and consumes a lot of memory (i.e. Validator) */ export class ContainerNodeStructType>> extends ContainerType { - // Temporary root to avoid allocating new Uint8Array every time - private temporaryRoot = new Uint8Array(32); - constructor(readonly fields: Fields, opts?: ContainerOptions) { super(fields, { // Overwrite default "Container" typeName diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index d509c065..5328f3fd 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -17,6 +17,7 @@ import {ArrayBasicType} from "../view/arrayBasic"; import {ListBasicTreeView} from "../view/listBasic"; import {ListBasicTreeViewDU} from "../viewDU/listBasic"; import {ArrayType} from "./array"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -174,7 +175,7 @@ export class ListBasicType> } } - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); // hashTreeRootInto will cache the root if cachePermanentRootStruct is true diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index 14a90821..17e21954 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -19,6 +19,7 @@ import {ArrayCompositeType} from "../view/arrayComposite"; import {ListCompositeTreeView} from "../view/listComposite"; import {ListCompositeTreeViewDU} from "../viewDU/listComposite"; import {ArrayType} from "./array"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -181,7 +182,7 @@ export class ListCompositeType< } } - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); // hashTreeRootInto will cache the root if cachePermanentRootStruct is true diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index 02723445..3d2925fb 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -13,6 +13,7 @@ import {namedClass} from "../util/named"; import {Type, ByteViews, JsonPath, JsonPathProp} from "./abstract"; import {CompositeType, isCompositeType} from "./composite"; import {addLengthNode, getLengthFromRootNode} from "./arrayBasic"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ export type NonOptionalType> = T extends OptionalType ? 
U : T; @@ -178,7 +179,7 @@ export class OptionalType> extends CompositeTy // Merkleization hashTreeRoot(value: ValueOfType): Uint8Array { - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); return root; } diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index 756a52ff..03941e4b 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -34,6 +34,7 @@ import {Case} from "../util/strings"; import {BitArray} from "../value/bitArray"; import {mixInActiveFields, setActiveFields} from "./stableContainer"; import {NonOptionalFields, isOptionalType, toNonOptionalType} from "./optional"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ type BytesRange = {start: number; end: number}; @@ -377,7 +378,7 @@ export class ProfileType>> extends C } const merkleBytes = this.getChunkBytes(value); - const root = new Uint8Array(32); + const root = allocUnsafe(32); merkleizeInto(merkleBytes, this.maxChunkCount, root, 0); mixInActiveFields(root, this.activeFields, root, 0); output.set(root, offset); diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index 50557cba..334fa66a 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -38,6 +38,7 @@ import { import {Case} from "../util/strings"; import {isOptionalType, toNonOptionalType, NonOptionalFields} from "./optional"; import {BitArray} from "../value/bitArray"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ type BytesRange = {start: number; end: number}; @@ -350,7 +351,7 @@ export class StableContainerType>> e } const merkleBytes = this.getChunkBytes(value); - const root = new Uint8Array(32); + const root = allocUnsafe(32); merkleizeInto(merkleBytes, this.maxChunkCount, root, 0); // compute active field bitvector const activeFields = BitArray.fromBoolArray([ @@ -750,12 +751,15 @@ export function getActiveFields(rootNode: Node, bitLen: number): BitArray { return new BitArray(activeFieldsBuf, bitLen); } +// This is a global buffer to avoid creating a new one for each call to getActiveFields +const singleChunkActiveFieldsBuf = new Uint8Array(32); + export function setActiveFields(rootNode: Node, activeFields: BitArray): Node { // fast path for depth 1, the bitvector fits in one chunk if (activeFields.bitLen <= 256) { - const activeFieldsBuf = new Uint8Array(32); - activeFieldsBuf.set(activeFields.uint8Array); - return new BranchNode(rootNode.left, LeafNode.fromRoot(activeFieldsBuf)); + singleChunkActiveFieldsBuf.fill(0); + singleChunkActiveFieldsBuf.set(activeFields.uint8Array); + return new BranchNode(rootNode.left, LeafNode.fromRoot(singleChunkActiveFieldsBuf)); } const activeFieldsChunkCount = Math.ceil(activeFields.bitLen / 256); @@ -825,7 +829,8 @@ export function mixInActiveFields(root: Uint8Array, activeFields: BitArray, outp activeFieldsSingleChunk.fill(0); activeFieldsSingleChunk.set(activeFields.uint8Array); // 1 chunk for root, 1 chunk for activeFields - merkleizeInto(mixInActiveFieldsChunkBytes, 2, output, offset); + const chunkCount = 2; + merkleizeInto(mixInActiveFieldsChunkBytes, chunkCount, output, offset); return; } diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index 97b5672a..6a6117dd 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -14,6 +14,7 @@ import {Type, 
ByteViews} from "./abstract"; import {CompositeType, isCompositeType} from "./composite"; import {addLengthNode, getLengthFromRootNode} from "./arrayBasic"; import {NoneType} from "./none"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -177,7 +178,7 @@ export class UnionType[]> extends CompositeType< // Merkleization hashTreeRoot(value: ValueOfTypes): Uint8Array { - const root = new Uint8Array(32); + const root = allocUnsafe(32); this.hashTreeRootInto(value, root, 0); return root; } From 95cc29ea47aec5bf3aca95255c65b8bcefdbe5e8 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 14 Oct 2024 14:19:43 +0700 Subject: [PATCH 103/113] chore: revert unnecessary change to master --- packages/ssz/src/viewDU/arrayComposite.ts | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 37868bb6..bced3024 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -215,24 +215,15 @@ export class ArrayCompositeTreeViewDU< // Depth includes the extra level for the length node const byLevelView = hcByLevel != null && isOldRootHashed ? hcByLevel : null; - const indexesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b); - const indexes: number[] = []; - const nodes: Node[] = []; - for (const index of indexesChanged) { - const view = this.viewsChanged.get(index); - if (!view) { - // should not happen - throw Error("View not found in viewsChanged, index=" + index); - } + const nodesChanged: {index: number; node: Node}[] = []; + for (const [index, view] of this.viewsChanged) { const node = this.type.elementType.commitViewDU(view, offsetView, byLevelView); // there's a chance the view is not changed, no need to rebind nodes in that case if (this.nodes[index] !== node) { // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal this.nodes[index] = node; - // nodesChanged.push({index, node}); - indexes.push(index); - nodes.push(node); + nodesChanged.push({index, node}); } // Cache the view's caches to preserve it's data after 'this.viewsChanged.clear()' @@ -240,6 +231,11 @@ export class ArrayCompositeTreeViewDU< if (cache) this.caches[index] = cache; } + // TODO: Optimize to loop only once, Numerical sort ascending + const nodesChangedSorted = nodesChanged.sort((a, b) => a.index - b.index); + const indexes = nodesChangedSorted.map((entry) => entry.index); + const nodes = nodesChangedSorted.map((entry) => entry.node); + const chunksNode = this.type.tree_getChunksNode(this._rootNode); const offsetThis = hcOffset + this.type.tree_chunksNodeOffset(); const byLevelThis = hcByLevel != null && isOldRootHashed ? 
hcByLevel : null; From 931511976a642e03fdf746aa7556a8e9281878ad Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 14 Oct 2024 14:46:27 +0700 Subject: [PATCH 104/113] chore: remove ListIterator --- packages/ssz/src/index.ts | 1 - packages/ssz/src/interface.ts | 6 - packages/ssz/src/util/reusableListIterator.ts | 145 ------------------ packages/ssz/src/view/arrayComposite.ts | 32 ---- .../perf/util/reusableListIterator.test.ts | 44 ------ .../unit/util/reusableListIterator.test.ts | 64 -------- 6 files changed, 292 deletions(-) delete mode 100644 packages/ssz/src/util/reusableListIterator.ts delete mode 100644 packages/ssz/test/perf/util/reusableListIterator.test.ts delete mode 100644 packages/ssz/test/unit/util/reusableListIterator.test.ts diff --git a/packages/ssz/src/index.ts b/packages/ssz/src/index.ts index 5a74ae26..3a94a1bc 100644 --- a/packages/ssz/src/index.ts +++ b/packages/ssz/src/index.ts @@ -41,7 +41,6 @@ export {BitArray, getUint8ByteToBitBooleanArray} from "./value/bitArray"; // Utils export {fromHexString, toHexString, byteArrayEquals} from "./util/byteArray"; -export {ReusableListIterator} from "./util/reusableListIterator"; export {Snapshot} from "./util/types"; export {hash64, symbolCachedPermanentRoot} from "./util/merkleize"; diff --git a/packages/ssz/src/interface.ts b/packages/ssz/src/interface.ts index 05240a04..240c2795 100644 --- a/packages/ssz/src/interface.ts +++ b/packages/ssz/src/interface.ts @@ -16,12 +16,6 @@ export interface List extends ArrayLike { pop(): T | undefined; } -export interface ListIterator { - readonly length: number; - push(...values: T[]): void; - [Symbol.iterator](): Iterator; -} - export type Container> = T; export type ByteVector = Vector; diff --git a/packages/ssz/src/util/reusableListIterator.ts b/packages/ssz/src/util/reusableListIterator.ts deleted file mode 100644 index 7e1c35a7..00000000 --- a/packages/ssz/src/util/reusableListIterator.ts +++ /dev/null @@ -1,145 +0,0 @@ -import {ListIterator} from "../interface"; - -class LinkedNode { - data: T; - next: LinkedNode | null = null; - - constructor(data: T) { - this.data = data; - } -} - -/** - * A LinkedList that's designed to be reused overtime. - * Before every run, reset() should be called. - * After every run, clean() should be called. - */ -export class ReusableListIterator implements ListIterator { - private head: LinkedNode; - private tail: LinkedNode | null; - private _length = 0; - private _totalLength = 0; - private pointer: LinkedNode | null; - // this avoids memory allocation - private iteratorResult: IteratorResult; - - constructor() { - this.head = { - data: null as unknown as T, - next: null, - }; - this.tail = null; - this.pointer = null; - this.iteratorResult = {} as IteratorResult; - } - - get length(): number { - return this._length; - } - - get totalLength(): number { - return this._totalLength; - } - - /** - * run before every run - */ - reset(): void { - // keep this.head object, only release the data - this.head.data = null as unknown as T; - this.tail = null; - this._length = 0; - // totalLength is not reset - this.pointer = null; - // no need to reset iteratorResult - } - - /** - * Append new data to the tail - * This will overwrite the existing data if it is not null, or grow the list if needed. 
- */ - push(value: T): void { - if (this.tail !== null) { - let newTail = this.tail.next; - if (newTail !== null) { - newTail.data = value; - } else { - // grow the list - newTail = {data: value, next: null}; - this.tail.next = newTail; - this._totalLength++; - } - this.tail = newTail; - this._length++; - return; - } - - // first item - this.head.data = value; - this.tail = this.head; - this._length = 1; - if (this._totalLength === 0) { - this._totalLength = 1; - } - // else _totalLength > 0, do not set - } - - /** - * run after every run - * hashComps may still refer to the old Nodes, we should release them to avoid memory leak. - */ - clean(): void { - let node = this.tail?.next ?? null; - while (node !== null && node.data !== null) { - node.data = null as unknown as T; - node = node.next; - } - } - - /** - * Implement Iterator for this class - */ - next(): IteratorResult { - if (!this.pointer || this.tail === null) { - return {done: true, value: undefined}; - } - - // never yield value beyond the tail - const value = this.pointer.data; - this.pointer = this.pointer.next; - // should not allocate new object here - const isNull = value === null; - this.iteratorResult.done = isNull; - this.iteratorResult.value = isNull ? undefined : value; - return this.iteratorResult; - } - - /** - * This is convenient method to consume HashComputationLevel with for-of loop - * See "next" method above for the actual implementation - */ - [Symbol.iterator](): IterableIterator { - this.pointer = this.head; - return this; - } - - toArray(): T[] { - const result: T[] = []; - for (const data of this) { - result.push(data); - } - return result; - } - - /** - * For testing only - */ - dump(): T[] { - const result: T[] = []; - let node: LinkedNode | null = this.head; - for (; node !== null; node = node.next) { - result.push(node.data); - } - return result; - } -} diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts index 19f1be40..35c633df 100644 --- a/packages/ssz/src/view/arrayComposite.ts +++ b/packages/ssz/src/view/arrayComposite.ts @@ -2,7 +2,6 @@ import {getNodesAtDepth, Node, toGindexBitstring, Tree} from "@chainsafe/persist import {ValueOf} from "../type/abstract"; import {CompositeType, CompositeView, CompositeViewDU} from "../type/composite"; import {TreeView} from "./abstract"; -import {ListIterator} from "../interface"; import {ArrayType} from "./arrayBasic"; /** Expected API of this View's type. This interface allows to break a recursive dependency between types and views */ @@ -90,22 +89,6 @@ export class ArrayCompositeTreeView< return views; } - /** - * Similar to getAllReadonly but support ListIterator interface. - * Use ReusableListIterator to reuse over multiple calls. - */ - getAllReadonlyIter(views?: ListIterator>): ListIterator> { - const length = this.length; - const chunksNode = this.type.tree_getChunksNode(this.node); - const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); - views = views ?? new Array>(); - for (let i = 0; i < length; i++) { - // TODO: Optimize - views.push(this.type.elementType.getView(new Tree(nodes[i]))); - } - return views; - } - /** * Returns an array of values of all elements in the array, from index zero to `this.length - 1`. * The returned values are not Views so any changes won't be propagated upwards. @@ -124,19 +107,4 @@ export class ArrayCompositeTreeView< } return values; } - - /** - * Similar to getAllReadonlyValues but support ListIterator interface. 
- * Use ReusableListIterator to reuse over multiple calls. - */ - getAllReadonlyValuesIter(values?: ListIterator>): ListIterator> { - const length = this.length; - const chunksNode = this.type.tree_getChunksNode(this.node); - const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); - values = values ?? new Array>(); - for (let i = 0; i < length; i++) { - values.push(this.type.elementType.tree_toValue(nodes[i])); - } - return values; - } } diff --git a/packages/ssz/test/perf/util/reusableListIterator.test.ts b/packages/ssz/test/perf/util/reusableListIterator.test.ts deleted file mode 100644 index 4417077e..00000000 --- a/packages/ssz/test/perf/util/reusableListIterator.test.ts +++ /dev/null @@ -1,44 +0,0 @@ -import {itBench} from "@dapplion/benchmark"; -import {ReusableListIterator} from "../../../src"; -import {Validator} from "../../lodestarTypes/phase0"; -import {getValidator} from "../../utils/generateEth2Objs"; - -/** - * This test create validator object every time intentionally, this mimics an environment where there are a lot of memory allocation. - * On average, Array is very fast, however it's pretty expensive to allocate a big array and it may cause a spike due to gc randomly. - * ReusableListIterator is faster in average and it's more stable due to no memory allocation involved. - * ReusableListIterator - ✓ ReusableListIterator 2000000 items 0.5724982 ops/s 1.746731 s/op - 13 runs 24.1 s - ✓ Array 2000000 items 0.4655988 ops/s 2.147772 s/op - 14 runs 32.0 s - */ -describe("ReusableListIterator", function () { - const length = 2_000_000; - const list = new ReusableListIterator(); - itBench({ - id: `ReusableListIterator ${length} items`, - fn: () => { - // reusable, just reset - list.reset(); - for (let i = 0; i < length; i++) { - list.push(getValidator(i)); - } - for (const a of list) { - a; - } - }, - }); - - itBench({ - id: `Array ${length} items`, - fn: () => { - // allocate every time - const arr = new Array(length); - for (let i = 0; i < length; i++) { - arr[i] = getValidator(i); - } - for (const a of arr) { - a; - } - }, - }); -}); diff --git a/packages/ssz/test/unit/util/reusableListIterator.test.ts b/packages/ssz/test/unit/util/reusableListIterator.test.ts deleted file mode 100644 index 8af8140e..00000000 --- a/packages/ssz/test/unit/util/reusableListIterator.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import {expect} from "chai"; -import {ReusableListIterator} from "../../../src/util/reusableListIterator"; - -describe("ReusableListIterator", () => { - let list: ReusableListIterator; - - beforeEach(() => { - list = new ReusableListIterator(); - list.push(0); - }); - - it("should reset", () => { - list.reset(); - expect(list.length).to.be.equal(0); - expect(list.totalLength).to.be.equal(1); - expect(list.toArray()).to.be.deep.equal([]); - }); - - it("should push", () => { - list.push(1); - expect(list.length).to.be.equal(2); - expect(list.totalLength).to.be.equal(2); - const arr = list.toArray(); - expect(arr.length).to.be.equal(2); - expect(arr).to.be.deep.equal([0, 1]); - }); - - it("reset then push full", () => { - list.push(1); - list.reset(); - list.push(1); - list.push(2); - list.clean(); - expect(list.length).to.be.equal(2); - expect(list.totalLength).to.be.equal(2); - const arr = list.toArray(); - expect(arr).to.be.deep.equal([1, 2]); - }); - - it("reset then push partial", () => { - list.push(1); - // totalLength = 2 now - list.reset(); - list.push(1); - list.clean(); - expect(list.length).to.be.equal(1); - expect(list.totalLength).to.be.equal(2); - 
const arr = list.toArray(); - expect(arr).to.be.deep.equal([1]); - }); - - it("clean", () => { - list.push(1); - list.reset(); - list.push(1); - list.clean(); - expect(list.length).to.be.equal(1); - expect(list.totalLength).to.be.equal(2); - const arr = list.toArray(); - expect(arr).to.be.deep.equal([1]); - const all = list.dump(); - expect(all).to.be.deep.equal([1, null]); - }); -}); From f17019e23458481c1d75d567359acfedc3117cd4 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 14 Oct 2024 14:54:51 +0700 Subject: [PATCH 105/113] fix: dedup executeHashComputations - hashtree perf test --- .../test/perf/hasher.test.ts | 39 ------------------- 1 file changed, 39 deletions(-) diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index 8e61634f..3c5dba2c 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -64,42 +64,3 @@ describe("hasher", function () { }); } }); - -describe("hashtree", function () { - itBench({ - id: `getHashComputations`, - beforeEach: () => { - const [tree] = buildComparisonTrees(16); - return tree; - }, - fn: (tree) => { - const hcByLevel: HashComputationLevel[] = []; - getHashComputations(tree, 0, hcByLevel); - }, - }); - - itBench({ - id: `executeHashComputations - hashtree`, - beforeEach: () => { - const [tree] = buildComparisonTrees(16); - return tree; - }, - fn: (tree) => { - const hcByLevel: HashComputationLevel[] = []; - getHashComputations(tree, 0, hcByLevel); - hashtreeHasher.executeHashComputations(hcByLevel); - }, - }); - - itBench({ - id: `root - hashtree`, - beforeEach: () => { - const [tree] = buildComparisonTrees(16); - setHasher(hashtreeHasher); - return tree; - }, - fn: (tree) => { - tree.root; - }, - }); -}); From 79152c47e401023ac5a8073b0adb7c9098381f86 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Mon, 14 Oct 2024 15:52:53 +0700 Subject: [PATCH 106/113] fix: no memory allocation in StableContainer Profile hashTreeRootInto() --- packages/ssz/src/type/composite.ts | 1 + packages/ssz/src/type/listComposite.ts | 7 ++++++- packages/ssz/src/type/profile.ts | 11 +++++------ packages/ssz/src/type/stableContainer.ts | 11 +++++------ 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index 8ec9314d..e2cd16bb 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -240,6 +240,7 @@ export abstract class CompositeType extends Type { const merkleBytes = this.getChunkBytes(value); merkleizeInto(merkleBytes, this.maxChunkCount, output, offset); if (this.cachePermanentRootStruct) { + // Buffer.prototype.slice does not copy memory, Enforce Uint8Array usage https://github.com/nodejs/node/issues/28087 const cachedRoot = Uint8Array.prototype.slice.call(output, offset, offset + 32); (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = cachedRoot; } diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index 17e21954..a8ca7db5 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -207,7 +207,12 @@ export class ListCompositeType< merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.slice(offset, offset + 32); + // Buffer.prototype.slice does not copy memory, 
Enforce Uint8Array usage https://github.com/nodejs/node/issues/28087 + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = Uint8Array.prototype.slice.call( + output, + offset, + offset + 32 + ); } } diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index 03941e4b..fb34bfa2 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -34,7 +34,6 @@ import {Case} from "../util/strings"; import {BitArray} from "../value/bitArray"; import {mixInActiveFields, setActiveFields} from "./stableContainer"; import {NonOptionalFields, isOptionalType, toNonOptionalType} from "./optional"; -import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ type BytesRange = {start: number; end: number}; @@ -89,6 +88,8 @@ export class ProfileType>> extends C protected readonly TreeView: ContainerTreeViewTypeConstructor; protected readonly TreeViewDU: ContainerTreeViewDUTypeConstructor; private optionalFieldsCount: number; + // temporary root to avoid memory allocation + private tempRoot = new Uint8Array(32); constructor(readonly fields: Fields, activeFields: BitArray, readonly opts?: ProfileOptions) { super(); @@ -378,13 +379,11 @@ export class ProfileType>> extends C } const merkleBytes = this.getChunkBytes(value); - const root = allocUnsafe(32); - merkleizeInto(merkleBytes, this.maxChunkCount, root, 0); - mixInActiveFields(root, this.activeFields, root, 0); - output.set(root, offset); + merkleizeInto(merkleBytes, this.maxChunkCount, this.tempRoot, 0); + mixInActiveFields(this.tempRoot, this.activeFields, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = this.tempRoot.slice(); } } diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index 334fa66a..4a4f0186 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -38,7 +38,6 @@ import { import {Case} from "../util/strings"; import {isOptionalType, toNonOptionalType, NonOptionalFields} from "./optional"; import {BitArray} from "../value/bitArray"; -import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ type BytesRange = {start: number; end: number}; @@ -94,6 +93,8 @@ export class StableContainerType>> e protected readonly TreeView: ContainerTreeViewTypeConstructor; protected readonly TreeViewDU: ContainerTreeViewDUTypeConstructor; private padActiveFields: boolean[]; + // temporary root to avoid memory allocation + private tempRoot = new Uint8Array(32); constructor(fields: Fields, readonly maxFields: number, readonly opts?: StableContainerOptions) { super(); @@ -351,18 +352,16 @@ export class StableContainerType>> e } const merkleBytes = this.getChunkBytes(value); - const root = allocUnsafe(32); - merkleizeInto(merkleBytes, this.maxChunkCount, root, 0); + merkleizeInto(merkleBytes, this.maxChunkCount, this.tempRoot, 0); // compute active field bitvector const activeFields = BitArray.fromBoolArray([ ...this.fieldsEntries.map(({fieldName}) => value[fieldName] != null), ...this.padActiveFields, ]); - mixInActiveFields(root, activeFields, root, 0); - output.set(root, offset); + mixInActiveFields(this.tempRoot, activeFields, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + 
(value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = this.tempRoot.slice(); } } From d03c72012a198a474c9866c4d3aace1121e89d35 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 15 Oct 2024 09:05:06 +0700 Subject: [PATCH 107/113] chore: revert unnecessary change --- .../ssz/test/perf/byType/listComposite.test.ts | 18 +++++++++--------- packages/ssz/test/unit/load_state.test.ts | 18 ------------------ 2 files changed, 9 insertions(+), 27 deletions(-) delete mode 100644 packages/ssz/test/unit/load_state.test.ts diff --git a/packages/ssz/test/perf/byType/listComposite.test.ts b/packages/ssz/test/perf/byType/listComposite.test.ts index cef8ee6f..dbf75988 100644 --- a/packages/ssz/test/perf/byType/listComposite.test.ts +++ b/packages/ssz/test/perf/byType/listComposite.test.ts @@ -20,33 +20,33 @@ describe("ListCompositeType types", () => { }); } - for (const [i, type] of [containerType, containerNodeStructType].entries()) { - const listType = new ListCompositeType(type, 2 ** 40, { - typeName: `List(${i === 0 ? "Container" : "ContainerNodeStruct"})`, - }); - const viewDU = listType.toViewDU(newFilledArray(len, {a: 1, b: 2})); + for (const type of [ + new ListCompositeType(containerType, 2 ** 40, {typeName: "List(Container)"}), + new ListCompositeType(containerNodeStructType, 2 ** 40, {typeName: "List(ContainerNodeStruct)"}), + ]) { + const viewDU = type.toViewDU(newFilledArray(len, {a: 1, b: 2})); - itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonly() + iterate`, () => { + itBench(`${type.typeName} len ${len} ViewDU.getAllReadonly() + iterate`, () => { const values = viewDU.getAllReadonly(); for (let i = 0; i < len; i++) { values[i]; } }); - itBench(`${listType.typeName} len ${len} ViewDU.getAllReadonlyValues() + iterate`, () => { + itBench(`${type.typeName} len ${len} ViewDU.getAllReadonlyValues() + iterate`, () => { const values = viewDU.getAllReadonlyValues(); for (let i = 0; i < len; i++) { values[i]; } }); - itBench(`${listType.typeName} len ${len} ViewDU.get(i)`, () => { + itBench(`${type.typeName} len ${len} ViewDU.get(i)`, () => { for (let i = 0; i < len; i++) { viewDU.get(i); } }); - itBench(`${listType.typeName} len ${len} ViewDU.getReadonly(i)`, () => { + itBench(`${type.typeName} len ${len} ViewDU.getReadonly(i)`, () => { for (let i = 0; i < len; i++) { viewDU.getReadonly(i); } diff --git a/packages/ssz/test/unit/load_state.test.ts b/packages/ssz/test/unit/load_state.test.ts deleted file mode 100644 index cfee3a4c..00000000 --- a/packages/ssz/test/unit/load_state.test.ts +++ /dev/null @@ -1,18 +0,0 @@ -import fs from "fs"; -import {BeaconState} from "../lodestarTypes/deneb/sszTypes"; -import {toHexString} from "../../src/util/byteArray"; - -describe.skip("load holesky state", function () { - this.timeout(0); - const stateFilePath = "/Users/tuyennguyen/Downloads/holesky_finalized_state.ssz"; - it("should load state from file", function () { - const stateBytes = fs.readFileSync(stateFilePath); - console.log("@@@ stateBytes", stateBytes.length); - const now = Date.now(); - const wsState = BeaconState.deserializeToViewDU(stateBytes); - console.log("@@@ got wsState slot", wsState.slot, "in", Date.now() - now, "ms"); - wsState.node.root; - // now = Date.now(); - console.log("@@@ hashTreeRoot", toHexString(wsState.hashTreeRoot()), "in", Date.now() - now, "ms"); - }); -}); From c7e03b8a74745e1d5fc0ee4aa3c506e7597067aa Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Tue, 15 Oct 2024 09:50:34 +0700 Subject: [PATCH 108/113] fix: cache same root instance from 
CompositeType.hashTreeRoot() --- packages/ssz/src/type/composite.ts | 11 +++++----- packages/ssz/src/type/listBasic.ts | 9 ++++---- packages/ssz/src/type/listComposite.ts | 14 +++++------- packages/ssz/src/type/profile.ts | 6 ++--- packages/ssz/src/type/stableContainer.ts | 6 ++--- packages/ssz/src/util/merkleize.ts | 22 +++++++++++++++++++ .../unit/cachePermanentRootStruct.test.ts | 6 ++--- 7 files changed, 45 insertions(+), 29 deletions(-) diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index e2cd16bb..9f33ca5e 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -12,7 +12,7 @@ import { HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; import {byteArrayEquals} from "../util/byteArray"; -import {symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; +import {cacheRoot, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {treePostProcessFromProofNode} from "../util/proof/treePostProcessFromProofNode"; import {Type, ByteViews, JsonPath, JsonPathProp} from "./abstract"; export {ByteViews}; @@ -220,14 +220,15 @@ export abstract class CompositeType extends Type { } const root = allocUnsafe(32); - this.hashTreeRootInto(value, root, 0); + const safeCache = true; + this.hashTreeRootInto(value, root, 0, safeCache); // hashTreeRootInto will cache the root if cachePermanentRootStruct is true return root; } - hashTreeRootInto(value: V, output: Uint8Array, offset: number): void { + hashTreeRootInto(value: V, output: Uint8Array, offset: number, safeCache = false): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; @@ -240,9 +241,7 @@ export abstract class CompositeType extends Type { const merkleBytes = this.getChunkBytes(value); merkleizeInto(merkleBytes, this.maxChunkCount, output, offset); if (this.cachePermanentRootStruct) { - // Buffer.prototype.slice does not copy memory, Enforce Uint8Array usage https://github.com/nodejs/node/issues/28087 - const cachedRoot = Uint8Array.prototype.slice.call(output, offset, offset + 32); - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = cachedRoot; + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } } diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index 5328f3fd..4dd63f08 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -10,7 +10,7 @@ import { addLengthNode, setChunksNode, } from "./arrayBasic"; -import {maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; +import {cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ArrayBasicType} from "../view/arrayBasic"; @@ -176,14 +176,15 @@ export class ListBasicType> } const root = allocUnsafe(32); - this.hashTreeRootInto(value, root, 0); + const safeCache = true; + this.hashTreeRootInto(value, root, 0, safeCache); // hashTreeRootInto will cache the root if cachePermanentRootStruct is true return root; } - hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number): void { + hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number, safeCache = false): void { if (this.cachePermanentRootStruct) { const 
cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; if (cachedRoot) { @@ -200,7 +201,7 @@ export class ListBasicType> merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = output.subarray(offset, offset + 32).slice(); + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } } diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index a8ca7db5..821b9504 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,5 +1,5 @@ import {HashComputationLevel, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; -import {maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; +import {cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ValueOf, ByteViews} from "./abstract"; @@ -183,14 +183,15 @@ export class ListCompositeType< } const root = allocUnsafe(32); - this.hashTreeRootInto(value, root, 0); + const safeCache = true; + this.hashTreeRootInto(value, root, 0, safeCache); // hashTreeRootInto will cache the root if cachePermanentRootStruct is true return root; } - hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number): void { + hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number, safeCache = false): void { if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; if (cachedRoot) { @@ -207,12 +208,7 @@ export class ListCompositeType< merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); if (this.cachePermanentRootStruct) { - // Buffer.prototype.slice does not copy memory, Enforce Uint8Array usage https://github.com/nodejs/node/issues/28087 - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = Uint8Array.prototype.slice.call( - output, - offset, - offset + 32 - ); + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } } diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index fb34bfa2..1ac440d7 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -12,7 +12,7 @@ import { zeroHash, zeroNode, } from "@chainsafe/persistent-merkle-tree"; -import {ValueWithCachedPermanentRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; +import {ValueWithCachedPermanentRoot, cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {Type, ValueOf} from "./abstract"; @@ -368,7 +368,7 @@ export class ProfileType>> extends C // Merkleization // hashTreeRoot is the same to parent as it call hashTreeRootInto() - hashTreeRootInto(value: ValueOfFields, output: Uint8Array, offset: number): void { + hashTreeRootInto(value: ValueOfFields, output: Uint8Array, offset: number, safeCache = false): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; @@ -383,7 +383,7 @@ export class ProfileType>> extends C mixInActiveFields(this.tempRoot, this.activeFields, output, offset); if 
(this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = this.tempRoot.slice(); + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } } diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index 4a4f0186..35415ad8 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -17,7 +17,7 @@ import { setNode, setNodeWithFn, } from "@chainsafe/persistent-merkle-tree"; -import {ValueWithCachedPermanentRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; +import {ValueWithCachedPermanentRoot, cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {JsonPath, Type, ValueOf} from "./abstract"; @@ -341,7 +341,7 @@ export class StableContainerType>> e // Merkleization // hashTreeRoot is the same to parent as it call hashTreeRootInto() - hashTreeRootInto(value: ValueOfFields, output: Uint8Array, offset: number): void { + hashTreeRootInto(value: ValueOfFields, output: Uint8Array, offset: number, safeCache = false): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; @@ -361,7 +361,7 @@ export class StableContainerType>> e mixInActiveFields(this.tempRoot, activeFields, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = this.tempRoot.slice(); + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } } diff --git a/packages/ssz/src/util/merkleize.ts b/packages/ssz/src/util/merkleize.ts index 11301d92..932e80d7 100644 --- a/packages/ssz/src/util/merkleize.ts +++ b/packages/ssz/src/util/merkleize.ts @@ -8,6 +8,28 @@ export type ValueWithCachedPermanentRoot = { [symbolCachedPermanentRoot]?: Uint8Array; }; +/** + * Cache a root for a ValueWithCachedPermanentRoot instance + * - if safeCache is true and output is 32 bytes and offset is 0, use output directly + * - if safeCache, use output subarray + * - otherwise, need to clone the root at output offset + */ +export function cacheRoot( + value: ValueWithCachedPermanentRoot, + output: Uint8Array, + offset: number, + safeCache: boolean +): void { + const cachedRoot = + safeCache && output.length === 32 && offset === 0 + ? output + : safeCache + ? 
output.subarray(offset, offset + 32) + : // Buffer.prototype.slice does not copy memory, Enforce Uint8Array usage https://github.com/nodejs/node/issues/28087 + Uint8Array.prototype.slice.call(output, offset, offset + 32); + value[symbolCachedPermanentRoot] = cachedRoot; +} + export function hash64(bytes32A: Uint8Array, bytes32B: Uint8Array): Uint8Array { return hasher.digest64(bytes32A, bytes32B); } diff --git a/packages/ssz/test/unit/cachePermanentRootStruct.test.ts b/packages/ssz/test/unit/cachePermanentRootStruct.test.ts index 1e3fc0cf..169557a7 100644 --- a/packages/ssz/test/unit/cachePermanentRootStruct.test.ts +++ b/packages/ssz/test/unit/cachePermanentRootStruct.test.ts @@ -11,10 +11,8 @@ describe("cachePermanentRootStruct", () => { const root = type.hashTreeRoot(value); const root2 = type.hashTreeRoot(value); - // previously this is the same reference, since we move to merkleizeInto() it is not anymore - // this should not be an issue anyway - expect(root).to.deep.equal(root2, "Second hashTreeRoot should return the same Uint8Array"); + expect(root).to.equal(root2, "Second hashTreeRoot should return the same Uint8Array"); - expect(type["getCachedPermanentRoot"](value)).to.deep.equal(root, "Should have cached root"); + expect(type["getCachedPermanentRoot"](value)).to.equal(root, "Should have cached root"); }); }); From e0e3173a70ff8ea642b05448ba73e10eb7449364 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Fri, 18 Oct 2024 17:06:50 +0700 Subject: [PATCH 109/113] feat: hashtree as default hasher --- packages/persistent-merkle-tree/src/hasher/index.ts | 4 ++-- packages/ssz/src/type/listUintNum64.ts | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 75442232..d55e7421 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -1,5 +1,5 @@ import {Hasher} from "./types"; -import {hasher as nobleHasher} from "./noble"; +import {hasher as hashtreeHasher} from "./hashtree"; import type {HashComputationLevel} from "../hashComputation"; export * from "./types"; @@ -8,7 +8,7 @@ export * from "./util"; /** * Hasher used across the SSZ codebase, by default, this does not support batch hash. */ -export let hasher: Hasher = nobleHasher; +export let hasher: Hasher = hashtreeHasher; /** * Set the hasher to be used across the SSZ codebase diff --git a/packages/ssz/src/type/listUintNum64.ts b/packages/ssz/src/type/listUintNum64.ts index 4ee4826b..154493ca 100644 --- a/packages/ssz/src/type/listUintNum64.ts +++ b/packages/ssz/src/type/listUintNum64.ts @@ -133,7 +133,6 @@ function forceGetHashComputations( return; } - // if (node.h0 === null) { const hashComputations = levelAtIndex(hcByLevel, index); const {left, right} = node; hashComputations.push(left, right, node); From 4f7a0a48467f99bd40d793ecb07a0a03d28c83d3 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 31 Oct 2024 13:11:50 +0700 Subject: [PATCH 110/113] Revert "feat: hashtree as default hasher" This reverts commit e0e3173a70ff8ea642b05448ba73e10eb7449364. 
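Patch 109 above switched the package-wide default to the hashtree hasher, and this patch reverts it (diff below), so noble stays the default pure-JS implementation. Downstream users can still opt in to the SIMD-backed hashtree hasher explicitly through the exported setHasher(). A minimal sketch; the deep import path for the hashtree hasher module is an assumption, not something this diff pins down:

import {setHasher} from "@chainsafe/persistent-merkle-tree";
// assumed path: the hasher implementations live under src/hasher/ in this package
import {hasher as hashtreeHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree";

// Swap the process-wide hasher once at startup, before any roots are computed;
// every ssz type reads the shared `hasher` binding, so nothing else needs configuring.
setHasher(hashtreeHasher);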
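A note on the cacheRoot() helper that patch 108 above introduced: hashTreeRoot() owns the fresh allocUnsafe(32) buffer it writes into, so it passes safeCache = true and the cache may alias that exact Uint8Array, while hashTreeRootInto() keeps the default safeCache = false and stores a copy, since the caller may reuse its output buffer. A sketch of the observable behavior; the Checkpoint-like type is illustrative, but the option name matches the cachePermanentRootStruct test updated above:

import {ContainerType, UintNumberType} from "@chainsafe/ssz";

// hypothetical two-field container, with root caching enabled on struct values
const Checkpoint = new ContainerType(
  {epoch: new UintNumberType(8), index: new UintNumberType(8)},
  {cachePermanentRootStruct: true}
);

const value = Checkpoint.defaultValue();
const rootA = Checkpoint.hashTreeRoot(value); // fresh 32-byte root, cached with safeCache = true
const rootB = Checkpoint.hashTreeRoot(value); // served straight from the cache
console.log(rootA === rootB); // true: the same Uint8Array instance, as the updated test asserts

Writing into a caller-provided buffer with hashTreeRootInto(value, out, offset) instead clones the 32 bytes before caching, so later writes to out cannot corrupt the cached root.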
--- packages/persistent-merkle-tree/src/hasher/index.ts | 4 ++-- packages/ssz/src/type/listUintNum64.ts | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index d55e7421..75442232 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -1,5 +1,5 @@ import {Hasher} from "./types"; -import {hasher as hashtreeHasher} from "./hashtree"; +import {hasher as nobleHasher} from "./noble"; import type {HashComputationLevel} from "../hashComputation"; export * from "./types"; @@ -8,7 +8,7 @@ export * from "./util"; /** * Hasher used across the SSZ codebase, by default, this does not support batch hash. */ -export let hasher: Hasher = hashtreeHasher; +export let hasher: Hasher = nobleHasher; /** * Set the hasher to be used across the SSZ codebase diff --git a/packages/ssz/src/type/listUintNum64.ts b/packages/ssz/src/type/listUintNum64.ts index 154493ca..4ee4826b 100644 --- a/packages/ssz/src/type/listUintNum64.ts +++ b/packages/ssz/src/type/listUintNum64.ts @@ -133,6 +133,7 @@ function forceGetHashComputations( return; } + // if (node.h0 === null) { const hashComputations = levelAtIndex(hcByLevel, index); const {left, right} = node; hashComputations.push(left, right, node); From 3249906cce78f80b5a8aaa44931856e2ec0e8919 Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 31 Oct 2024 13:51:28 +0700 Subject: [PATCH 111/113] refactor: chunkBytesBuffer to blocksBuffer to reflect SHA256 blocks --- .../src/hasher/as-sha256.ts | 4 +-- .../src/hasher/hashtree.ts | 4 +-- .../src/hasher/index.ts | 4 +-- .../src/hasher/noble.ts | 4 +-- .../src/hasher/types.ts | 6 ++-- .../persistent-merkle-tree/src/hasher/util.ts | 30 +++++++++++-------- packages/ssz/src/type/bitArray.ts | 12 ++++---- packages/ssz/src/type/byteArray.ts | 24 +++++++-------- packages/ssz/src/type/composite.ts | 10 +++---- packages/ssz/src/type/container.ts | 8 ++--- packages/ssz/src/type/listBasic.ts | 10 +++---- packages/ssz/src/type/listComposite.ts | 10 +++---- packages/ssz/src/type/optional.ts | 10 +++---- packages/ssz/src/type/profile.ts | 16 +++++----- packages/ssz/src/type/stableContainer.ts | 16 +++++----- packages/ssz/src/type/union.ts | 8 ++--- packages/ssz/src/type/vectorBasic.ts | 8 ++--- packages/ssz/src/type/vectorComposite.ts | 6 ++-- packages/ssz/test/spec/runValidTest.ts | 4 +-- 19 files changed, 99 insertions(+), 95 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 943f9b49..48d76304 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -14,8 +14,8 @@ export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects: digest64HashObjectsInto, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(data, padFor, output, offset, hashInto); + merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeInto(blocksBytes, padFor, output, offset, hashInto); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index f578d50c..e96fecdf 100644 --- 
a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -40,8 +40,8 @@ export const hasher: Hasher = { hashInto(hash64Input, hash64Output); byteArrayIntoHashObject(hash64Output, 0, parent); }, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(data, padFor, output, offset, hashInto); + merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeInto(blocksBytes, padFor, output, offset, hashInto); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 75442232..6093e5d8 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -27,8 +27,8 @@ export function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return hasher.digestNLevel(data, nLevel); } -export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - hasher.merkleizeInto(data, padFor, output, offset); +export function merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + hasher.merkleizeInto(blocksBytes, padFor, output, offset); } export function executeHashComputations(hashComputations: HashComputationLevel[]): void { diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index eb1224b6..43c40aa6 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -28,8 +28,8 @@ export const hasher: Hasher = { digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), 0, parent); }, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(data, padFor, output, offset, hashInto); + merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeInto(blocksBytes, padFor, output, offset, hashInto); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 9f5813f0..dcbd2a3d 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -15,11 +15,11 @@ export type Hasher = { */ digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void; /** - * Merkleize n chunk of data, 32 bytes each + * Merkleize n SHA256 blocks, each block is 64 bytes * padFor is maxChunkCount, use it to compute layers to hash - * data is mutated after the function + * blocksBytes is mutated after the function */ - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; + merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; /** * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes. 
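The merkleizeInto() contract documented above is easy to get wrong: padFor (the max chunk count) drives how many layers are hashed, while blocksBytes must already be padded to whole 64-byte blocks. A small worked sketch against the package-level re-export shown in the index.ts hunk above:

import {merkleizeInto} from "@chainsafe/persistent-merkle-tree";

// 3 chunks => padFor = 3, layers = ceil(log2(3)) = 2, so the leaf layer is
// padded to 4 chunks = 2 SHA256 blocks = 128 bytes (a multiple of 64).
const padFor = 3;
const blocksBytes = new Uint8Array(128);
blocksBytes[0] = 1; // chunk 0
blocksBytes[32] = 2; // chunk 1
blocksBytes[64] = 3; // chunk 2; chunk 3 stays zeroed as padding
const output = new Uint8Array(32);
merkleizeInto(blocksBytes, padFor, output, 0);
// output[0..32) = hash(hash(c0, c1), hash(c2, zero)); blocksBytes is scratch afterwards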
diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts
index a028253c..8f259e14 100644
--- a/packages/persistent-merkle-tree/src/hasher/util.ts
+++ b/packages/persistent-merkle-tree/src/hasher/util.ts
@@ -14,13 +14,15 @@ export function uint8ArrayToHashObject(byteArr: Uint8Array): HashObject {
 type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void;
 
 /**
- * Input data is unsafe because it's modified
- * If its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need
- * a new memory allocation here (even through we don't need it if padFor = 1)
+ * A SHA256 block is 64 bytes
+ * - if padFor > 1, blocksBytes needs to be a multiple of 64 bytes.
+ * - if padFor = 1, blocksBytes needs to be at least 32 bytes
+ * - if padFor = 0, throws an error
+ * blocksBytes is unsafe because it's modified
  * The Uint8Array(32) will be written to output at offset
  */
 export function doMerkleizeInto(
-  data: Uint8Array,
+  blocksBytes: Uint8Array,
   padFor: number,
   output: Uint8Array,
   offset: number,
@@ -31,32 +33,34 @@ export function doMerkleizeInto(
   }
 
   const layerCount = Math.ceil(Math.log2(padFor));
-  if (data.length === 0) {
+  if (blocksBytes.length === 0) {
     output.set(zeroHash(layerCount), offset);
     return;
   }
 
-  if (data.length % 32 !== 0) {
-    throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${data.length}`);
+  if (blocksBytes.length % 32 !== 0) {
+    throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${blocksBytes.length}`);
   }
 
   // if padFor = 1, only need 32 bytes
-  if (padFor > 1 && data.length % 64 !== 0) {
-    throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}, padFor=${padFor}`);
+  if (padFor > 1 && blocksBytes.length % 64 !== 0) {
+    throw new Error(
+      `Invalid input length, expect to be multiple of 64 bytes, got ${blocksBytes.length}, padFor=${padFor}`
+    );
   }
 
-  let inputLength = data.length;
+  let inputLength = blocksBytes.length;
   let outputLength = Math.floor(inputLength / 2);
-  let bufferIn = data;
+  let bufferIn = blocksBytes;
   // hash into the same buffer
   for (let i = 0; i < layerCount; i++) {
-    const bufferOut = data.subarray(0, outputLength);
+    const bufferOut = blocksBytes.subarray(0, outputLength);
     hashInto(bufferIn, bufferOut);
     const chunkCount = Math.floor(outputLength / 32);
     if (chunkCount % 2 === 1 && i < layerCount - 1) {
       // extend to 1 more chunk
       inputLength = outputLength + 32;
-      bufferIn = data.subarray(0, inputLength);
+      bufferIn = blocksBytes.subarray(0, inputLength);
       bufferIn.set(zeroHash(i + 1), outputLength);
     } else {
       bufferIn = bufferOut;
diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts
index d485de27..27a61d2a 100644
--- a/packages/ssz/src/type/bitArray.ts
+++ b/packages/ssz/src/type/bitArray.ts
@@ -4,7 +4,7 @@ import {CompositeType, LENGTH_GINDEX} from "./composite";
 import {BitArray} from "../value/bitArray";
 import {BitArrayTreeView} from "../view/bitArray";
 import {BitArrayTreeViewDU} from "../viewDU/bitArray";
-import {getChunkBytes} from "./byteArray";
+import {getBlockBytes} from "./byteArray";
 
 /* eslint-disable @typescript-eslint/member-ordering */
 
@@ -40,15 +40,15 @@ export abstract class BitArrayType extends CompositeType<BitArray, BitArrayTreeView, BitArrayTreeViewDU> {
-  protected getChunkBytes(value: BitArray): Uint8Array {
-    // reallocate this.chunkBytesBuffer if needed
-    if (value.uint8Array.length > this.chunkBytesBuffer.length) {
+  protected getBlocksBytes(value: BitArray): Uint8Array {
+    // reallocate this.blocksBuffer if needed
+    if (value.uint8Array.length > this.blocksBuffer.length) {
       const chunkCount = Math.ceil(value.bitLen / 8 / 32);
       const chunkBytes = chunkCount * 32;
       // pad 1 chunk if maxChunkCount is not even
-      this.chunkBytesBuffer = chunkCount % 2 === 1 ? new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes);
+      this.blocksBuffer = chunkCount % 2 === 1 ? new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes);
     }
-    return getChunkBytes(value.uint8Array, this.chunkBytesBuffer);
+    return getBlockBytes(value.uint8Array, this.blocksBuffer);
   }
 
   // Proofs
diff --git a/packages/ssz/src/type/byteArray.ts b/packages/ssz/src/type/byteArray.ts
index 78e6ae30..4a3edb19 100644
--- a/packages/ssz/src/type/byteArray.ts
+++ b/packages/ssz/src/type/byteArray.ts
@@ -89,15 +89,15 @@ export abstract class ByteArrayType extends CompositeType<ByteArray, ByteArray, ByteArray> {
-  protected getChunkBytes(value: ByteArray): Uint8Array {
-    // reallocate this.chunkBytesBuffer if needed
-    if (value.length > this.chunkBytesBuffer.length) {
+  protected getBlocksBytes(value: ByteArray): Uint8Array {
+    // reallocate this.blocksBuffer if needed
+    if (value.length > this.blocksBuffer.length) {
       const chunkCount = Math.ceil(value.length / 32);
       const chunkBytes = chunkCount * 32;
       // pad 1 chunk if maxChunkCount is not even
-      this.chunkBytesBuffer = chunkCount % 2 === 1 ? new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes);
+      this.blocksBuffer = chunkCount % 2 === 1 ? new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes);
     }
-    return getChunkBytes(value, this.chunkBytesBuffer);
+    return getBlockBytes(value, this.blocksBuffer);
   }
 
   // Proofs
@@ -162,15 +162,15 @@ export abstract class ByteArrayType extends CompositeType<ByteArray, ByteArray, ByteArray> {
-export function getChunkBytes(data: Uint8Array, merkleBytesBuffer: Uint8Array): Uint8Array {
-  if (data.length > merkleBytesBuffer.length) {
-    throw new Error(`data length ${data.length} exceeds merkleBytesBuffer length ${merkleBytesBuffer.length}`);
+export function getBlockBytes(value: Uint8Array, blocksBuffer: Uint8Array): Uint8Array {
+  if (value.length > blocksBuffer.length) {
+    throw new Error(`data length ${value.length} exceeds blocksBuffer length ${blocksBuffer.length}`);
   }
-  merkleBytesBuffer.set(data);
-  const valueLen = data.length;
+  blocksBuffer.set(value);
+  const valueLen = value.length;
   const chunkByteLen = Math.ceil(valueLen / 64) * 64;
   // all padding bytes must be zero, this is similar to set zeroHash(0)
-  merkleBytesBuffer.subarray(valueLen, chunkByteLen).fill(0);
-  return merkleBytesBuffer.subarray(0, chunkByteLen);
+  blocksBuffer.subarray(valueLen, chunkByteLen).fill(0);
+  return blocksBuffer.subarray(0, chunkByteLen);
 }
diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts
index 9f33ca5e..d5d1821e 100644
--- a/packages/ssz/src/type/composite.ts
+++ b/packages/ssz/src/type/composite.ts
@@ -61,7 +61,7 @@ export abstract class CompositeType extends Type {
    * Required for ContainerNodeStruct to ensure no dangerous types are constructed.
    */
   abstract readonly isViewMutable: boolean;
-  protected chunkBytesBuffer = new Uint8Array(0);
+  protected blocksBuffer = new Uint8Array(0);
 
   constructor(
     /**
@@ -238,8 +238,8 @@ export abstract class CompositeType extends Type {
     }
 
-    const merkleBytes = this.getChunkBytes(value);
-    merkleizeInto(merkleBytes, this.maxChunkCount, output, offset);
+    const blocksBuffer = this.getBlocksBytes(value);
+    merkleizeInto(blocksBuffer, this.maxChunkCount, output, offset);
     if (this.cachePermanentRootStruct) {
       cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache);
     }
   }
@@ -258,10 +258,10 @@ export abstract class CompositeType extends Type {
   // to hashObject and back.
 
   /**
-   * Get merkle bytes of each value, the returned Uint8Array should be multiple of 64 bytes.
+   * Get multiple SHA256 blocks, each is 64 bytes long.
* If chunk count is not even, need to append zeroHash(0) */ - protected abstract getChunkBytes(value: V): Uint8Array; + protected abstract getBlocksBytes(value: V): Uint8Array; // Proofs API diff --git a/packages/ssz/src/type/container.ts b/packages/ssz/src/type/container.ts index 1ed46a89..ac3b5912 100644 --- a/packages/ssz/src/type/container.ts +++ b/packages/ssz/src/type/container.ts @@ -132,7 +132,7 @@ export class ContainerType>> extends this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? getContainerTreeViewDUClass(this); const fieldBytes = this.fieldsEntries.length * 32; const chunkBytes = Math.ceil(fieldBytes / 64) * 64; - this.chunkBytesBuffer = new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(chunkBytes); } static named>>( @@ -275,13 +275,13 @@ export class ContainerType>> extends // Merkleization - protected getChunkBytes(struct: ValueOfFields): Uint8Array { + protected getBlocksBytes(struct: ValueOfFields): Uint8Array { for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType} = this.fieldsEntries[i]; - fieldType.hashTreeRootInto(struct[fieldName], this.chunkBytesBuffer, i * 32); + fieldType.hashTreeRootInto(struct[fieldName], this.blocksBuffer, i * 32); } // remaining bytes are zeroed as we never write them - return this.chunkBytesBuffer; + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index 4dd63f08..69fc0670 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -205,21 +205,21 @@ export class ListBasicType> } } - protected getChunkBytes(value: ValueOf[]): Uint8Array { + protected getBlocksBytes(value: ValueOf[]): Uint8Array { const byteLen = this.value_serializedSize(value); const chunkByteLen = Math.ceil(byteLen / 64) * 64; // reallocate this.verkleBytes if needed - if (byteLen > this.chunkBytesBuffer.length) { + if (byteLen > this.blocksBuffer.length) { // pad 1 chunk if maxChunkCount is not even - this.chunkBytesBuffer = new Uint8Array(chunkByteLen); + this.blocksBuffer = new Uint8Array(chunkByteLen); } - const chunkBytes = this.chunkBytesBuffer.subarray(0, chunkByteLen); + const chunkBytes = this.blocksBuffer.subarray(0, chunkByteLen); const uint8Array = chunkBytes.subarray(0, byteLen); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, value.length, {uint8Array, dataView}, 0, value); // all padding bytes must be zero, this is similar to set zeroHash(0) - this.chunkBytesBuffer.subarray(byteLen, chunkByteLen).fill(0); + this.blocksBuffer.subarray(byteLen, chunkByteLen).fill(0); return chunkBytes; } diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index 821b9504..38e7a6e5 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -212,13 +212,13 @@ export class ListCompositeType< } } - protected getChunkBytes(value: ValueOf[]): Uint8Array { + protected getBlocksBytes(value: ValueOf[]): Uint8Array { const byteLen = value.length * 32; - const chunkByteLen = this.chunkBytesBuffer.byteLength; - if (byteLen > chunkByteLen) { - this.chunkBytesBuffer = new Uint8Array(Math.ceil(byteLen / 64) * 64); + const blockByteLen = this.blocksBuffer.byteLength; + if (byteLen > blockByteLen) { + this.blocksBuffer = new Uint8Array(Math.ceil(byteLen / 64) * 64); } - return value_getChunkBytesArrayComposite(this.elementType, value.length, value, 
this.chunkBytesBuffer); + return value_getChunkBytesArrayComposite(this.elementType, value.length, value, this.blocksBuffer); } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index 3d2925fb..7e33553e 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -66,7 +66,7 @@ export class OptionalType> extends CompositeTy this.minSize = 0; // Max size includes prepended 0x01 byte this.maxSize = elementType.maxSize + 1; - this.chunkBytesBuffer = new Uint8Array(32); + this.blocksBuffer = new Uint8Array(32); } static named>( @@ -193,13 +193,13 @@ export class OptionalType> extends CompositeTy merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); } - protected getChunkBytes(value: ValueOfType): Uint8Array { + protected getBlocksBytes(value: ValueOfType): Uint8Array { if (value === null) { - this.chunkBytesBuffer.fill(0); + this.blocksBuffer.fill(0); } else { - this.elementType.hashTreeRootInto(value, this.chunkBytesBuffer, 0); + this.elementType.hashTreeRootInto(value, this.blocksBuffer, 0); } - return this.chunkBytesBuffer; + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index 1ac440d7..e72c2a44 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -159,7 +159,7 @@ export class ProfileType>> extends C this.TreeViewDU = opts?.getProfileTreeViewDUClass?.(this) ?? getProfileTreeViewDUClass(this); const fieldBytes = this.activeFields.bitLen * 32; const chunkBytes = Math.ceil(fieldBytes / 64) * 64; - this.chunkBytesBuffer = new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(chunkBytes); } static named>>( @@ -378,8 +378,8 @@ export class ProfileType>> extends C } } - const merkleBytes = this.getChunkBytes(value); - merkleizeInto(merkleBytes, this.maxChunkCount, this.tempRoot, 0); + const blocksBytes = this.getBlocksBytes(value); + merkleizeInto(blocksBytes, this.maxChunkCount, this.tempRoot, 0); mixInActiveFields(this.tempRoot, this.activeFields, output, offset); if (this.cachePermanentRootStruct) { @@ -387,18 +387,18 @@ export class ProfileType>> extends C } } - protected getChunkBytes(struct: ValueOfFields): Uint8Array { - this.chunkBytesBuffer.fill(0); + protected getBlocksBytes(struct: ValueOfFields): Uint8Array { + this.blocksBuffer.fill(0); for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType, chunkIndex, optional} = this.fieldsEntries[i]; if (optional && struct[fieldName] == null) { - this.chunkBytesBuffer.set(zeroHash(0), chunkIndex * 32); + this.blocksBuffer.set(zeroHash(0), chunkIndex * 32); } else { - fieldType.hashTreeRootInto(struct[fieldName], this.chunkBytesBuffer, chunkIndex * 32); + fieldType.hashTreeRootInto(struct[fieldName], this.blocksBuffer, chunkIndex * 32); } } // remaining bytes are zeroed as we never write them - return this.chunkBytesBuffer; + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index 35415ad8..e9dad622 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -151,7 +151,7 @@ export class StableContainerType>> e this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? 
getContainerTreeViewDUClass(this); const fieldBytes = this.fieldsEntries.length * 32; const chunkBytes = Math.ceil(fieldBytes / 64) * 64; - this.chunkBytesBuffer = new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(chunkBytes); } static named>>( @@ -351,8 +351,8 @@ export class StableContainerType>> e } } - const merkleBytes = this.getChunkBytes(value); - merkleizeInto(merkleBytes, this.maxChunkCount, this.tempRoot, 0); + const blockBytes = this.getBlocksBytes(value); + merkleizeInto(blockBytes, this.maxChunkCount, this.tempRoot, 0); // compute active field bitvector const activeFields = BitArray.fromBoolArray([ ...this.fieldsEntries.map(({fieldName}) => value[fieldName] != null), @@ -365,18 +365,18 @@ export class StableContainerType>> e } } - protected getChunkBytes(struct: ValueOfFields): Uint8Array { - this.chunkBytesBuffer.fill(0); + protected getBlocksBytes(struct: ValueOfFields): Uint8Array { + this.blocksBuffer.fill(0); for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType, optional} = this.fieldsEntries[i]; if (optional && struct[fieldName] == null) { - this.chunkBytesBuffer.set(zeroHash(0), i * 32); + this.blocksBuffer.set(zeroHash(0), i * 32); } else { - fieldType.hashTreeRootInto(struct[fieldName], this.chunkBytesBuffer, i * 32); + fieldType.hashTreeRootInto(struct[fieldName], this.blocksBuffer, i * 32); } } - return this.chunkBytesBuffer; + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index 6a6117dd..b2cd72ad 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -92,7 +92,7 @@ export class UnionType[]> extends CompositeType< this.minSize = 1 + Math.min(...minLens); this.maxSize = 1 + Math.max(...maxLens); this.maxSelector = this.types.length - 1; - this.chunkBytesBuffer = new Uint8Array(32); + this.blocksBuffer = new Uint8Array(32); } static named[]>(types: Types, opts: Require): UnionType { @@ -190,9 +190,9 @@ export class UnionType[]> extends CompositeType< merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); } - protected getChunkBytes(value: ValueOfTypes): Uint8Array { - this.types[value.selector].hashTreeRootInto(value.value, this.chunkBytesBuffer, 0); - return this.chunkBytesBuffer; + protected getBlocksBytes(value: ValueOfTypes): Uint8Array { + this.types[value.selector].hashTreeRootInto(value.value, this.blocksBuffer, 0); + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts index bb189044..9660b20a 100644 --- a/packages/ssz/src/type/vectorBasic.ts +++ b/packages/ssz/src/type/vectorBasic.ts @@ -60,7 +60,7 @@ export class VectorBasicType> this.maxSize = this.fixedSize; this.defaultLen = length; // pad 1 chunk if maxChunkCount is not even - this.chunkBytesBuffer = new Uint8Array( + this.blocksBuffer = new Uint8Array( this.maxChunkCount % 2 === 1 ? 
this.maxChunkCount * 32 + 32 : this.maxChunkCount * 32 ); } @@ -150,13 +150,13 @@ export class VectorBasicType> // Merkleization - protected getChunkBytes(value: ValueOf[]): Uint8Array { - const uint8Array = this.chunkBytesBuffer.subarray(0, this.fixedSize); + protected getBlocksBytes(value: ValueOf[]): Uint8Array { + const uint8Array = this.blocksBuffer.subarray(0, this.fixedSize); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, this.length, {uint8Array, dataView}, 0, value); // remaining bytes from this.fixedSize to this.chunkBytesBuffer.length must be zeroed - return this.chunkBytesBuffer; + return this.blocksBuffer; } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts index 28990c43..4cff0cb9 100644 --- a/packages/ssz/src/type/vectorComposite.ts +++ b/packages/ssz/src/type/vectorComposite.ts @@ -65,7 +65,7 @@ export class VectorCompositeType< this.minSize = minSizeArrayComposite(elementType, length); this.maxSize = maxSizeArrayComposite(elementType, length); this.defaultLen = length; - this.chunkBytesBuffer = + this.blocksBuffer = this.maxChunkCount % 2 === 1 ? new Uint8Array(this.maxChunkCount * 32 + 32) : new Uint8Array(this.maxChunkCount * 32); @@ -157,8 +157,8 @@ export class VectorCompositeType< // Merkleization - protected getChunkBytes(value: ValueOf[]): Uint8Array { - return value_getChunkBytesArrayComposite(this.elementType, this.length, value, this.chunkBytesBuffer); + protected getBlocksBytes(value: ValueOf[]): Uint8Array { + return value_getChunkBytesArrayComposite(this.elementType, this.length, value, this.blocksBuffer); } // JSON: inherited from ArrayType diff --git a/packages/ssz/test/spec/runValidTest.ts b/packages/ssz/test/spec/runValidTest.ts index 5ea219eb..eab25843 100644 --- a/packages/ssz/test/spec/runValidTest.ts +++ b/packages/ssz/test/spec/runValidTest.ts @@ -103,8 +103,8 @@ export function runValidSszTest(type: Type, testData: ValidTestCaseData if (type.isBasic) { console.log("Chunk Bytes Basic", toHexString(type.serialize(testDataValue))); } else { - const chunkBytes = (type as CompositeType)["getChunkBytes"](testDataValue); - console.log("Chunk Bytes Composite", toHexString(chunkBytes)); + const blocksBytes = (type as CompositeType)["getBlocksBytes"](testDataValue); + console.log("Chunk Bytes Composite", toHexString(blocksBytes)); } } From 025a0699efda7b214b846d2c88e2cfc51eb4acee Mon Sep 17 00:00:00 2001 From: Tuyen Nguyen Date: Thu, 31 Oct 2024 14:00:42 +0700 Subject: [PATCH 112/113] refactor: merkleizeInto to merkleizeBlocksBytes --- .../persistent-merkle-tree/src/hasher/as-sha256.ts | 6 +++--- packages/persistent-merkle-tree/src/hasher/hashtree.ts | 6 +++--- packages/persistent-merkle-tree/src/hasher/index.ts | 9 +++++++-- packages/persistent-merkle-tree/src/hasher/noble.ts | 6 +++--- packages/persistent-merkle-tree/src/hasher/types.ts | 4 ++-- packages/persistent-merkle-tree/src/hasher/util.ts | 2 +- .../persistent-merkle-tree/test/unit/hasher.test.ts | 8 ++++---- packages/ssz/src/type/bitList.ts | 4 ++-- packages/ssz/src/type/byteList.ts | 4 ++-- packages/ssz/src/type/composite.ts | 4 ++-- packages/ssz/src/type/listBasic.ts | 4 ++-- packages/ssz/src/type/listComposite.ts | 4 ++-- packages/ssz/src/type/optional.ts | 4 ++-- packages/ssz/src/type/profile.ts | 4 ++-- packages/ssz/src/type/stableContainer.ts | 10 +++++----- packages/ssz/src/type/union.ts | 4 ++-- 
packages/ssz/test/perf/merkleize.test.ts | 8 ++++---- packages/ssz/test/unit/merkleize.test.ts | 8 ++++---- 18 files changed, 52 insertions(+), 47 deletions(-) diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 48d76304..817000c7 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -8,14 +8,14 @@ import { import type {Hasher} from "./types"; import {Node} from "../node"; import type {HashComputationLevel} from "../hashComputation"; -import {doDigestNLevel, doMerkleizeInto} from "./util"; +import {doDigestNLevel, doMerkleizeBlocksBytes} from "./util"; export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects: digest64HashObjectsInto, - merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(blocksBytes, padFor, output, offset, hashInto); + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index e96fecdf..b2624013 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -3,7 +3,7 @@ import {Hasher, HashObject} from "./types"; import {Node} from "../node"; import type {HashComputationLevel} from "../hashComputation"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {doDigestNLevel, doMerkleizeInto} from "./util"; +import {doDigestNLevel, doMerkleizeBlocksBytes} from "./util"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -40,8 +40,8 @@ export const hasher: Hasher = { hashInto(hash64Input, hash64Output); byteArrayIntoHashObject(hash64Output, 0, parent); }, - merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(blocksBytes, padFor, output, offset, hashInto); + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 6093e5d8..9b7da978 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -27,8 +27,13 @@ export function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return hasher.digestNLevel(data, nLevel); } -export function merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - hasher.merkleizeInto(blocksBytes, padFor, output, offset); +export function merkleizeBlocksBytes( + blocksBytes: Uint8Array, + padFor: number, + output: Uint8Array, + offset: number +): void { + hasher.merkleizeBlocksBytes(blocksBytes, padFor, output, offset); } export function executeHashComputations(hashComputations: HashComputationLevel[]): void { diff --git 
a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index 43c40aa6..e37d0b5a 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -1,7 +1,7 @@ import {sha256} from "@noble/hashes/sha256"; import {digest64HashObjects, byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {doDigestNLevel, doMerkleizeInto, hashObjectToUint8Array} from "./util"; +import {doDigestNLevel, doMerkleizeBlocksBytes, hashObjectToUint8Array} from "./util"; const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().update(a).update(b).digest(); const hashInto = (input: Uint8Array, output: Uint8Array): void => { @@ -28,8 +28,8 @@ export const hasher: Hasher = { digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), 0, parent); }, - merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(blocksBytes, padFor, output, offset, hashInto); + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index dcbd2a3d..92fb8436 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -15,11 +15,11 @@ export type Hasher = { */ digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void; /** - * Merkleize n SHA256 blocks, each block is 64 bytes + * Merkleize n SHA256 blocks in a single Uint8Array, each block is 64 bytes * padFor is maxChunkCount, use it to compute layers to hash * blocksBytes is mutated after the function */ - merkleizeInto(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; /** * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes. 
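All the chunkCount = 2 call sites in the ssz hunks below (bitList, byteList, listBasic, listComposite, optional, union, stableContainer) follow one mix-in pattern: a reusable 64-byte block holding the value's root in chunk 0 and a little-endian number in chunk 1, merkleized with padFor = 2. A condensed sketch of that pattern; the buffer names mirror the types' private fields:

import {merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree";

const mixInLengthChunkBytes = new Uint8Array(64);
const mixInLengthBuffer = Buffer.from(
  mixInLengthChunkBytes.buffer,
  mixInLengthChunkBytes.byteOffset,
  mixInLengthChunkBytes.byteLength
);

function mixInLength(root: Uint8Array, length: number, output: Uint8Array, offset: number): void {
  mixInLengthChunkBytes.set(root, 0); // chunk 0: hashTreeRoot of the value
  mixInLengthBuffer.writeUIntLE(length, 32, 6); // chunk 1: length; bytes 38..63 stay zero
  // one chunk for the root, one for the length => padFor = 2
  merkleizeBlocksBytes(mixInLengthChunkBytes, 2, output, offset);
}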
diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts index 8f259e14..302e168f 100644 --- a/packages/persistent-merkle-tree/src/hasher/util.ts +++ b/packages/persistent-merkle-tree/src/hasher/util.ts @@ -21,7 +21,7 @@ type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void; * blocksBytes is unsafe because it's modified * The Uint8Array(32) will be written to output at offset */ -export function doMerkleizeInto( +export function doMerkleizeBlocksBytes( blocksBytes: Uint8Array, padFor: number, output: Uint8Array, diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index ee129fd0..cceaae1e 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -89,24 +89,24 @@ describe("hasher.digestNLevel", function () { }); -describe("hasher.merkleizeInto", function () { +describe("hasher.merkleizeBlocksBytes", function () { const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8]; for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) { it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { const data = Buffer.alloc(63, 0); const output = Buffer.alloc(32); - expect(() => hasher.merkleizeInto(data, 2, output, 0)).to.throw("Invalid input length"); + expect(() => hasher.merkleizeBlocksBytes(data, 2, output, 0)).to.throw("Invalid input length"); }); for (const numNode of numNodes) { - it(`${hasher.name}.merkleizeInto for ${numNode} nodes`, () => { + it(`${hasher.name}.merkleizeBlocksBytes for ${numNode} nodes`, () => { const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i))); const data = Buffer.concat(nodes.map((node) => node.root)); const output = Buffer.alloc(32); const chunkCount = Math.max(numNode, 1); const padData = numNode % 2 === 1 ? 
Buffer.concat([data, zeroHash(0)]) : data; - hasher.merkleizeInto(padData, chunkCount, output, 0); + hasher.merkleizeBlocksBytes(padData, chunkCount, output, 0); const depth = Math.ceil(Math.log2(chunkCount)); const root = subtreeFillToContents(nodes, depth).root; expectEqualHex(output, root); diff --git a/packages/ssz/src/type/bitList.ts b/packages/ssz/src/type/bitList.ts index ba1c419a..fec09c8f 100644 --- a/packages/ssz/src/type/bitList.ts +++ b/packages/ssz/src/type/bitList.ts @@ -1,7 +1,7 @@ import {allocUnsafe} from "@chainsafe/as-sha256"; import { getNodesAtDepth, - merkleizeInto, + merkleizeBlocksBytes, Node, packedNodeRootsToBytes, packedRootsBytesToNode, @@ -125,7 +125,7 @@ export class BitListType extends BitArrayType { this.mixInLengthBuffer.writeUIntLE(value.bitLen, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/byteList.ts b/packages/ssz/src/type/byteList.ts index 53c46d39..c8893e8d 100644 --- a/packages/ssz/src/type/byteList.ts +++ b/packages/ssz/src/type/byteList.ts @@ -4,7 +4,7 @@ import { Node, packedNodeRootsToBytes, packedRootsBytesToNode, - merkleizeInto, + merkleizeBlocksBytes, } from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; @@ -112,7 +112,7 @@ export class ByteListType extends ByteArrayType { this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index d5d1821e..ce70be4b 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -8,7 +8,7 @@ import { Proof, ProofType, Tree, - merkleizeInto, + merkleizeBlocksBytes, HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; import {byteArrayEquals} from "../util/byteArray"; @@ -239,7 +239,7 @@ export abstract class CompositeType extends Type { } const blocksBuffer = this.getBlocksBytes(value); - merkleizeInto(blocksBuffer, this.maxChunkCount, output, offset); + merkleizeBlocksBytes(blocksBuffer, this.maxChunkCount, output, offset); if (this.cachePermanentRootStruct) { cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index 69fc0670..940e6266 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -1,4 +1,4 @@ -import {HashComputationLevel, LeafNode, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, LeafNode, Node, Tree, merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "./abstract"; import {BasicType} from "./basic"; import {ByteViews} from "./composite"; @@ -198,7 +198,7 @@ export class ListBasicType> this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, 
output, offset); if (this.cachePermanentRootStruct) { cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index 38e7a6e5..5e5c760f 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,4 +1,4 @@ -import {HashComputationLevel, Node, Tree, merkleizeInto} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, Node, Tree, merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree"; import {cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -205,7 +205,7 @@ export class ListCompositeType< this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); if (this.cachePermanentRootStruct) { cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index 7e33553e..a81fd6ed 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -1,7 +1,7 @@ import { concatGindices, Gindex, - merkleizeInto, + merkleizeBlocksBytes, Node, Tree, zeroNode, @@ -190,7 +190,7 @@ export class OptionalType> extends CompositeTy this.mixInLengthBuffer.writeUIntLE(selector, 32, 6); // one for hashTreeRoot(value), one for selector const chunkCount = 2; - merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); } protected getBlocksBytes(value: ValueOfType): Uint8Array { diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index e72c2a44..5e511a17 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -6,7 +6,7 @@ import { Gindex, toGindex, concatGindices, - merkleizeInto, + merkleizeBlocksBytes, getNode, BranchNode, zeroHash, @@ -379,7 +379,7 @@ export class ProfileType>> extends C } const blocksBytes = this.getBlocksBytes(value); - merkleizeInto(blocksBytes, this.maxChunkCount, this.tempRoot, 0); + merkleizeBlocksBytes(blocksBytes, this.maxChunkCount, this.tempRoot, 0); mixInActiveFields(this.tempRoot, this.activeFields, output, offset); if (this.cachePermanentRootStruct) { diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index e9dad622..5be2c3c2 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -11,7 +11,7 @@ import { getNode, zeroNode, zeroHash, - merkleizeInto, + merkleizeBlocksBytes, countToDepth, getNodeH, setNode, @@ -352,7 +352,7 @@ export class StableContainerType>> e } const blockBytes = this.getBlocksBytes(value); - merkleizeInto(blockBytes, this.maxChunkCount, this.tempRoot, 0); + merkleizeBlocksBytes(blockBytes, this.maxChunkCount, this.tempRoot, 0); // compute active field bitvector const activeFields = BitArray.fromBoolArray([ ...this.fieldsEntries.map(({fieldName}) => value[fieldName] != null), @@ -829,12 +829,12 @@ export function mixInActiveFields(root: Uint8Array, activeFields: BitArray, outp activeFieldsSingleChunk.set(activeFields.uint8Array); // 1 chunk for root, 1 chunk for 
activeFields const chunkCount = 2; - merkleizeInto(mixInActiveFieldsChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(mixInActiveFieldsChunkBytes, chunkCount, output, offset); return; } const chunkCount = Math.ceil(activeFields.uint8Array.length / 32); - merkleizeInto(activeFields.uint8Array, chunkCount, activeFieldsSingleChunk, 0); + merkleizeBlocksBytes(activeFields.uint8Array, chunkCount, activeFieldsSingleChunk, 0); // 1 chunk for root, 1 chunk for activeFields - merkleizeInto(mixInActiveFieldsChunkBytes, 2, output, offset); + merkleizeBlocksBytes(mixInActiveFieldsChunkBytes, 2, output, offset); } diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index b2cd72ad..1a20fcee 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -4,7 +4,7 @@ import { Gindex, Node, Tree, - merkleizeInto, + merkleizeBlocksBytes, getHashComputations, HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; @@ -187,7 +187,7 @@ export class UnionType[]> extends CompositeType< super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); this.mixInLengthBuffer.writeUIntLE(value.selector, 32, 6); const chunkCount = 2; - merkleizeInto(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); } protected getBlocksBytes(value: ValueOfTypes): Uint8Array { diff --git a/packages/ssz/test/perf/merkleize.test.ts b/packages/ssz/test/perf/merkleize.test.ts index a900015a..70bc719b 100644 --- a/packages/ssz/test/perf/merkleize.test.ts +++ b/packages/ssz/test/perf/merkleize.test.ts @@ -1,6 +1,6 @@ import {itBench} from "@dapplion/benchmark"; import {bitLength, merkleize} from "../../src/util/merkleize"; -import {merkleizeInto} from "@chainsafe/persistent-merkle-tree"; +import {merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree"; describe("merkleize / bitLength", () => { for (const n of [50, 8000, 250000]) { @@ -14,15 +14,15 @@ describe("merkleize / bitLength", () => { } }); -describe("merkleize vs persistent-merkle-tree merkleizeInto", () => { +describe("merkleize vs persistent-merkle-tree merkleizeBlocksBytes", () => { const chunkCounts = [4, 8, 16, 32]; for (const chunkCount of chunkCounts) { const rootArr = Array.from({length: chunkCount}, (_, i) => Buffer.alloc(32, i)); const roots = Buffer.concat(rootArr); const result = Buffer.alloc(32); - itBench(`merkleizeInto ${chunkCount} chunks`, () => { - merkleizeInto(roots, chunkCount, result, 0); + itBench(`merkleizeBlocksBytes ${chunkCount} chunks`, () => { + merkleizeBlocksBytes(roots, chunkCount, result, 0); }); itBench(`merkleize ${chunkCount} chunks`, () => { diff --git a/packages/ssz/test/unit/merkleize.test.ts b/packages/ssz/test/unit/merkleize.test.ts index d1e611b7..dccb33c6 100644 --- a/packages/ssz/test/unit/merkleize.test.ts +++ b/packages/ssz/test/unit/merkleize.test.ts @@ -1,6 +1,6 @@ import {expect} from "chai"; import {bitLength, maxChunksToDepth, merkleize, mixInLength, nextPowerOf2} from "../../src/util/merkleize"; -import {merkleizeInto, LeafNode, zeroHash} from "@chainsafe/persistent-merkle-tree"; +import {merkleizeBlocksBytes, LeafNode, zeroHash} from "@chainsafe/persistent-merkle-tree"; describe("util / merkleize / bitLength", () => { const bitLengthByIndex = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4]; @@ -41,14 +41,14 @@ describe("util / merkleize / mixInLength", () => { mixInLengthBuffer.set(root, 0); mixInLengthBuffer.writeUIntLE(length, 32, 6); const finalRoot = new Uint8Array(32); - 
merkleizeInto(mixInLengthBuffer, 2, finalRoot, 0);
+    merkleizeBlocksBytes(mixInLengthBuffer, 2, finalRoot, 0);
     const expectedRoot = mixInLength(root, length);
     expect(finalRoot).to.be.deep.equal(expectedRoot);
   });
 }
});

-describe("merkleize should be equal to merkleizeInto of hasher", () => {
+describe("merkleize should be equal to merkleizeBlocksBytes of hasher", () => {
   const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
   for (const numNode of numNodes) {
     it(`merkleize for ${numNode} nodes`, () => {
@@ -58,7 +58,7 @@ describe("merkleize should be equal to merkleizeInto of hasher", () => {
       const roots = nodes.map((node) => node.root);
       const expectedRoot = Buffer.alloc(32);
       const chunkCount = Math.max(numNode, 1);
-      merkleizeInto(padData, chunkCount, expectedRoot, 0);
+      merkleizeBlocksBytes(padData, chunkCount, expectedRoot, 0);
       expect(merkleize(roots, chunkCount)).to.be.deep.equal(expectedRoot);
     });
   }

From fbbf91895f013c37544de73aaeb8f335dcdb08a0 Mon Sep 17 00:00:00 2001
From: twoeths
Date: Wed, 6 Nov 2024 15:31:25 +0800
Subject: [PATCH 113/113] feat: merkleize block array (#420)

* feat: implement merkleizeBlockArray

* fix: support padFor=1 for merkleizeBlockArray

* feat: add blockLimit param to merkleizeBlockArray() api

* feat: implement ByteListType.hashTreeRoot() using merkleizeBlockArray()

* fix: assign this.blocksBuffer in a more straightforward way

* chore: refactor chunkBytes to blockBytes

* fix: blockLimit usage in doMerkleizeBlockArray

* feat: implement ListComposite.hashTreeRoot() using merkleizeBlockArray api
---
 .../src/hasher/as-sha256.ts                   |   8 +-
 .../src/hasher/hashtree.ts                    |   5 +-
 .../src/hasher/index.ts                       |  10 ++
 .../src/hasher/noble.ts                       |  14 +-
 .../src/hasher/types.ts                       |  12 ++
 .../persistent-merkle-tree/src/hasher/util.ts | 125 +++++++++++++++++-
 .../test/unit/hasher.test.ts                  |  53 ++++++++
 packages/ssz/src/type/arrayComposite.ts       |  20 +--
 packages/ssz/src/type/bitArray.ts             |   8 +-
 packages/ssz/src/type/bitList.ts              |  12 +-
 packages/ssz/src/type/byteArray.ts            |  14 +-
 packages/ssz/src/type/byteList.ts             |  46 ++++++-
 packages/ssz/src/type/container.ts            |   3 +-
 packages/ssz/src/type/listBasic.ts            |  26 ++--
 packages/ssz/src/type/listComposite.ts        |  59 ++++++---
 packages/ssz/src/type/optional.ts             |  13 +-
 packages/ssz/src/type/profile.ts              |   3 +-
 packages/ssz/src/type/stableContainer.ts      |  15 +--
 packages/ssz/src/type/union.ts                |  13 +-
 packages/ssz/src/type/vectorBasic.ts          |   7 +-
 packages/ssz/src/type/vectorComposite.ts      |   9 +-
 packages/ssz/test/perf/merkleize.test.ts      |  25 +++-
 .../test/unit/byType/byteList/value.test.ts   |  28 ++++
 .../test/unit/byType/container/tree.test.ts   |  11 +-
 .../unit/byType/listComposite/tree.test.ts    | 123 ++---------------
 packages/ssz/test/unit/merkleize.test.ts      |  30 ++++-
 26 files changed, 463 insertions(+), 229 deletions(-)
 create mode 100644 packages/ssz/test/unit/byType/byteList/value.test.ts

diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts
index 817000c7..9b8f8565 100644
--- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts
+++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts
@@ -8,7 +8,10 @@ import {
 import type {Hasher} from "./types";
 import {Node} from "../node";
 import type {HashComputationLevel} from "../hashComputation";
-import {doDigestNLevel, doMerkleizeBlocksBytes} from "./util";
+import {BLOCK_SIZE, doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes} from "./util";
+
+/** hashInto() function of as-sha256 loops through every 256 bytes */
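+// this 4 * 64 = 256-byte scratch buffer also sets doMerkleizeBlockArray()'s batch size: 4 blocks per hashInto() call
+const buffer = new Uint8Array(4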
* BLOCK_SIZE);

 export const hasher: Hasher = {
   name: "as-sha256",
@@ -17,6 +20,9 @@
   merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void {
     return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto);
   },
+  merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
+    return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer);
+  },
   digestNLevel(data: Uint8Array, nLevel: number): Uint8Array {
     return doDigestNLevel(data, nLevel, hashInto);
   },
diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
index b2624013..9d04eb07 100644
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
@@ -3,7 +3,7 @@ import {Hasher, HashObject} from "./types";
 import {Node} from "../node";
 import type {HashComputationLevel} from "../hashComputation";
 import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject";
-import {doDigestNLevel, doMerkleizeBlocksBytes} from "./util";
+import {doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes} from "./util";

 /**
  * Best SIMD implementation is in 512 bits = 64 bytes
@@ -43,6 +43,9 @@ export const hasher: Hasher = {
   merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void {
     return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto);
   },
+  merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
+    return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, uint8Input);
+  },
   digestNLevel(data: Uint8Array, nLevel: number): Uint8Array {
     return doDigestNLevel(data, nLevel, hashInto);
   },
diff --git a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts
index 9b7da978..414ee703 100644
--- a/packages/persistent-merkle-tree/src/hasher/index.ts
+++ b/packages/persistent-merkle-tree/src/hasher/index.ts
@@ -36,6 +36,16 @@
   hasher.merkleizeBlocksBytes(blocksBytes, padFor, output, offset);
 }

+export function merkleizeBlockArray(
+  blocks: Uint8Array[],
+  blockLimit: number,
+  padFor: number,
+  output: Uint8Array,
+  offset: number
+): void {
+  hasher.merkleizeBlockArray(blocks, blockLimit, padFor, output, offset);
+}
+
 export function executeHashComputations(hashComputations: HashComputationLevel[]): void {
   hasher.executeHashComputations(hashComputations);
 }
diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts
index e37d0b5a..e4a98bc4 100644
--- a/packages/persistent-merkle-tree/src/hasher/noble.ts
+++ b/packages/persistent-merkle-tree/src/hasher/noble.ts
@@ -1,7 +1,13 @@
 import {sha256} from "@noble/hashes/sha256";
 import {digest64HashObjects, byteArrayIntoHashObject} from "@chainsafe/as-sha256";
 import type {Hasher} from "./types";
-import {doDigestNLevel, doMerkleizeBlocksBytes, hashObjectToUint8Array} from "./util";
+import {
+  BLOCK_SIZE,
+  doDigestNLevel,
+  doMerkleizeBlockArray,
+  doMerkleizeBlocksBytes,
+  hashObjectToUint8Array,
+} from "./util";

 const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().update(a).update(b).digest();
 const hashInto = (input: Uint8Array, output: Uint8Array): void => {
@@ -22,6 +28,9 @@ const hashInto = (input: Uint8Array, output: Uint8Array): void => {
 };

+/** should be a multiple of 64; keep it the same as as-sha256 */
+const buffer = new Uint8Array(4 * BLOCK_SIZE);
+
 export const hasher: Hasher = {
   name: "noble",
   digest64,
@@ -31,6 +40,9 @@
   merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void {
     return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto);
   },
+  merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) {
+    return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer);
+  },
   digestNLevel(data: Uint8Array, nLevel: number): Uint8Array {
     return doDigestNLevel(data, nLevel, hashInto);
   },
diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts
index 92fb8436..dc1b6289 100644
--- a/packages/persistent-merkle-tree/src/hasher/types.ts
+++ b/packages/persistent-merkle-tree/src/hasher/types.ts
@@ -20,6 +20,18 @@ export type Hasher = {
   * blocksBytes is mutated after the function
   */
  merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void;
+  /**
+   * Merkleize n SHA256 blocks, each a 64-byte Uint8Array
+   * padFor is maxChunkCount, use it to compute layers to hash
+   * blocks are mutated after the function
+   */
+  merkleizeBlockArray(
+    blocks: Uint8Array[],
+    blockLimit: number,
+    padFor: number,
+    output: Uint8Array,
+    offset: number
+  ): void;
   /**
    * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels
    * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes.
diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts
index 302e168f..e51d785e 100644
--- a/packages/persistent-merkle-tree/src/hasher/util.ts
+++ b/packages/persistent-merkle-tree/src/hasher/util.ts
@@ -13,13 +13,15 @@
 type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void;

+/** a SHA256 block is 64 bytes */
+export const BLOCK_SIZE = 64;
+
 /**
- * A SHA256 block is 64 bytes
+ * Merkleize multiple SHA256 blocks in a single Uint8Array into ${output} at ${offset}
  * - if padFor > 1, blocksBytes needs to be a multiple of 64 bytes.
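 * - padFor is the maxChunkCount, used to compute the number of layers to hash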
 * - if padFor = 1, blocksBytes needs to be at least 32 bytes
 * - if padFor = 0, throw error
 * blocksBytes is unsafe because it's modified
- * The Uint8Array(32) will be written to output at offset
 */
 export function doMerkleizeBlocksBytes(
   blocksBytes: Uint8Array,
@@ -43,7 +45,7 @@ export function doMerkleizeBlocksBytes(
   }

   // if padFor = 1, only need 32 bytes
-  if (padFor > 1 && blocksBytes.length % 64 !== 0) {
+  if (padFor > 1 && blocksBytes.length % BLOCK_SIZE !== 0) {
     throw new Error(
       `Invalid input length, expect to be multiple of 64 bytes, got ${blocksBytes.length}, padFor=${padFor}`
     );
@@ -52,16 +54,16 @@ export function doMerkleizeBlocksBytes(
   let inputLength = blocksBytes.length;
   let outputLength = Math.floor(inputLength / 2);
   let bufferIn = blocksBytes;
-  // hash into the same buffer
-  for (let i = 0; i < layerCount; i++) {
+  // hash into the same buffer to save memory allocation
+  for (let layer = 0; layer < layerCount; layer++) {
     const bufferOut = blocksBytes.subarray(0, outputLength);
     hashInto(bufferIn, bufferOut);
     const chunkCount = Math.floor(outputLength / 32);
-    if (chunkCount % 2 === 1 && i < layerCount - 1) {
+    if (chunkCount % 2 === 1 && layer < layerCount - 1) {
       // extend to 1 more chunk
       inputLength = outputLength + 32;
       bufferIn = blocksBytes.subarray(0, inputLength);
-      bufferIn.set(zeroHash(i + 1), outputLength);
+      bufferIn.set(zeroHash(layer + 1), outputLength);
     } else {
       bufferIn = bufferOut;
       inputLength = outputLength;
@@ -72,6 +74,115 @@ export function doMerkleizeBlocksBytes(
   output.set(bufferIn.subarray(0, 32), offset);
 }

+/**
+ * Merkleize multiple SHA256 blocks into ${output} at ${offset}
+ * @param padFor is maxChunkCount, should be >= 2
+ * @param blocks is unsafe because it's modified
+ * @param blockLimit number of blocks, should be <= blocks.length so that the consumer can reuse memory
+ */
+export function doMerkleizeBlockArray(
+  blocks: Uint8Array[],
+  blockLimit: number,
+  padFor: number,
+  output: Uint8Array,
+  offset: number,
+  hashInto: HashIntoFn,
+  buffer: Uint8Array
+): void {
+  if (padFor < 1) {
+    throw new Error(`Invalid padFor, expect to be at least 1, got ${padFor}`);
+  }
+
+  if (blockLimit > blocks.length) {
+    throw new Error(
+      `Invalid blockLimit, expect to be less than or equal blocks.length ${blocks.length}, got ${blockLimit}`
+    );
+  }
+
+  const layerCount = Math.ceil(Math.log2(padFor));
+  if (blockLimit === 0) {
+    output.set(zeroHash(layerCount), offset);
+    return;
+  }
+
+  for (const block of blocks) {
+    if (block.length !== BLOCK_SIZE) {
+      throw new Error(`Invalid block length, expect to be 64 bytes, got ${block.length}`);
+    }
+  }
+
+  // as-sha256 has a buffer of 4 * 64 bytes
+  // hashtree has a buffer of 16 * 64 bytes
+  if (buffer.length === 0 || buffer.length % (4 * BLOCK_SIZE) !== 0) {
+    throw new Error(`Invalid buffer length, expect to be a multiple of 256 bytes, got ${buffer.length}`);
+  }
+
+  // batchSize is 4 for as-sha256, 16 for hashtree
+  const batchSize = Math.floor(buffer.length / BLOCK_SIZE);
+  const halfBatchSize = Math.floor(batchSize / 2);
+  let bufferIn = buffer;
+  // hash into the same buffer
+  let bufferOut = buffer.subarray(0, halfBatchSize * BLOCK_SIZE);
+  // ignore blocks beyond blockLimit
+  let blockCount = blockLimit;
+  // hash into the same blocks to save memory allocation
+  for (let layer = 0; layer < layerCount; layer++) {
+    let outBlockIndex = 0;
+    const sameLayerLoop = Math.floor(blockCount / batchSize);
+    for (let i = 0; i < sameLayerLoop; i++) {
+      // populate bufferIn
+      for (let j = 0; j < batchSize; j++) {
+        const
blockIndex = i * batchSize + j; + bufferIn.set(blocks[blockIndex], j * BLOCK_SIZE); + } + + // hash into bufferOut + hashInto(bufferIn, bufferOut); + + // copy bufferOut to blocks, bufferOut.len = halfBatchSize * BLOCK_SIZE + for (let j = 0; j < halfBatchSize; j++) { + blocks[outBlockIndex].set(bufferOut.subarray(j * BLOCK_SIZE, (j + 1) * BLOCK_SIZE)); + outBlockIndex++; + } + } + + // remaining blocks + const remainingBlocks = blockCount % batchSize; + bufferIn = buffer.subarray(0, remainingBlocks * BLOCK_SIZE); + bufferOut = buffer.subarray(0, Math.floor(bufferIn.length / 2)); + + // populate bufferIn + for (let blockIndex = Math.floor(blockCount / batchSize) * batchSize; blockIndex < blockCount; blockIndex++) { + bufferIn.set(blocks[blockIndex], (blockIndex % batchSize) * BLOCK_SIZE); + } + + // hash into bufferOut + hashInto(bufferIn, bufferOut); + + // copy bufferOut to blocks, note that bufferOut.len may not be divisible by BLOCK_SIZE + for (let j = 0; j < Math.floor(bufferOut.length / BLOCK_SIZE); j++) { + blocks[outBlockIndex].set(bufferOut.subarray(j * BLOCK_SIZE, (j + 1) * BLOCK_SIZE)); + outBlockIndex++; + } + + if (bufferOut.length % BLOCK_SIZE !== 0) { + // set the last 32 bytes of bufferOut + blocks[outBlockIndex].set(bufferOut.subarray(bufferOut.length - 32, bufferOut.length), 0); + // add zeroHash + blocks[outBlockIndex].set(zeroHash(layer + 1), 32); + outBlockIndex++; + } + + // end of layer, update blockCount, bufferIn, bufferOut + blockCount = outBlockIndex; + bufferIn = buffer.subarray(0, blockCount * BLOCK_SIZE); + bufferOut = buffer.subarray(0, Math.floor(bufferIn.length / 2)); + } + + // the end result stays in blocks[0] + output.set(blocks[0].subarray(0, 32), offset); +} + /** * Input data is unsafe because it's modified * given nLevel = 3 diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index cceaae1e..6205bd56 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -115,3 +115,56 @@ describe("hasher.merkleizeBlocksBytes", function () { } }); +/** + * The same to the previous test, but using the merkleizeBlockArray method + */ +describe("hasher.merkleizeBlockArray", function () { + for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) { + it (`${hasher.name} should throw error if invalid blockLimit`, () => { + const data = Buffer.alloc(64, 0); + const output = Buffer.alloc(32); + expect(() => hasher.merkleizeBlockArray([data], 2, 2, output, 0)).to.throw("Invalid blockLimit, expect to be less than or equal blocks.length 1, got 2"); + }); + + it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { + const data = Buffer.alloc(63, 0); + const output = Buffer.alloc(32); + expect(() => hasher.merkleizeBlockArray([data], 1, 2, output, 0)).to.throw("Invalid block length, expect to be 64 bytes, got 63"); + }); + + it (`${hasher.name} should throw error if chunkCount < 1`, () => { + const data = Buffer.alloc(64, 0); + const output = Buffer.alloc(32); + const chunkCount = 0; + expect(() => hasher.merkleizeBlockArray([data], 1, chunkCount, output, 0)).to.throw("Invalid padFor, expect to be at least 1, got 0"); + }); + + // hashtree has a buffer of 16 * 64 bytes = 32 nodes + const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79]; + for (const numNode of numNodes) { + it(`${hasher.name}.merkleizeBlockArray for ${numNode} nodes`, () => { + + const nodes = 
Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+        const data = Buffer.concat(nodes.map((node) => node.root));
+        const output = Buffer.alloc(32);
+        // depth of 79 nodes is 7, make it 10 to test the padding
+        const chunkCount = Math.max(numNode, 10);
+        const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
+        expect(padData.length % 64).to.equal(0);
+        const blocks: Uint8Array[] = [];
+        for (let i = 0; i < padData.length; i += 64) {
+          blocks.push(padData.slice(i, i + 64));
+        }
+        const blockLimit = blocks.length;
+        // the hash could run with just the blocks above; add some redundant blocks to mimic consumers that reuse memory
+        blocks.push(Buffer.alloc(64, 1));
+        blocks.push(Buffer.alloc(64, 2));
+        hasher.merkleizeBlockArray(blocks, blockLimit, chunkCount, output, 0);
+        const depth = Math.ceil(Math.log2(chunkCount));
+        const root = subtreeFillToContents(nodes, depth).root;
+        expectEqualHex(output, root);
+      });
+    }
+  }
+});
+
diff --git a/packages/ssz/src/type/arrayComposite.ts b/packages/ssz/src/type/arrayComposite.ts
index 986b0e0a..d77e89dc 100644
--- a/packages/ssz/src/type/arrayComposite.ts
+++ b/packages/ssz/src/type/arrayComposite.ts
@@ -211,29 +211,29 @@ export function tree_deserializeFromBytesArrayComposite

-export function value_getChunkBytesArrayComposite>(
+export function value_getBlocksBytesArrayComposite>(
   elementType: ElementType,
   length: number,
   value: ValueOf[],
-  chunkBytesBuffer: Uint8Array
+  blocksBuffer: Uint8Array
 ): Uint8Array {
-  const isOddChunk = length % 2 === 1;
-  const chunkBytesLen = isOddChunk ? length * 32 + 32 : length * 32;
-  if (chunkBytesLen > chunkBytesBuffer.length) {
-    throw new Error(`chunkBytesBuffer is too small: ${chunkBytesBuffer.length} < ${chunkBytesLen}`);
+  const blockBytesLen = Math.ceil(length / 2) * 64;
+  if (blockBytesLen > blocksBuffer.length) {
+    throw new Error(`blocksBuffer is too small: ${blocksBuffer.length} < ${blockBytesLen}`);
   }
-  const chunkBytes = chunkBytesBuffer.subarray(0, chunkBytesLen);
+  const blocksBytes = blocksBuffer.subarray(0, blockBytesLen);

   for (let i = 0; i < length; i++) {
-    elementType.hashTreeRootInto(value[i], chunkBytes, i * 32);
+    elementType.hashTreeRootInto(value[i], blocksBytes, i * 32);
   }

+  const isOddChunk = length % 2 === 1;
   if (isOddChunk) {
     // similar to append zeroHash(0)
-    chunkBytes.subarray(length * 32, chunkBytesLen).fill(0);
+    blocksBytes.subarray(length * 32, blockBytesLen).fill(0);
   }

-  return chunkBytes;
+  return blocksBytes;
 }

 function readOffsetsArrayComposite(
diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts
index 27a61d2a..469cd131 100644
--- a/packages/ssz/src/type/bitArray.ts
+++ b/packages/ssz/src/type/bitArray.ts
@@ -4,7 +4,7 @@
 import {CompositeType, LENGTH_GINDEX} from "./composite";
 import {BitArray} from "../value/bitArray";
 import {BitArrayTreeView} from "../view/bitArray";
 import {BitArrayTreeViewDU} from "../viewDU/bitArray";
-import {getBlockBytes} from "./byteArray";
+import {getBlocksBytes} from "./byteArray";

 /* eslint-disable @typescript-eslint/member-ordering */

@@ -44,11 +44,9 @@ export abstract class BitArrayType extends CompositeType
     if (value.bitLen / 8 > this.blocksBuffer.length) {
       const chunkCount = Math.ceil(value.bitLen / 8 / 32);
-      const chunkBytes = chunkCount * 32;
-      // pad 1 chunk if maxChunkCount is not even
-      this.blocksBuffer = chunkCount % 2 === 1 ?
new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(Math.ceil(chunkCount / 2) * 64); } - return getBlockBytes(value.uint8Array, this.blocksBuffer); + return getBlocksBytes(value.uint8Array, this.blocksBuffer); } // Proofs diff --git a/packages/ssz/src/type/bitList.ts b/packages/ssz/src/type/bitList.ts index fec09c8f..c343ac48 100644 --- a/packages/ssz/src/type/bitList.ts +++ b/packages/ssz/src/type/bitList.ts @@ -36,11 +36,11 @@ export class BitListType extends BitArrayType { readonly maxSize: number; readonly maxChunkCount: number; readonly isList = true; - readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBlockBytes = new Uint8Array(64); readonly mixInLengthBuffer = Buffer.from( - this.mixInLengthChunkBytes.buffer, - this.mixInLengthChunkBytes.byteOffset, - this.mixInLengthChunkBytes.byteLength + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength ); constructor(readonly limitBits: number, opts?: BitListOptions) { @@ -120,12 +120,12 @@ export class BitListType extends BitArrayType { } hashTreeRootInto(value: BitArray, output: Uint8Array, offset: number): void { - super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); // mixInLength this.mixInLengthBuffer.writeUIntLE(value.bitLen, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/byteArray.ts b/packages/ssz/src/type/byteArray.ts index 4a3edb19..fc67037b 100644 --- a/packages/ssz/src/type/byteArray.ts +++ b/packages/ssz/src/type/byteArray.ts @@ -93,11 +93,9 @@ export abstract class ByteArrayType extends CompositeType this.blocksBuffer.length) { const chunkCount = Math.ceil(value.length / 32); - const chunkBytes = chunkCount * 32; - // pad 1 chunk if maxChunkCount is not even - this.blocksBuffer = chunkCount % 2 === 1 ? 
new Uint8Array(chunkBytes + 32) : new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(Math.ceil(chunkCount / 2) * 64); } - return getBlockBytes(value, this.blocksBuffer); + return getBlocksBytes(value, this.blocksBuffer); } // Proofs @@ -162,15 +160,15 @@ export abstract class ByteArrayType extends CompositeType blocksBuffer.length) { throw new Error(`data length ${value.length} exceeds blocksBuffer length ${blocksBuffer.length}`); } blocksBuffer.set(value); const valueLen = value.length; - const chunkByteLen = Math.ceil(valueLen / 64) * 64; + const blockByteLen = Math.ceil(valueLen / 64) * 64; // all padding bytes must be zero, this is similar to set zeroHash(0) - blocksBuffer.subarray(valueLen, chunkByteLen).fill(0); - return blocksBuffer.subarray(0, chunkByteLen); + blocksBuffer.subarray(valueLen, blockByteLen).fill(0); + return blocksBuffer.subarray(0, blockByteLen); } diff --git a/packages/ssz/src/type/byteList.ts b/packages/ssz/src/type/byteList.ts index c8893e8d..0ceaab7a 100644 --- a/packages/ssz/src/type/byteList.ts +++ b/packages/ssz/src/type/byteList.ts @@ -5,6 +5,7 @@ import { packedNodeRootsToBytes, packedRootsBytesToNode, merkleizeBlocksBytes, + merkleizeBlockArray, } from "@chainsafe/persistent-merkle-tree"; import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; @@ -40,11 +41,13 @@ export class ByteListType extends ByteArrayType { readonly maxSize: number; readonly maxChunkCount: number; readonly isList = true; - readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly blockArray: Uint8Array[] = []; + private blockBytesLen = 0; + readonly mixInLengthBlockBytes = new Uint8Array(64); readonly mixInLengthBuffer = Buffer.from( - this.mixInLengthChunkBytes.buffer, - this.mixInLengthChunkBytes.byteOffset, - this.mixInLengthChunkBytes.byteLength + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength ); constructor(readonly limitBytes: number, opts?: ByteListOptions) { @@ -106,13 +109,44 @@ export class ByteListType extends ByteArrayType { return root; } + /** + * Use merkleizeBlockArray() instead of merkleizeBlocksBytes() to avoid big memory allocation + */ hashTreeRootInto(value: Uint8Array, output: Uint8Array, offset: number): void { - super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + // should not call super.hashTreeRoot() here + // use merkleizeBlockArray() instead of merkleizeBlocksBytes() to avoid big memory allocation + // reallocate this.blockArray if needed + if (value.length > this.blockBytesLen) { + const newBlockCount = Math.ceil(value.length / 64); + // this.blockBytesLen should be a multiple of 64 + const oldBlockCount = Math.ceil(this.blockBytesLen / 64); + const blockDiff = newBlockCount - oldBlockCount; + const newBlocksBytes = new Uint8Array(blockDiff * 64); + for (let i = 0; i < blockDiff; i++) { + this.blockArray.push(newBlocksBytes.subarray(i * 64, (i + 1) * 64)); + this.blockBytesLen += 64; + } + } + + // populate this.blockArray + for (let i = 0; i < value.length; i += 64) { + const block = this.blockArray[i / 64]; + // zero out the last block if it's over value.length + if (i + 64 > value.length) { + block.fill(0); + } + block.set(value.subarray(i, Math.min(i + 64, value.length))); + } + + // compute hashTreeRoot + const blockLimit = Math.ceil(value.length / 64); + merkleizeBlockArray(this.blockArray, blockLimit, this.maxChunkCount, this.mixInLengthBlockBytes, 0); + // mixInLength 
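+    // value.length is written as a little-endian uint48 at byte offset 32 of this 64-byte block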
this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/container.ts b/packages/ssz/src/type/container.ts index ac3b5912..2d6505ea 100644 --- a/packages/ssz/src/type/container.ts +++ b/packages/ssz/src/type/container.ts @@ -131,8 +131,7 @@ export class ContainerType>> extends this.TreeView = opts?.getContainerTreeViewClass?.(this) ?? getContainerTreeViewClass(this); this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? getContainerTreeViewDUClass(this); const fieldBytes = this.fieldsEntries.length * 32; - const chunkBytes = Math.ceil(fieldBytes / 64) * 64; - this.blocksBuffer = new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(Math.ceil(fieldBytes / 64) * 64); } static named>>( diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index 940e6266..9af933d7 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -47,11 +47,11 @@ export class ListBasicType> readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; - readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBlockBytes = new Uint8Array(64); readonly mixInLengthBuffer = Buffer.from( - this.mixInLengthChunkBytes.buffer, - this.mixInLengthChunkBytes.byteOffset, - this.mixInLengthChunkBytes.byteLength + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength ); protected readonly defaultLen = 0; @@ -193,12 +193,12 @@ export class ListBasicType> } } - super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); // mixInLength this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); // one for hashTreeRoot(value), one for length const chunkCount = 2; - merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); if (this.cachePermanentRootStruct) { cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); @@ -207,20 +207,20 @@ export class ListBasicType> protected getBlocksBytes(value: ValueOf[]): Uint8Array { const byteLen = this.value_serializedSize(value); - const chunkByteLen = Math.ceil(byteLen / 64) * 64; - // reallocate this.verkleBytes if needed + const blockByteLen = Math.ceil(byteLen / 64) * 64; + // reallocate this.blocksBuffer if needed if (byteLen > this.blocksBuffer.length) { // pad 1 chunk if maxChunkCount is not even - this.blocksBuffer = new Uint8Array(chunkByteLen); + this.blocksBuffer = new Uint8Array(blockByteLen); } - const chunkBytes = this.blocksBuffer.subarray(0, chunkByteLen); - const uint8Array = chunkBytes.subarray(0, byteLen); + const blockBytes = this.blocksBuffer.subarray(0, blockByteLen); + const uint8Array = blockBytes.subarray(0, byteLen); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, value.length, {uint8Array, dataView}, 0, value); // all padding bytes must be zero, this is similar to set zeroHash(0) - this.blocksBuffer.subarray(byteLen, chunkByteLen).fill(0); - return chunkBytes; + this.blocksBuffer.subarray(byteLen, 
blockByteLen).fill(0); + return blockBytes; } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index 5e5c760f..5487f700 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,4 +1,10 @@ -import {HashComputationLevel, Node, Tree, merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree"; +import { + HashComputationLevel, + Node, + Tree, + merkleizeBlocksBytes, + merkleizeBlockArray, +} from "@chainsafe/persistent-merkle-tree"; import {cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; @@ -13,7 +19,6 @@ import { tree_deserializeFromBytesArrayComposite, tree_serializeToBytesArrayComposite, maxSizeArrayComposite, - value_getChunkBytesArrayComposite, } from "./arrayComposite"; import {ArrayCompositeType} from "../view/arrayComposite"; import {ListCompositeTreeView} from "../view/listComposite"; @@ -52,11 +57,12 @@ export class ListCompositeType< readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; - readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly blockArray: Uint8Array[] = []; + readonly mixInLengthBlockBytes = new Uint8Array(64); readonly mixInLengthBuffer = Buffer.from( - this.mixInLengthChunkBytes.buffer, - this.mixInLengthChunkBytes.byteOffset, - this.mixInLengthChunkBytes.byteLength + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength ); protected readonly defaultLen = 0; @@ -200,25 +206,48 @@ export class ListCompositeType< } } - super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + // should not call super.hashTreeRootInto() here + // use merkleizeBlockArray() instead of merkleizeBlocksBytes() to avoid big memory allocation + // reallocate this.blockArray if needed + if (value.length > this.blockArray.length) { + const blockDiff = value.length - this.blockArray.length; + const newBlocksBytes = new Uint8Array(blockDiff * 64); + for (let i = 0; i < blockDiff; i++) { + this.blockArray.push(newBlocksBytes.subarray(i * 64, (i + 1) * 64)); + } + } + + // populate this.blockArray + for (let i = 0; i < value.length; i++) { + // 2 values share a block + const block = this.blockArray[Math.floor(i / 2)]; + const offset = i % 2 === 0 ? 
0 : 32;
+      this.elementType.hashTreeRootInto(value[i], block, offset);
+    }
+
+    const blockLimit = Math.ceil(value.length / 2);
+    // zero out the last block if needed
+    if (value.length % 2 === 1) {
+      this.blockArray[blockLimit - 1].fill(0, 32);
+    }
+
+    // compute hashTreeRoot
+    merkleizeBlockArray(this.blockArray, blockLimit, this.maxChunkCount, this.mixInLengthBlockBytes, 0);
+
+    // mixInLength
     this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6);
     // one for hashTreeRoot(value), one for length
     const chunkCount = 2;
-    merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset);
+    merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset);

     if (this.cachePermanentRootStruct) {
       cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache);
     }
   }

-  protected getBlocksBytes(value: ValueOf[]): Uint8Array {
-    const byteLen = value.length * 32;
-    const blockByteLen = this.blocksBuffer.byteLength;
-    if (byteLen > blockByteLen) {
-      this.blocksBuffer = new Uint8Array(Math.ceil(byteLen / 64) * 64);
-    }
-    return value_getChunkBytesArrayComposite(this.elementType, value.length, value, this.blocksBuffer);
+  protected getBlocksBytes(): Uint8Array {
+    // we use merkleizeBlockArray for hashTreeRoot() computation
+    throw Error("getBlocksBytes should not be called for ListCompositeType");
   }

   // JSON: inherited from ArrayType
diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts
index a81fd6ed..1473e6d7 100644
--- a/packages/ssz/src/type/optional.ts
+++ b/packages/ssz/src/type/optional.ts
@@ -48,11 +48,11 @@ export class OptionalType> extends CompositeTy
   readonly maxSize: number;
   readonly isList = true;
   readonly isViewMutable = true;
-  readonly mixInLengthChunkBytes = new Uint8Array(64);
+  readonly mixInLengthBlockBytes = new Uint8Array(64);
   readonly mixInLengthBuffer = Buffer.from(
-    this.mixInLengthChunkBytes.buffer,
-    this.mixInLengthChunkBytes.byteOffset,
-    this.mixInLengthChunkBytes.byteLength
+    this.mixInLengthBlockBytes.buffer,
+    this.mixInLengthBlockBytes.byteOffset,
+    this.mixInLengthBlockBytes.byteLength
   );

   constructor(readonly elementType: ElementType, opts?: OptionalOpts) {
@@ -66,6 +66,7 @@
     this.minSize = 0;
     // Max size includes prepended 0x01 byte
     this.maxSize = elementType.maxSize + 1;
+    // maxChunkCount = 1 so this.blocksBuffer.length = 32 in this case
     this.blocksBuffer = new Uint8Array(32);
   }

@@ -185,12 +186,12 @@
   }

   hashTreeRootInto(value: ValueOfType, output: Uint8Array, offset: number): void {
-    super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0);
+    super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0);
     const selector = value === null ? 0 : 1;
     this.mixInLengthBuffer.writeUIntLE(selector, 32, 6);
     // one for hashTreeRoot(value), one for selector
     const chunkCount = 2;
-    merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset);
+    merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset);
   }

   protected getBlocksBytes(value: ValueOfType): Uint8Array {
diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts
index 5e511a17..06b2cf76 100644
--- a/packages/ssz/src/type/profile.ts
+++ b/packages/ssz/src/type/profile.ts
@@ -158,8 +158,7 @@ export class ProfileType>> extends C
     this.TreeView = opts?.getProfileTreeViewClass?.(this) ?? getProfileTreeViewClass(this);
     this.TreeViewDU = opts?.getProfileTreeViewDUClass?.(this) ??
getProfileTreeViewDUClass(this); const fieldBytes = this.activeFields.bitLen * 32; - const chunkBytes = Math.ceil(fieldBytes / 64) * 64; - this.blocksBuffer = new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(Math.ceil(fieldBytes / 64) * 64); } static named>>( diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index 5be2c3c2..b64ed331 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -150,8 +150,7 @@ export class StableContainerType>> e this.TreeView = opts?.getContainerTreeViewClass?.(this) ?? getContainerTreeViewClass(this); this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? getContainerTreeViewDUClass(this); const fieldBytes = this.fieldsEntries.length * 32; - const chunkBytes = Math.ceil(fieldBytes / 64) * 64; - this.blocksBuffer = new Uint8Array(chunkBytes); + this.blocksBuffer = new Uint8Array(Math.ceil(fieldBytes / 64) * 64); } static named>>( @@ -817,24 +816,24 @@ export function setActiveField(rootNode: Node, bitLen: number, fieldIndex: numbe return new BranchNode(rootNode.left, newActiveFieldsNode); } -// This is a global buffer to avoid creating a new one for each call to getChunkBytes -const mixInActiveFieldsChunkBytes = new Uint8Array(64); -const activeFieldsSingleChunk = mixInActiveFieldsChunkBytes.subarray(32); +// This is a global buffer to avoid creating a new one for each call to getBlocksBytes +const mixInActiveFieldsBlockBytes = new Uint8Array(64); +const activeFieldsSingleChunk = mixInActiveFieldsBlockBytes.subarray(32); export function mixInActiveFields(root: Uint8Array, activeFields: BitArray, output: Uint8Array, offset: number): void { // fast path for depth 1, the bitvector fits in one chunk - mixInActiveFieldsChunkBytes.set(root, 0); + mixInActiveFieldsBlockBytes.set(root, 0); if (activeFields.bitLen <= 256) { activeFieldsSingleChunk.fill(0); activeFieldsSingleChunk.set(activeFields.uint8Array); // 1 chunk for root, 1 chunk for activeFields const chunkCount = 2; - merkleizeBlocksBytes(mixInActiveFieldsChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(mixInActiveFieldsBlockBytes, chunkCount, output, offset); return; } const chunkCount = Math.ceil(activeFields.uint8Array.length / 32); merkleizeBlocksBytes(activeFields.uint8Array, chunkCount, activeFieldsSingleChunk, 0); // 1 chunk for root, 1 chunk for activeFields - merkleizeBlocksBytes(mixInActiveFieldsChunkBytes, 2, output, offset); + merkleizeBlocksBytes(mixInActiveFieldsBlockBytes, 2, output, offset); } diff --git a/packages/ssz/src/type/union.ts b/packages/ssz/src/type/union.ts index 1a20fcee..908d9604 100644 --- a/packages/ssz/src/type/union.ts +++ b/packages/ssz/src/type/union.ts @@ -49,11 +49,11 @@ export class UnionType[]> extends CompositeType< readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; - readonly mixInLengthChunkBytes = new Uint8Array(64); + readonly mixInLengthBlockBytes = new Uint8Array(64); readonly mixInLengthBuffer = Buffer.from( - this.mixInLengthChunkBytes.buffer, - this.mixInLengthChunkBytes.byteOffset, - this.mixInLengthChunkBytes.byteLength + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength ); protected readonly maxSelector: number; @@ -92,6 +92,7 @@ export class UnionType[]> extends CompositeType< this.minSize = 1 + Math.min(...minLens); this.maxSize = 1 + Math.max(...maxLens); this.maxSelector = this.types.length - 1; + // maxChunkCount = 1 so 
this.blocksBuffer.length = 32 in this case this.blocksBuffer = new Uint8Array(32); } @@ -184,10 +185,10 @@ export class UnionType[]> extends CompositeType< } hashTreeRootInto(value: ValueOfTypes, output: Uint8Array, offset: number): void { - super.hashTreeRootInto(value, this.mixInLengthChunkBytes, 0); + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); this.mixInLengthBuffer.writeUIntLE(value.selector, 32, 6); const chunkCount = 2; - merkleizeBlocksBytes(this.mixInLengthChunkBytes, chunkCount, output, offset); + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } protected getBlocksBytes(value: ValueOfTypes): Uint8Array { diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts index 9660b20a..0c528c96 100644 --- a/packages/ssz/src/type/vectorBasic.ts +++ b/packages/ssz/src/type/vectorBasic.ts @@ -59,10 +59,7 @@ export class VectorBasicType> this.minSize = this.fixedSize; this.maxSize = this.fixedSize; this.defaultLen = length; - // pad 1 chunk if maxChunkCount is not even - this.blocksBuffer = new Uint8Array( - this.maxChunkCount % 2 === 1 ? this.maxChunkCount * 32 + 32 : this.maxChunkCount * 32 - ); + this.blocksBuffer = new Uint8Array(Math.ceil(this.maxChunkCount / 2) * 64); } static named>( @@ -155,7 +152,7 @@ export class VectorBasicType> const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, this.length, {uint8Array, dataView}, 0, value); - // remaining bytes from this.fixedSize to this.chunkBytesBuffer.length must be zeroed + // remaining bytes from this.fixedSize to this.blocksBuffer.length must be zeroed return this.blocksBuffer; } diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts index 4cff0cb9..908f4f9b 100644 --- a/packages/ssz/src/type/vectorComposite.ts +++ b/packages/ssz/src/type/vectorComposite.ts @@ -13,7 +13,7 @@ import { tree_serializeToBytesArrayComposite, maxSizeArrayComposite, minSizeArrayComposite, - value_getChunkBytesArrayComposite, + value_getBlocksBytesArrayComposite, } from "./arrayComposite"; import {ArrayCompositeType, ArrayCompositeTreeView} from "../view/arrayComposite"; import {ArrayCompositeTreeViewDU} from "../viewDU/arrayComposite"; @@ -65,10 +65,7 @@ export class VectorCompositeType< this.minSize = minSizeArrayComposite(elementType, length); this.maxSize = maxSizeArrayComposite(elementType, length); this.defaultLen = length; - this.blocksBuffer = - this.maxChunkCount % 2 === 1 - ? 
new Uint8Array(this.maxChunkCount * 32 + 32)
-        : new Uint8Array(this.maxChunkCount * 32);
+    this.blocksBuffer = new Uint8Array(Math.ceil(this.maxChunkCount / 2) * 64);
   }

   // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -158,7 +155,7 @@

   // Merkleization
   protected getBlocksBytes(value: ValueOf[]): Uint8Array {
-    return value_getChunkBytesArrayComposite(this.elementType, this.length, value, this.blocksBuffer);
+    return value_getBlocksBytesArrayComposite(this.elementType, this.length, value, this.blocksBuffer);
   }

   // JSON: inherited from ArrayType
diff --git a/packages/ssz/test/perf/merkleize.test.ts b/packages/ssz/test/perf/merkleize.test.ts
index 70bc719b..af70868e 100644
--- a/packages/ssz/test/perf/merkleize.test.ts
+++ b/packages/ssz/test/perf/merkleize.test.ts
@@ -1,6 +1,6 @@
 import {itBench} from "@dapplion/benchmark";
 import {bitLength, merkleize} from "../../src/util/merkleize";
-import {merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree";
+import {merkleizeBlockArray, merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree";

 describe("merkleize / bitLength", () => {
   for (const n of [50, 8000, 250000]) {
@@ -15,19 +15,32 @@
 });

 describe("merkleize vs persistent-merkle-tree merkleizeBlocksBytes", () => {
-  const chunkCounts = [4, 8, 16, 32];
+  const chunkCounts = [32, 128, 512, 1024];

   for (const chunkCount of chunkCounts) {
     const rootArr = Array.from({length: chunkCount}, (_, i) => Buffer.alloc(32, i));
-    const roots = Buffer.concat(rootArr);
+    const blocksBytes = Buffer.concat(rootArr);
+    if (blocksBytes.length % 64 !== 0) {
+      throw new Error("blocksBytes length must be a multiple of 64");
+    }
+    const blockArray: Uint8Array[] = [];
+    for (let i = 0; i < blocksBytes.length; i += 64) {
+      blockArray.push(blocksBytes.slice(i, i + 64));
+    }
+
     const result = Buffer.alloc(32);
-    itBench(`merkleizeBlocksBytes ${chunkCount} chunks`, () => {
-      merkleizeBlocksBytes(roots, chunkCount, result, 0);
-    });

     itBench(`merkleize ${chunkCount} chunks`, () => {
       merkleize(rootArr, chunkCount);
     });
+
+    itBench(`merkleizeBlocksBytes ${chunkCount} chunks`, () => {
+      merkleizeBlocksBytes(blocksBytes, chunkCount, result, 0);
+    });
+
+    itBench(`merkleizeBlockArray ${chunkCount} chunks`, () => {
+      merkleizeBlockArray(blockArray, blockArray.length, chunkCount, result, 0);
+    });
   }
 });
diff --git a/packages/ssz/test/unit/byType/byteList/value.test.ts b/packages/ssz/test/unit/byType/byteList/value.test.ts
new file mode 100644
index 00000000..0033443f
--- /dev/null
+++ b/packages/ssz/test/unit/byType/byteList/value.test.ts
@@ -0,0 +1,28 @@
+import {expect} from "chai";
+import {ByteListType} from "../../../../src";
+
+describe("ByteListValue", () => {
+  const type = new ByteListType(1024);
+
+  it("should zero out the last sha256 block if it's over value.length", () => {
+    const value = Buffer.alloc(65, 1);
+    const expectedRoot = type.hashTreeRoot(value);
+    // now hash another value which make the cached blocks non zero
+    type.hashTreeRoot(Buffer.alloc(1024, 2));
+    const actualRoot = type.hashTreeRoot(value);
+    expect(actualRoot).to.deep.equal(expectedRoot);
+  });
+
+  it("should increase blockArray size if needed", () => {
+    const value0 = Buffer.alloc(65, 1);
+    const expectedRoot0 = type.hashTreeRoot(value0);
+    const value1 = Buffer.alloc(1024, 3);
+    const expectedRoot1 = type.hashTreeRoot(value1);
+    // now increase block array size
+    type.hashTreeRoot(Buffer.alloc(1024, 2));
+
+    // hash again
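+    // the cached blockArray grew while hashing the larger value; earlier roots must be unchanged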
+    expect(type.hashTreeRoot(value0)).to.deep.equal(expectedRoot0);
+    expect(type.hashTreeRoot(value1)).to.deep.equal(expectedRoot1);
+  });
+});
diff --git a/packages/ssz/test/unit/byType/container/tree.test.ts b/packages/ssz/test/unit/byType/container/tree.test.ts
index 6b545792..5c3bbd54 100644
--- a/packages/ssz/test/unit/byType/container/tree.test.ts
+++ b/packages/ssz/test/unit/byType/container/tree.test.ts
@@ -241,7 +241,7 @@ describe("ContainerViewDU batchHashTreeRoot", function () {
     a: uint64NumType,
     b: new BooleanType(),
     c: unionType,
-    d: new ByteListType(64),
+    d: new ByteListType(1024),
     e: new ByteVectorType(64),
     // a child container type
     f: childContainerType,
@@ -259,7 +259,8 @@
     a: 10,
     b: true,
     c: {selector: 1, value: 100},
-    d: Buffer.alloc(64, 2),
+    // make this not divisible by 64 to test edge case
+    d: Buffer.alloc(65, 2),
     e: Buffer.alloc(64, 1),
     f: {f0: 100, f1: 101},
     g: {g0: 100, g1: 101},
@@ -271,6 +272,7 @@
     m: BitArray.fromSingleBit(4, 1),
   };
   const expectedRoot = parentContainerType.toView(value).hashTreeRoot();
+  expect(parentContainerType.hashTreeRoot(value)).to.be.deep.equal(expectedRoot);

   it("fresh ViewDU", () => {
     expect(parentContainerType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
@@ -327,9 +329,10 @@
   it("full hash then modify ByteListType", () => {
     const viewDU = parentContainerType.toViewDU(value);
+    viewDU.d = Buffer.alloc(1024, 3);
     viewDU.batchHashTreeRoot();
-    // this takes more than 1 chunk so the resulting node is a branch node
-    viewDU.d = viewDU.d.slice();
+    // set back to the original value; this takes more than 1 chunk so the resulting node is a branch node
+    viewDU.d = Buffer.alloc(65, 2);
     expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot);

     // assign again but commit before batchHashTreeRoot()
diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts
index 27141389..f428365b 100644
--- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts
+++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts
@@ -226,119 +226,21 @@ describe("ListCompositeType.sliceFrom", () => {
   }
 });

-describe("ListCompositeType batchHashTreeRoot", () => {
-  const value = [
-    {a: 1, b: 2},
-    {a: 3, b: 4},
-  ];
-  const containerStructUintsType = new ContainerNodeStructType(
-    {a: uint64NumInfType, b: uint64NumInfType},
-    {typeName: "ContainerNodeStruct(uint64)"}
-  );
-  const listOfContainersType2 = new ListCompositeType(containerStructUintsType, 4, {
-    typeName: "ListCompositeType(ContainerNodeStructType)",
+describe("ListCompositeType hashTreeRoot", () => {
+  it("should zero out the last sha256 block", () => {
+    const listType = new ListCompositeType(ssz.Root, 1024);
+    const value0 = Array.from({length: 65}, (_, i) => Buffer.alloc(32, i));
+    const value1 = Array.from({length: 120}, (_, i) => Buffer.alloc(32, i));
+    const expectedRoot0 = listType.hashTreeRoot(value0);
+    const expectedRoot1 = listType.hashTreeRoot(value1);
+    // now increase block array size
+    listType.hashTreeRoot(Array.from({length: 1024}, () => Buffer.alloc(32, 3)));
+    expect(listType.hashTreeRoot(value0)).to.deep.equal(expectedRoot0);
+    expect(listType.hashTreeRoot(value1)).to.deep.equal(expectedRoot1);
   });
-
-  for (const list of [listOfContainersType, listOfContainersType2]) {
-    const typeName = list.typeName;
-    const
expectedRoot = list.toView(value).hashTreeRoot(); - - it(`${typeName} - fresh ViewDU`, () => { - expect(listOfContainersType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - }); - - it(`${typeName} - push then batchHashTreeRoot()`, () => { - const viewDU = listOfContainersType.defaultViewDU(); - viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); - viewDU.push(containerUintsType.toViewDU({a: 3, b: 4})); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - - // assign again, commit() then batchHashTreeRoot() - viewDU.set(0, containerUintsType.toViewDU({a: 1, b: 2})); - viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); - viewDU.commit(); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - }); - - it(`${typeName} - full hash then modify full non-hashed child element`, () => { - const viewDU = listOfContainersType.defaultViewDU(); - viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); - viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.batchHashTreeRoot(); - viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - - // assign the same value again, commit() then batchHashTreeRoot() - viewDU.set(1, containerUintsType.toViewDU({a: 3, b: 4})); - viewDU.commit(); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - }); - - it(`${typeName} - full hash then modify partially hashed child element`, () => { - const viewDU = listOfContainersType.defaultViewDU(); - viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); - viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.batchHashTreeRoot(); - const item1 = containerUintsType.toViewDU({a: 3, b: 44}); - item1.batchHashTreeRoot(); - item1.b = 4; - viewDU.set(1, item1); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - - // assign the same value again, commit() then batchHashTreeRoot() - const item2 = viewDU.get(1); - item2.a = 3; - item2.b = 4; - viewDU.commit(); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - }); - - it(`${typeName} - full hash then modify full hashed child element`, () => { - const viewDU = listOfContainersType.defaultViewDU(); - viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); - viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.batchHashTreeRoot(); - const item1 = containerUintsType.toViewDU({a: 3, b: 4}); - item1.batchHashTreeRoot(); - viewDU.set(1, item1); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - - // assign the same value again, commit() then batchHashTreeRoot() - const newItem = containerUintsType.toViewDU({a: 3, b: 4}); - viewDU.set(1, newItem); - viewDU.commit(); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - }); - - it(`${typeName} - full hash then modify partial child element`, () => { - const viewDU = listOfContainersType.defaultViewDU(); - viewDU.push(containerUintsType.toViewDU({a: 1, b: 2})); - viewDU.push(containerUintsType.toViewDU({a: 33, b: 44})); - viewDU.batchHashTreeRoot(); - viewDU.get(1).a = 3; - viewDU.get(1).b = 4; - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - - // assign the same value again, commit() then batchHashTreeRoot() - viewDU.get(1).a = 3; - viewDU.get(1).b = 4; - viewDU.commit(); - expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); - }); - - // similar to a fresh ViewDU but it's good to test - it(`${typeName} - sliceTo()`, () => { - const viewDU = 
listOfContainersType.defaultViewDU();
-      viewDU.push(containerUintsType.toViewDU({a: 1, b: 2}));
-      viewDU.push(containerUintsType.toViewDU({a: 3, b: 4}));
-      viewDU.push(containerUintsType.toViewDU({a: 5, b: 6}));
-      viewDU.batchHashTreeRoot();
-      expect(viewDU.sliceTo(1).batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
-    });
-  }
 });

-describe("ListCompositeType batchHashTreeRoot", () => {
+describe("ListCompositeType ViewDU batchHashTreeRoot", () => {
   const value = [
     {a: 1, b: 2},
     {a: 3, b: 4},
   ];
   const containerStructUintsType = new ContainerNodeStructType(
     {a: uint64NumInfType, b: uint64NumInfType},
     {typeName: "ContainerNodeStruct(uint64)"}
   );
   const listOfContainersType2 = new ListCompositeType(containerStructUintsType, 4, {
     typeName: "ListCompositeType(ContainerNodeStructType)",
   });
@@ -354,6 +256,7 @@
   for (const list of [listOfContainersType, listOfContainersType2]) {
     const typeName = list.typeName;
     const expectedRoot = list.toView(value).hashTreeRoot();
+    expect(listOfContainersType2.hashTreeRoot(value)).to.be.deep.equal(expectedRoot);

     it(`${typeName} - fresh ViewDU`, () => {
       expect(listOfContainersType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
diff --git a/packages/ssz/test/unit/merkleize.test.ts b/packages/ssz/test/unit/merkleize.test.ts
index dccb33c6..6626d183 100644
--- a/packages/ssz/test/unit/merkleize.test.ts
+++ b/packages/ssz/test/unit/merkleize.test.ts
@@ -1,6 +1,6 @@
 import {expect} from "chai";
 import {bitLength, maxChunksToDepth, merkleize, mixInLength, nextPowerOf2} from "../../src/util/merkleize";
-import {merkleizeBlocksBytes, LeafNode, zeroHash} from "@chainsafe/persistent-merkle-tree";
+import {merkleizeBlocksBytes, LeafNode, zeroHash, merkleizeBlockArray} from "@chainsafe/persistent-merkle-tree";

 describe("util / merkleize / bitLength", () => {
   const bitLengthByIndex = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4];
@@ -63,3 +63,31 @@
 });
+
+// same as the above, but using the merkleizeBlockArray() method
+describe("merkleize should be equal to merkleizeBlockArray of hasher", () => {
+  // hashtree has a buffer of 16 * 64 bytes = 32 nodes
+  const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79];
+  for (const numNode of numNodes) {
+    it(`merkleize for ${numNode} nodes`, () => {
+      const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+      const data = Buffer.concat(nodes.map((node) => node.root));
+      const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
+      expect(padData.length % 64).to.equal(0);
+      const blocks: Uint8Array[] = [];
+      for (let i = 0; i < padData.length; i += 64) {
+        blocks.push(padData.slice(i, i + 64));
+      }
+      const expectedRoot = Buffer.alloc(32);
+      // depth of 79 nodes is 7, make it 10 to test the padding
+      const chunkCount = Math.max(numNode, 10);
+      // add redundant blocks, should not affect the result
+      const blockLimit = blocks.length;
+      blocks.push(Buffer.alloc(64, 1));
+      blocks.push(Buffer.alloc(64, 2));
+      merkleizeBlockArray(blocks, blockLimit, chunkCount, expectedRoot, 0);
+      const roots = nodes.map((node) => node.root);
+      expect(merkleize(roots, chunkCount)).to.be.deep.equal(expectedRoot);
+    });
+  }
+});
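
For reference, a minimal consumer-side sketch of the new merkleizeBlockArray() API (the 1024-chunk limit and the chunk contents below are illustrative assumptions, not part of this patch). It mirrors what ListCompositeType.hashTreeRootInto() does above: pack two 32-byte chunk roots per 64-byte SHA256 block in a reusable pool, leave the upper half of a trailing odd block zeroed, and pass the type's maxChunkCount as padFor:

import {merkleizeBlockArray} from "@chainsafe/persistent-merkle-tree";

// 100 example chunk roots; 2 chunks share one 64-byte SHA256 block
const chunks = Array.from({length: 100}, (_, i) => Buffer.alloc(32, i));
const blockLimit = Math.ceil(chunks.length / 2);

// reusable pool of 64-byte blocks (a consumer would keep and grow this between calls)
const blocks: Uint8Array[] = [];
for (let i = 0; i < blockLimit; i++) {
  const block = new Uint8Array(64);
  block.set(chunks[2 * i], 0);
  if (2 * i + 1 < chunks.length) {
    block.set(chunks[2 * i + 1], 32);
  }
  // else: the upper 32 bytes stay zeroed, equivalent to appending zeroHash(0)
  blocks.push(block);
}

// padFor is the type's maxChunkCount (assumed 1024 here), not the value's chunk count
const root = new Uint8Array(32);
merkleizeBlockArray(blocks, blockLimit, 1024, root, 0);

Keeping the pool as an array of small blocks instead of one contiguous buffer is what lets consumers grow it incrementally and avoid the big per-call allocation the commit message refers to; note the blocks are mutated by the call, so they must be repopulated before the next hash.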