diff --git a/packages/as-sha256/src/index.ts b/packages/as-sha256/src/index.ts index 11f44cc3..46d60789 100644 --- a/packages/as-sha256/src/index.ts +++ b/packages/as-sha256/src/index.ts @@ -3,6 +3,7 @@ import {newInstance} from "./wasm"; import {HashObject, byteArrayIntoHashObject, byteArrayToHashObject, hashObjectToByteArray} from "./hashObject"; import SHA256 from "./sha256"; export {HashObject, byteArrayToHashObject, hashObjectToByteArray, byteArrayIntoHashObject, SHA256}; +export {allocUnsafe}; const ctx = newInstance(); const wasmInputValue = ctx.input.value; diff --git a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts index 943f9b49..9b8f8565 100644 --- a/packages/persistent-merkle-tree/src/hasher/as-sha256.ts +++ b/packages/persistent-merkle-tree/src/hasher/as-sha256.ts @@ -8,14 +8,20 @@ import { import type {Hasher} from "./types"; import {Node} from "../node"; import type {HashComputationLevel} from "../hashComputation"; -import {doDigestNLevel, doMerkleizeInto} from "./util"; +import {BLOCK_SIZE, doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes} from "./util"; + +/** the hashInto() function of as-sha256 loops through the input in 256-byte (4-block) batches */ +const buffer = new Uint8Array(4 * BLOCK_SIZE); export const hasher: Hasher = { name: "as-sha256", digest64: digest2Bytes32, digest64HashObjects: digest64HashObjectsInto, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(data, padFor, output, offset, hashInto); + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto); + }, + merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) { + return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts index f578d50c..9d04eb07 100644 --- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts +++ b/packages/persistent-merkle-tree/src/hasher/hashtree.ts @@ -3,7 +3,7 @@ import {Hasher, HashObject} from "./types"; import {Node} from "../node"; import type {HashComputationLevel} from "../hashComputation"; import {byteArrayIntoHashObject} from "@chainsafe/as-sha256/lib/hashObject"; -import {doDigestNLevel, doMerkleizeInto} from "./util"; +import {doDigestNLevel, doMerkleizeBlockArray, doMerkleizeBlocksBytes} from "./util"; /** * Best SIMD implementation is in 512 bits = 64 bytes @@ -40,8 +40,11 @@ export const hasher: Hasher = { hashInto(hash64Input, hash64Output); byteArrayIntoHashObject(hash64Output, 0, parent); }, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(data, padFor, output, offset, hashInto); + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto); + }, + merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) { + return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, uint8Input); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git 
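/* A hedged sketch (not part of the diff) of the hashInto contract that all three
   hashers feed into doMerkleizeBlocksBytes/doMerkleizeBlockArray, assuming the
   hashInto export of @chainsafe/as-sha256 that as-sha256.ts above imports: input
   is n 64-byte SHA256 blocks, output receives n 32-byte digests, so every call
   halves the data, and the 4 * BLOCK_SIZE scratch buffer matches as-sha256's
   4-block batch.

   import {hashInto} from "@chainsafe/as-sha256";

   const input = new Uint8Array(4 * 64); // 4 blocks = 8 chunks to be paired up
   const output = new Uint8Array(4 * 32); // receives the 4 parent chunks
   hashInto(input, output);
*/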
a/packages/persistent-merkle-tree/src/hasher/index.ts b/packages/persistent-merkle-tree/src/hasher/index.ts index 75442232..414ee703 100644 --- a/packages/persistent-merkle-tree/src/hasher/index.ts +++ b/packages/persistent-merkle-tree/src/hasher/index.ts @@ -27,8 +27,23 @@ export function digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return hasher.digestNLevel(data, nLevel); } -export function merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - hasher.merkleizeInto(data, padFor, output, offset); +export function merkleizeBlocksBytes( + blocksBytes: Uint8Array, + padFor: number, + output: Uint8Array, + offset: number +): void { + hasher.merkleizeBlocksBytes(blocksBytes, padFor, output, offset); +} + +export function merkleizeBlockArray( + blocks: Uint8Array[], + blockLimit: number, + padFor: number, + output: Uint8Array, + offset: number +): void { + hasher.merkleizeBlockArray(blocks, blockLimit, padFor, output, offset); } export function executeHashComputations(hashComputations: HashComputationLevel[]): void { diff --git a/packages/persistent-merkle-tree/src/hasher/noble.ts b/packages/persistent-merkle-tree/src/hasher/noble.ts index eb1224b6..e4a98bc4 100644 --- a/packages/persistent-merkle-tree/src/hasher/noble.ts +++ b/packages/persistent-merkle-tree/src/hasher/noble.ts @@ -1,7 +1,13 @@ import {sha256} from "@noble/hashes/sha256"; import {digest64HashObjects, byteArrayIntoHashObject} from "@chainsafe/as-sha256"; import type {Hasher} from "./types"; -import {doDigestNLevel, doMerkleizeInto, hashObjectToUint8Array} from "./util"; +import { + BLOCK_SIZE, + doDigestNLevel, + doMerkleizeBlockArray, + doMerkleizeBlocksBytes, + hashObjectToUint8Array, +} from "./util"; const digest64 = (a: Uint8Array, b: Uint8Array): Uint8Array => sha256.create().update(a).update(b).digest(); const hashInto = (input: Uint8Array, output: Uint8Array): void => { @@ -22,14 +28,20 @@ const hashInto = (input: Uint8Array, output: Uint8Array): void => { } }; +/** should be a multiple of 4 * 64 bytes, kept the same as the as-sha256 buffer */ +const buffer = new Uint8Array(4 * BLOCK_SIZE); + export const hasher: Hasher = { name: "noble", digest64, digest64HashObjects: (left, right, parent) => { byteArrayIntoHashObject(digest64(hashObjectToUint8Array(left), hashObjectToUint8Array(right)), 0, parent); }, - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { - return doMerkleizeInto(data, padFor, output, offset, hashInto); + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void { + return doMerkleizeBlocksBytes(blocksBytes, padFor, output, offset, hashInto); + }, + merkleizeBlockArray(blocks, blockLimit, padFor, output, offset) { + return doMerkleizeBlockArray(blocks, blockLimit, padFor, output, offset, hashInto, buffer); }, digestNLevel(data: Uint8Array, nLevel: number): Uint8Array { return doDigestNLevel(data, nLevel, hashInto); diff --git a/packages/persistent-merkle-tree/src/hasher/types.ts b/packages/persistent-merkle-tree/src/hasher/types.ts index 9f5813f0..dc1b6289 100644 --- a/packages/persistent-merkle-tree/src/hasher/types.ts +++ b/packages/persistent-merkle-tree/src/hasher/types.ts @@ -15,11 +15,23 @@ export type Hasher = { */ digest64HashObjects(left: HashObject, right: HashObject, parent: HashObject): void; /** - * Merkleize n chunk of data, 32 bytes each + * Merkleize n SHA256 blocks in a single Uint8Array, each block is 64 bytes * padFor is maxChunkCount, use it to compute layers to 
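/* A hedged usage sketch of the two entry points exported above, assuming both are
   re-exported from the package root (the ssz types later in this diff import them
   from "@chainsafe/persistent-merkle-tree"):

   import {merkleizeBlocksBytes, merkleizeBlockArray} from "@chainsafe/persistent-merkle-tree";

   const out = new Uint8Array(32);
   // 8 chunks already contiguous in one buffer (4 SHA256 blocks)
   const contiguous = new Uint8Array(8 * 32);
   merkleizeBlocksBytes(contiguous, 8, out, 0);
   // the same 8 chunks held as separate 64-byte blocks, e.g. a pooled array
   const blocks = Array.from({length: 4}, () => new Uint8Array(64));
   merkleizeBlockArray(blocks, blocks.length, 8, out, 0);
*/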
hash - * data is mutated after the function + * blocksBytes is mutated by this function */ - merkleizeInto(data: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; + merkleizeBlocksBytes(blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number): void; + /** + * Merkleize n SHA256 blocks, each a 64-byte Uint8Array + * padFor is maxChunkCount, use it to compute layers to hash + * blocks are mutated by this function + */ + merkleizeBlockArray( + blocks: Uint8Array[], + blockLimit: number, + padFor: number, + output: Uint8Array, + offset: number + ): void; /** * Hash multiple chunks (1 chunk = 32 bytes) at multiple levels * With nLevel = 3, hash multiple of 256 bytes, return multiple of 32 bytes. diff --git a/packages/persistent-merkle-tree/src/hasher/util.ts b/packages/persistent-merkle-tree/src/hasher/util.ts index a028253c..e51d785e 100644 --- a/packages/persistent-merkle-tree/src/hasher/util.ts +++ b/packages/persistent-merkle-tree/src/hasher/util.ts @@ -13,14 +13,18 @@ export function uint8ArrayToHashObject(byteArr: Uint8Array): HashObject { type HashIntoFn = (input: Uint8Array, output: Uint8Array) => void; +/** a SHA256 block is 64 bytes */ +export const BLOCK_SIZE = 64; + /** - * Input data is unsafe because it's modified - * If its chunk count is not even, need to be appended with zero hash at layer 0 so that we don't need - * a new memory allocation here (even through we don't need it if padFor = 1) - * The Uint8Array(32) will be written to output at offset + * Merkleize multiple SHA256 blocks in a single Uint8Array into ${output} at ${offset} + * - if padFor > 1, blocksBytes needs to be a multiple of 64 bytes. + * - if padFor = 1, blocksBytes needs to be at least 32 bytes + * - if padFor = 0, throws an error + * blocksBytes is unsafe because it's modified */ -export function doMerkleizeInto( - data: Uint8Array, +export function doMerkleizeBlocksBytes( + blocksBytes: Uint8Array, padFor: number, output: Uint8Array, offset: number, @@ -31,33 +35,35 @@ } const layerCount = Math.ceil(Math.log2(padFor)); - if (data.length === 0) { + if (blocksBytes.length === 0) { output.set(zeroHash(layerCount), offset); return; } - if (data.length % 32 !== 0) { - throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${data.length}`); + if (blocksBytes.length % 32 !== 0) { + throw new Error(`Invalid input length, expect to be multiple of 32 bytes, got ${blocksBytes.length}`); } // if padFor = 1, only need 32 bytes - if (padFor > 1 && data.length % 64 !== 0) { - throw new Error(`Invalid input length, expect to be multiple of 64 bytes, got ${data.length}, padFor=${padFor}`); + if (padFor > 1 && blocksBytes.length % BLOCK_SIZE !== 0) { + throw new Error( + `Invalid input length, expect to be multiple of 64 bytes, got ${blocksBytes.length}, padFor=${padFor}` + ); } - let inputLength = data.length; + let inputLength = blocksBytes.length; let outputLength = Math.floor(inputLength / 2); - let bufferIn = data; - // hash into the same buffer - for (let i = 0; i < layerCount; i++) { - const bufferOut = data.subarray(0, outputLength); + let bufferIn = blocksBytes; + // hash into the same buffer to save memory allocation + for (let layer = 0; layer < layerCount; layer++) { + const bufferOut = blocksBytes.subarray(0, outputLength); hashInto(bufferIn, bufferOut); const chunkCount = Math.floor(outputLength / 32); - if (chunkCount % 2 === 1 && i < layerCount - 1) { + if (chunkCount % 2 === 1 && layer < layerCount - 1) { // extend 
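/* A worked trace (illustrative numbers only) of the in-place fold in
   doMerkleizeBlocksBytes above, with 6 chunks (192 bytes) and padFor = 8,
   i.e. layerCount = 3:
   layer 0: hash 192B -> 96B = 3 chunks; odd, so zeroHash(1) is appended -> 128B
   layer 1: hash 128B -> 64B = 2 chunks
   layer 2: hash 64B -> 32B; that root is copied into output at offset.
   No extra buffer is allocated: every layer writes over the front of blocksBytes,
   which is why the input is documented as unsafe/mutated.
*/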
to 1 more chunk inputLength = outputLength + 32; - bufferIn = data.subarray(0, inputLength); - bufferIn.set(zeroHash(i + 1), outputLength); + bufferIn = blocksBytes.subarray(0, inputLength); + bufferIn.set(zeroHash(layer + 1), outputLength); } else { bufferIn = bufferOut; inputLength = outputLength; } @@ -68,6 +74,115 @@ output.set(bufferIn.subarray(0, 32), offset); } +/** + * Merkleize multiple SHA256 blocks into ${output} at ${offset} + * @param padFor is maxChunkCount, should be >= 2 + * @param blocks is unsafe because it's modified + * @param blockLimit number of blocks, should be <= blocks.length so that consumer can reuse memory + */ +export function doMerkleizeBlockArray( + blocks: Uint8Array[], + blockLimit: number, + padFor: number, + output: Uint8Array, + offset: number, + hashInto: HashIntoFn, + buffer: Uint8Array +): void { + if (padFor < 1) { + throw new Error(`Invalid padFor, expect to be at least 1, got ${padFor}`); + } + + if (blockLimit > blocks.length) { + throw new Error( + `Invalid blockLimit, expect to be less than or equal blocks.length ${blocks.length}, got ${blockLimit}` + ); + } + + const layerCount = Math.ceil(Math.log2(padFor)); + if (blockLimit === 0) { + output.set(zeroHash(layerCount), offset); + return; + } + + for (const block of blocks) { + if (block.length !== BLOCK_SIZE) { + throw new Error(`Invalid block length, expect to be 64 bytes, got ${block.length}`); + } + } + + // as-sha256 has a buffer of 4 * 64 bytes + // hashtree has a buffer of 16 * 64 bytes + if (buffer.length === 0 || buffer.length % (4 * BLOCK_SIZE) !== 0) { + throw new Error(`Invalid buffer length, expect to be multiple of 256 bytes, got ${buffer.length}`); + } + + // batchSize is 4 for as-sha256, 16 for hashtree + const batchSize = Math.floor(buffer.length / BLOCK_SIZE); + const halfBatchSize = Math.floor(batchSize / 2); + let bufferIn = buffer; + // hash into the same buffer + let bufferOut = buffer.subarray(0, halfBatchSize * BLOCK_SIZE); + // ignore remaining blocks + let blockCount = blockLimit; + // hash into the same blocks to save memory allocation + for (let layer = 0; layer < layerCount; layer++) { + let outBlockIndex = 0; + const sameLayerLoop = Math.floor(blockCount / batchSize); + for (let i = 0; i < sameLayerLoop; i++) { + // populate bufferIn + for (let j = 0; j < batchSize; j++) { + const blockIndex = i * batchSize + j; + bufferIn.set(blocks[blockIndex], j * BLOCK_SIZE); + } + + // hash into bufferOut + hashInto(bufferIn, bufferOut); + + // copy bufferOut to blocks, bufferOut.len = halfBatchSize * BLOCK_SIZE + for (let j = 0; j < halfBatchSize; j++) { + blocks[outBlockIndex].set(bufferOut.subarray(j * BLOCK_SIZE, (j + 1) * BLOCK_SIZE)); + outBlockIndex++; + } + } + + // remaining blocks + const remainingBlocks = blockCount % batchSize; + bufferIn = buffer.subarray(0, remainingBlocks * BLOCK_SIZE); + bufferOut = buffer.subarray(0, Math.floor(bufferIn.length / 2)); + + // populate bufferIn + for (let blockIndex = Math.floor(blockCount / batchSize) * batchSize; blockIndex < blockCount; blockIndex++) { + bufferIn.set(blocks[blockIndex], (blockIndex % batchSize) * BLOCK_SIZE); + } + + // hash into bufferOut + hashInto(bufferIn, bufferOut); + + // copy bufferOut to blocks, note that bufferOut.len may not be divisible by BLOCK_SIZE + for (let j = 0; j < Math.floor(bufferOut.length / BLOCK_SIZE); j++) { + blocks[outBlockIndex].set(bufferOut.subarray(j * BLOCK_SIZE, (j + 1) * BLOCK_SIZE)); + outBlockIndex++; + } + + if (bufferOut.length % BLOCK_SIZE 
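/* Illustrative batch arithmetic for one layer of doMerkleizeBlockArray (the loop
   above plus the padding branch just below), assuming the as-sha256 buffer so
   batchSize = 4 and halfBatchSize = 2, with blockCount = 70:
   - 17 full batches consume 68 blocks and write back 17 * 2 = 34 blocks
   - the 2-block remainder hashes 128B -> 64B, one more block, so outBlockIndex = 35
   - whenever a layer's bufferOut ends on a half block, its last 32 bytes are
     paired with zeroHash(layer + 1) to form a whole block for the next layer.
*/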
!== 0) { + // set the last 32 bytes of bufferOut + blocks[outBlockIndex].set(bufferOut.subarray(bufferOut.length - 32, bufferOut.length), 0); + // add zeroHash + blocks[outBlockIndex].set(zeroHash(layer + 1), 32); + outBlockIndex++; + } + + // end of layer, update blockCount, bufferIn, bufferOut + blockCount = outBlockIndex; + bufferIn = buffer.subarray(0, blockCount * BLOCK_SIZE); + bufferOut = buffer.subarray(0, Math.floor(bufferIn.length / 2)); + } + + // the end result stays in blocks[0] + output.set(blocks[0].subarray(0, 32), offset); +} + /** * Input data is unsafe because it's modified * given nLevel = 3 diff --git a/packages/persistent-merkle-tree/src/packedNode.ts b/packages/persistent-merkle-tree/src/packedNode.ts index 3c6413b8..459f6444 100644 --- a/packages/persistent-merkle-tree/src/packedNode.ts +++ b/packages/persistent-merkle-tree/src/packedNode.ts @@ -18,9 +18,16 @@ export function packedRootsBytesToNode(depth: number, dataView: DataView, start: * * h0 h1 h2 h3 h4 h5 h6 h7 * |------|------|------|------|------|------|------|------| + * + * @param values list of uint64 numbers + * @param leafNodes optional list of LeafNodes to reuse */ -export function packedUintNum64sToLeafNodes(values: number[]): LeafNode[] { - const leafNodes = new Array(Math.ceil(values.length / 4)); +export function packedUintNum64sToLeafNodes(values: number[], leafNodes?: LeafNode[]): LeafNode[] { + const nodeCount = Math.ceil(values.length / 4); + if (leafNodes && leafNodes.length !== nodeCount) { + throw new Error(`Invalid leafNode length: ${leafNodes.length} !== ${nodeCount}`); + } + leafNodes = leafNodes ?? new Array(Math.ceil(values.length / 4)); for (let i = 0; i < values.length; i++) { const nodeIndex = Math.floor(i / 4); const leafNode = leafNodes[nodeIndex] ?? 
new LeafNode(0, 0, 0, 0, 0, 0, 0, 0); diff --git a/packages/persistent-merkle-tree/test/perf/hasher.test.ts b/packages/persistent-merkle-tree/test/perf/hasher.test.ts index a01171c2..3c5dba2c 100644 --- a/packages/persistent-merkle-tree/test/perf/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/perf/hasher.test.ts @@ -64,42 +64,3 @@ describe("hasher", function () { }); } }); - -describe("hashtree", function () { - itBench({ - id: `getHashComputations`, - beforeEach: () => { - const [tree] = buildComparisonTrees(16); - return tree; - }, - fn: (tree) => { - const hcByLevel: HashComputationLevel[] = []; - getHashComputations(tree, 0, hcByLevel); - }, - }); - - itBench({ - id: `executeHashComputations`, - beforeEach: () => { - const [tree] = buildComparisonTrees(16); - return tree; - }, - fn: (tree) => { - const hcByLevel: HashComputationLevel[] = []; - getHashComputations(tree, 0, hcByLevel); - hashtreeHasher.executeHashComputations(hcByLevel); - }, - }); - - itBench({ - id: `get root`, - beforeEach: () => { - const [tree] = buildComparisonTrees(16); - setHasher(hashtreeHasher); - return tree; - }, - fn: (tree) => { - tree.root; - }, - }); -}); diff --git a/packages/persistent-merkle-tree/test/perf/node.test.ts b/packages/persistent-merkle-tree/test/perf/node.test.ts index 5de5373b..96f3d973 100644 --- a/packages/persistent-merkle-tree/test/perf/node.test.ts +++ b/packages/persistent-merkle-tree/test/perf/node.test.ts @@ -1,6 +1,6 @@ import {itBench} from "@dapplion/benchmark"; import {BranchNode, getNodeH, LeafNode} from "../../src/node"; -import {countToDepth, getHashComputations, HashComputation, subtreeFillToContents} from "../../src"; +import {countToDepth, getHashComputations, subtreeFillToContents} from "../../src"; import {batchHash} from "../utils/batchHash"; describe("HashObject LeafNode", () => { diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts index ee129fd0..6205bd56 100644 --- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts +++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts @@ -89,24 +89,77 @@ describe("hasher.digestNLevel", function () { }); -describe("hasher.merkleizeInto", function () { +describe("hasher.merkleizeBlocksBytes", function () { const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8]; for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) { it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { const data = Buffer.alloc(63, 0); const output = Buffer.alloc(32); - expect(() => hasher.merkleizeInto(data, 2, output, 0)).to.throw("Invalid input length"); + expect(() => hasher.merkleizeBlocksBytes(data, 2, output, 0)).to.throw("Invalid input length"); }); for (const numNode of numNodes) { - it(`${hasher.name}.merkleizeInto for ${numNode} nodes`, () => { + it(`${hasher.name}.merkleizeBlocksBytes for ${numNode} nodes`, () => { const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i))); const data = Buffer.concat(nodes.map((node) => node.root)); const output = Buffer.alloc(32); const chunkCount = Math.max(numNode, 1); const padData = numNode % 2 === 1 ? 
Buffer.concat([data, zeroHash(0)]) : data; - hasher.merkleizeInto(padData, chunkCount, output, 0); + hasher.merkleizeBlocksBytes(padData, chunkCount, output, 0); const depth = Math.ceil(Math.log2(chunkCount)); const root = subtreeFillToContents(nodes, depth).root; expectEqualHex(output, root); }); } } }); + +/** + * The same as the previous test, but using the merkleizeBlockArray method + */ +describe("hasher.merkleizeBlockArray", function () { + for (const hasher of [nobleHasher, hashtreeHasher, asSha256Hasher]) { + it (`${hasher.name} should throw error if invalid blockLimit`, () => { + const data = Buffer.alloc(64, 0); + const output = Buffer.alloc(32); + expect(() => hasher.merkleizeBlockArray([data], 2, 2, output, 0)).to.throw("Invalid blockLimit, expect to be less than or equal blocks.length 1, got 2"); + }); + + it (`${hasher.name} should throw error if not multiple of 64 bytes`, () => { + const data = Buffer.alloc(63, 0); + const output = Buffer.alloc(32); + expect(() => hasher.merkleizeBlockArray([data], 1, 2, output, 0)).to.throw("Invalid block length, expect to be 64 bytes, got 63"); + }); + + it (`${hasher.name} should throw error if chunkCount < 1`, () => { + const data = Buffer.alloc(64, 0); + const output = Buffer.alloc(32); + const chunkCount = 0; + expect(() => hasher.merkleizeBlockArray([data], 1, chunkCount, output, 0)).to.throw("Invalid padFor, expect to be at least 1, got 0"); + }); + + // hashtree has a buffer of 16 * 64 bytes = 32 nodes + const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79]; + for (const numNode of numNodes) { + it(`${hasher.name}.merkleizeBlockArray for ${numNode} nodes`, () => { + + const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i))); + const data = Buffer.concat(nodes.map((node) => node.root)); + const output = Buffer.alloc(32); + // depth of 79 nodes is 7, make it 10 to test the padding + const chunkCount = Math.max(numNode, 10); + const padData = numNode % 2 === 1 ? 
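/* A hedged note on the two extra blocks pushed after blockLimit below: they
   mirror a consumer that keeps a pooled Uint8Array[] larger than the current
   value. Only blocks[0..blockLimit) contribute to the root, though every entry
   of the pool must still be a 64-byte block, e.g. (hypothetical pool):

   const out = Buffer.alloc(32);
   const pool = Array.from({length: 8}, () => new Uint8Array(64));
   hasher.merkleizeBlockArray(pool, 4, 8, out, 0); // pool[4..8) not merkleized
*/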
Buffer.concat([data, zeroHash(0)]) : data; + expect(padData.length % 64).to.equal(0); + const blocks: Uint8Array[] = []; + for (let i = 0; i < padData.length; i += 64) { + blocks.push(padData.slice(i, i + 64)); + } + const blockLimit = blocks.length; + // should be able to run with above blocks, however add some redundant blocks similar to the consumer + blocks.push(Buffer.alloc(64, 1)); + blocks.push(Buffer.alloc(64, 2)); + hasher.merkleizeBlockArray(blocks, blockLimit, chunkCount, output, 0); const depth = Math.ceil(Math.log2(chunkCount)); const root = subtreeFillToContents(nodes, depth).root; expectEqualHex(output, root); diff --git a/packages/simpleserialize.com/package.json b/packages/simpleserialize.com/package.json index e472df4b..ab66ad7c 100644 --- a/packages/simpleserialize.com/package.json +++ b/packages/simpleserialize.com/package.json @@ -25,6 +25,7 @@ "eyzy-tree": "^0.2.2", "file-saver": "^2.0.5", "js-yaml": "^4.1.0", + "null-loader": "^4.0.1", "react": "^17.0.2", "react-alert": "^7.0.1", "react-alert-template-basic": "^1.0.0", diff --git a/packages/simpleserialize.com/webpack.config.js b/packages/simpleserialize.com/webpack.config.js index 0f2ae396..f822c0b7 100644 --- a/packages/simpleserialize.com/webpack.config.js +++ b/packages/simpleserialize.com/webpack.config.js @@ -1,22 +1,22 @@ -const webpack = require('webpack'); -const { resolve } = require('path'); -const MiniCssExtractPlugin = require('mini-css-extract-plugin') -const HtmlWebpackPlugin = require('html-webpack-plugin'); +const webpack = require("webpack"); +const {resolve} = require("path"); +const MiniCssExtractPlugin = require("mini-css-extract-plugin"); +const HtmlWebpackPlugin = require("html-webpack-plugin"); -const isProd = process.env.NODE_ENV === 'production'; +const isProd = process.env.NODE_ENV === "production"; const config = { devtool: "source-map", - mode: isProd ? 'production' : 'development', + mode: isProd ? 
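/* Hedged context for the null-loader rules added in this config:
   "@chainsafe/hashtree" ships native Node bindings that webpack cannot bundle
   for the browser, so the package is stubbed out and the web bundle relies on
   the wasm/js hashers instead. The rule is the standard null-loader shape:

   {test: /@chainsafe\/hashtree/, use: "null-loader"}
*/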
"production" : "development", entry: { - index: './src/index.tsx', + index: "./src/index.tsx", }, output: { - path: resolve(__dirname, 'dist'), - filename: '[name].js', + path: resolve(__dirname, "dist"), + filename: "[name].js", }, resolve: { - extensions: ['.js', '.jsx', '.ts', '.tsx'], + extensions: [".js", ".jsx", ".ts", ".tsx"], }, module: { rules: [ @@ -27,45 +27,49 @@ const config = { }, }, { - test: /\.scss$/, - use: [ + test: /\.scss$/, + use: [ MiniCssExtractPlugin.loader, { - loader: 'css-loader' + loader: "css-loader", }, { - loader: 'sass-loader', + loader: "sass-loader", options: { sourceMap: true, - } - } - ] - },{ + }, + }, + ], + }, + { test: /\.tsx?$/, - use: 'babel-loader', + use: "babel-loader", exclude: /node_modules/, - } + }, + { + use: "null-loader", + test: /@chainsafe\/hashtree/, + }, ], }, plugins: [ new webpack.ProvidePlugin({ - process: 'process/browser', - Buffer: ['buffer', 'Buffer'], + process: "process/browser", + Buffer: ["buffer", "Buffer"], }), new MiniCssExtractPlugin({ - filename: 'css/[name].bundle.css' + filename: "css/[name].bundle.css", }), new HtmlWebpackPlugin({ - title: 'Simple Serialize | Chainsafe Systems', - template: 'src/index.html', + title: "Simple Serialize | Chainsafe Systems", + template: "src/index.html", }), ], }; if (isProd) { config.optimization = { - minimizer: [ - ], + minimizer: [], }; } else { config.devServer = { @@ -73,7 +77,7 @@ if (isProd) { open: true, // https://webpack.js.org/configuration/dev-server/#devserveropen hot: true, // https://webpack.js.org/configuration/dev-server/#devserverhot compress: true, // https://webpack.js.org/configuration/dev-server/#devservercompress - stats: 'errors-only', // https://webpack.js.org/configuration/dev-server/#devserverstats- + stats: "errors-only", // https://webpack.js.org/configuration/dev-server/#devserverstats- overlay: true, // https://webpack.js.org/configuration/dev-server/#devserveroverlay }; } @@ -81,14 +85,14 @@ if (isProd) { const workerConfig = { name: "worker", resolve: { - extensions: ['.js', '.jsx', '.ts', '.tsx'], + extensions: [".js", ".jsx", ".ts", ".tsx"], }, entry: { - index: './src/components/worker/index.ts', + index: "./src/components/worker/index.ts", }, output: { - path: resolve(__dirname, 'dist'), - filename: 'worker.js', + path: resolve(__dirname, "dist"), + filename: "worker.js", }, module: { rules: [ @@ -100,21 +104,25 @@ const workerConfig = { }, { test: /worker?$/, - loader: 'threads-webpack-plugin', + loader: "threads-webpack-plugin", }, { test: /\.ts?$/, - use: 'babel-loader', + use: "babel-loader", exclude: /node_modules/, - } + }, + { + use: "null-loader", + test: /@chainsafe\/hashtree/, + }, ], }, plugins: [ new webpack.ProvidePlugin({ - process: 'process/browser', - Buffer: ['buffer', 'Buffer'], + process: "process/browser", + Buffer: ["buffer", "Buffer"], }), - ] -} + ], +}; module.exports = [config, workerConfig]; diff --git a/packages/ssz/src/index.ts b/packages/ssz/src/index.ts index ab3330c6..3a94a1bc 100644 --- a/packages/ssz/src/index.ts +++ b/packages/ssz/src/index.ts @@ -29,13 +29,21 @@ export {Type, ValueOf, JsonPath, ByteViews} from "./type/abstract"; export {BasicType, isBasicType} from "./type/basic"; export {CompositeType, CompositeTypeAny, CompositeView, CompositeViewDU, isCompositeType} from "./type/composite"; export {TreeView} from "./view/abstract"; -export {ValueOfFields} from "./view/container"; +export {ValueOfFields, ContainerTypeGeneric} from "./view/container"; export {TreeViewDU} from "./viewDU/abstract"; +export 
{ListCompositeTreeViewDU} from "./viewDU/listComposite"; +export {ListBasicTreeViewDU} from "./viewDU/listBasic"; +export {ArrayCompositeTreeViewDUCache} from "./viewDU/arrayComposite"; +export {ContainerNodeStructTreeViewDU} from "./viewDU/containerNodeStruct"; // Values export {BitArray, getUint8ByteToBitBooleanArray} from "./value/bitArray"; // Utils export {fromHexString, toHexString, byteArrayEquals} from "./util/byteArray"; + export {Snapshot} from "./util/types"; export {hash64, symbolCachedPermanentRoot} from "./util/merkleize"; + +// others +export {BranchNodeStruct} from "./branchNodeStruct"; diff --git a/packages/ssz/src/type/abstract.ts b/packages/ssz/src/type/abstract.ts index b96b7355..792ca077 100644 --- a/packages/ssz/src/type/abstract.ts +++ b/packages/ssz/src/type/abstract.ts @@ -145,6 +145,11 @@ export abstract class Type { */ abstract hashTreeRoot(value: V): Uint8Array; + /** + * Same to hashTreeRoot() but here we write result to output. + */ + abstract hashTreeRootInto(value: V, output: Uint8Array, offset: number): void; + // JSON support /** Parse JSON representation of a type to value */ diff --git a/packages/ssz/src/type/arrayComposite.ts b/packages/ssz/src/type/arrayComposite.ts index d3b0a8fb..d77e89dc 100644 --- a/packages/ssz/src/type/arrayComposite.ts +++ b/packages/ssz/src/type/arrayComposite.ts @@ -211,21 +211,29 @@ export function tree_deserializeFromBytesArrayComposite>( +export function value_getBlocksBytesArrayComposite>( elementType: ElementType, length: number, - value: ValueOf[] -): Uint8Array[] { - const roots = new Array(length); + value: ValueOf[], + blocksBuffer: Uint8Array +): Uint8Array { + const blockBytesLen = Math.ceil(length / 2) * 64; + if (blockBytesLen > blocksBuffer.length) { + throw new Error(`blocksBuffer is too small: ${blocksBuffer.length} < ${blockBytesLen}`); + } + const blocksBytes = blocksBuffer.subarray(0, blockBytesLen); for (let i = 0; i < length; i++) { - roots[i] = elementType.hashTreeRoot(value[i]); + elementType.hashTreeRootInto(value[i], blocksBytes, i * 32); + } + + const isOddChunk = length % 2 === 1; + if (isOddChunk) { + // similar to append zeroHash(0) + blocksBytes.subarray(length * 32, blockBytesLen).fill(0); } - return roots; + return blocksBytes; } function readOffsetsArrayComposite( diff --git a/packages/ssz/src/type/basic.ts b/packages/ssz/src/type/basic.ts index 0260ea49..920c6d97 100644 --- a/packages/ssz/src/type/basic.ts +++ b/packages/ssz/src/type/basic.ts @@ -30,11 +30,18 @@ export abstract class BasicType extends Type { } hashTreeRoot(value: V): Uint8Array { - // TODO: Optimize - const uint8Array = new Uint8Array(32); + // cannot use allocUnsafe() here because hashTreeRootInto() may not fill the whole 32 bytes + const root = new Uint8Array(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: V, output: Uint8Array, offset: number): void { + const uint8Array = output.subarray(offset, offset + 32); + // output could have preallocated data, some types may not fill the whole 32 bytes + uint8Array.fill(0); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); this.value_serializeToBytes({uint8Array, dataView}, 0, value); - return uint8Array; } clone(value: V): V { diff --git a/packages/ssz/src/type/bitArray.ts b/packages/ssz/src/type/bitArray.ts index 5071550c..469cd131 100644 --- a/packages/ssz/src/type/bitArray.ts +++ b/packages/ssz/src/type/bitArray.ts @@ -1,10 +1,10 @@ import {concatGindices, Gindex, Node, toGindex, Tree, 
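/* A sketch of the write-into pattern this diff threads through every type (type
   and value names here are hypothetical): callers that own a scratch buffer can
   pack several roots with no per-call allocation, which is how
   value_getBlocksBytesArrayComposite above writes element roots at i * 32.

   const scratch = new Uint8Array(2 * 32);
   checkpointType.hashTreeRootInto(source, scratch, 0); // root in bytes 0..32
   checkpointType.hashTreeRootInto(target, scratch, 32); // root in bytes 32..64
*/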
HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; -import {splitIntoRootChunks} from "../util/merkleize"; import {CompositeType, LENGTH_GINDEX} from "./composite"; import {BitArray} from "../value/bitArray"; import {BitArrayTreeView} from "../view/bitArray"; import {BitArrayTreeViewDU} from "../viewDU/bitArray"; +import {getBlocksBytes} from "./byteArray"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -40,8 +40,13 @@ export abstract class BitArrayType extends CompositeType this.blocksBuffer.length) { + const chunkCount = Math.ceil(value.bitLen / 8 / 32); + this.blocksBuffer = new Uint8Array(Math.ceil(chunkCount / 2) * 64); + } + return getBlocksBytes(value.uint8Array, this.blocksBuffer); } // Proofs diff --git a/packages/ssz/src/type/bitList.ts b/packages/ssz/src/type/bitList.ts index 0d8268b2..c343ac48 100644 --- a/packages/ssz/src/type/bitList.ts +++ b/packages/ssz/src/type/bitList.ts @@ -1,5 +1,12 @@ -import {getNodesAtDepth, Node, packedNodeRootsToBytes, packedRootsBytesToNode} from "@chainsafe/persistent-merkle-tree"; -import {mixInLength, maxChunksToDepth} from "../util/merkleize"; +import {allocUnsafe} from "@chainsafe/as-sha256"; +import { + getNodesAtDepth, + merkleizeBlocksBytes, + Node, + packedNodeRootsToBytes, + packedRootsBytesToNode, +} from "@chainsafe/persistent-merkle-tree"; +import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ByteViews} from "./composite"; @@ -29,6 +36,12 @@ export class BitListType extends BitArrayType { readonly maxSize: number; readonly maxChunkCount: number; readonly isList = true; + readonly mixInLengthBlockBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength + ); constructor(readonly limitBits: number, opts?: BitListOptions) { super(); @@ -101,7 +114,18 @@ export class BitListType extends BitArrayType { // Merkleization: inherited from BitArrayType hashTreeRoot(value: BitArray): Uint8Array { - return mixInLength(super.hashTreeRoot(value), value.bitLen); + const root = allocUnsafe(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: BitArray, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.bitLen, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/byteArray.ts b/packages/ssz/src/type/byteArray.ts index 202f1b24..fc67037b 100644 --- a/packages/ssz/src/type/byteArray.ts +++ b/packages/ssz/src/type/byteArray.ts @@ -8,7 +8,6 @@ import { getHashComputations, } from "@chainsafe/persistent-merkle-tree"; import {fromHexString, toHexString, byteArrayEquals} from "../util/byteArray"; -import {splitIntoRootChunks} from "../util/merkleize"; import {ByteViews} from "./abstract"; import {CompositeType, LENGTH_GINDEX} from "./composite"; @@ -82,10 +81,21 @@ export abstract class ByteArrayType extends CompositeType this.blocksBuffer.length) { + const chunkCount = Math.ceil(value.length / 32); + this.blocksBuffer = new Uint8Array(Math.ceil(chunkCount / 2) * 64); + } + return getBlocksBytes(value, 
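/* Layout of the 64-byte mixInLengthBlockBytes block used by BitListType above
   (and by the list types later in this diff):
   bytes 0..32:  hashTreeRoot of the value, written by super.hashTreeRootInto()
   bytes 32..38: bitLen (or length) as a 48-bit little-endian integer (writeUIntLE)
   bytes 38..64: always zero
   merkleizeBlocksBytes(block, 2, output, offset) then performs the final
   hash(root | length) mix-in in a single 2-chunk pass.
*/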
this.blocksBuffer); } // Proofs @@ -149,3 +159,16 @@ export abstract class ByteArrayType extends CompositeType blocksBuffer.length) { + throw new Error(`data length ${value.length} exceeds blocksBuffer length ${blocksBuffer.length}`); + } + + blocksBuffer.set(value); + const valueLen = value.length; + const blockByteLen = Math.ceil(valueLen / 64) * 64; + // all padding bytes must be zero, this is similar to set zeroHash(0) + blocksBuffer.subarray(valueLen, blockByteLen).fill(0); + return blocksBuffer.subarray(0, blockByteLen); +} diff --git a/packages/ssz/src/type/byteList.ts b/packages/ssz/src/type/byteList.ts index 6f12fff7..0ceaab7a 100644 --- a/packages/ssz/src/type/byteList.ts +++ b/packages/ssz/src/type/byteList.ts @@ -1,11 +1,18 @@ -import {getNodesAtDepth, Node, packedNodeRootsToBytes, packedRootsBytesToNode} from "@chainsafe/persistent-merkle-tree"; -import {mixInLength, maxChunksToDepth} from "../util/merkleize"; +import {allocUnsafe} from "@chainsafe/as-sha256"; +import { + getNodesAtDepth, + Node, + packedNodeRootsToBytes, + packedRootsBytesToNode, + merkleizeBlocksBytes, + merkleizeBlockArray, +} from "@chainsafe/persistent-merkle-tree"; +import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {addLengthNode, getChunksNodeFromRootNode, getLengthFromRootNode} from "./arrayBasic"; import {ByteViews} from "./composite"; import {ByteArrayType, ByteArray} from "./byteArray"; - /* eslint-disable @typescript-eslint/member-ordering */ export interface ByteListOptions { @@ -34,6 +41,14 @@ export class ByteListType extends ByteArrayType { readonly maxSize: number; readonly maxChunkCount: number; readonly isList = true; + readonly blockArray: Uint8Array[] = []; + private blockBytesLen = 0; + readonly mixInLengthBlockBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength + ); constructor(readonly limitBytes: number, opts?: ByteListOptions) { super(); @@ -89,7 +104,49 @@ export class ByteListType extends ByteArrayType { // Merkleization: inherited from ByteArrayType hashTreeRoot(value: ByteArray): Uint8Array { - return mixInLength(super.hashTreeRoot(value), value.length); + const root = allocUnsafe(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + /** + * Use merkleizeBlockArray() instead of merkleizeBlocksBytes() to avoid big memory allocation + */ + hashTreeRootInto(value: Uint8Array, output: Uint8Array, offset: number): void { + // should not call super.hashTreeRoot() here + // use merkleizeBlockArray() instead of merkleizeBlocksBytes() to avoid big memory allocation + // reallocate this.blockArray if needed + if (value.length > this.blockBytesLen) { + const newBlockCount = Math.ceil(value.length / 64); + // this.blockBytesLen should be a multiple of 64 + const oldBlockCount = Math.ceil(this.blockBytesLen / 64); + const blockDiff = newBlockCount - oldBlockCount; + const newBlocksBytes = new Uint8Array(blockDiff * 64); + for (let i = 0; i < blockDiff; i++) { + this.blockArray.push(newBlocksBytes.subarray(i * 64, (i + 1) * 64)); + this.blockBytesLen += 64; + } + } + + // populate this.blockArray + for (let i = 0; i < value.length; i += 64) { + const block = this.blockArray[i / 64]; + // zero out the last block if it's over value.length + if (i + 64 > value.length) { + block.fill(0); + } + block.set(value.subarray(i, Math.min(i + 64, value.length))); + } + 
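/* A rough cost sketch for the block pooling above (illustrative sizes): the
   first hashTreeRoot of a 1 MiB ByteList value grows this.blockArray to 16384
   64-byte views over a single backing allocation; later calls with values up to
   that size reuse the pool instead of allocating a fresh contiguous blocksBytes
   buffer per call.
*/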
+ // compute hashTreeRoot + const blockLimit = Math.ceil(value.length / 64); + merkleizeBlockArray(this.blockArray, blockLimit, this.maxChunkCount, this.mixInLengthBlockBytes, 0); + + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } // Proofs: inherited from BitArrayType diff --git a/packages/ssz/src/type/composite.ts b/packages/ssz/src/type/composite.ts index c403c385..ce70be4b 100644 --- a/packages/ssz/src/type/composite.ts +++ b/packages/ssz/src/type/composite.ts @@ -1,3 +1,4 @@ +import {allocUnsafe} from "@chainsafe/as-sha256"; import { concatGindices, createProof, @@ -7,10 +8,11 @@ import { Proof, ProofType, Tree, + merkleizeBlocksBytes, HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; import {byteArrayEquals} from "../util/byteArray"; -import {merkleize, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; +import {cacheRoot, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {treePostProcessFromProofNode} from "../util/proof/treePostProcessFromProofNode"; import {Type, ByteViews, JsonPath, JsonPathProp} from "./abstract"; export {ByteViews}; @@ -59,6 +61,7 @@ export abstract class CompositeType extends Type { * Required for ContainerNodeStruct to ensure no dangerous types are constructed. */ abstract readonly isViewMutable: boolean; + protected blocksBuffer = new Uint8Array(0); constructor( /** @@ -216,13 +219,30 @@ export abstract class CompositeType extends Type { } } - const root = merkleize(this.getRoots(value), this.maxChunkCount); + const root = allocUnsafe(32); + const safeCache = true; + this.hashTreeRootInto(value, root, 0, safeCache); + // hashTreeRootInto will cache the root if cachePermanentRootStruct is true + + return root; + } + + hashTreeRootInto(value: V, output: Uint8Array, offset: number, safeCache = false): void { + // Return cached mutable root if any if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; + if (cachedRoot) { + output.set(cachedRoot, offset); + return; + } } - return root; + const blocksBuffer = this.getBlocksBytes(value); + merkleizeBlocksBytes(blocksBuffer, this.maxChunkCount, output, offset); + if (this.cachePermanentRootStruct) { + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); + } } // For debugging and testing this feature @@ -236,7 +256,12 @@ export abstract class CompositeType extends Type { // and feed those numbers directly to the hasher input with a DataView // - The return of the hasher should be customizable too, to reduce conversions from Uint8Array // to hashObject and back. - protected abstract getRoots(value: V): Uint8Array[]; + + /** + * Get multiple SHA256 blocks, each is 64 bytes long. 
+ * If chunk count is not even, need to append zeroHash(0) + */ + protected abstract getBlocksBytes(value: V): Uint8Array; // Proofs API diff --git a/packages/ssz/src/type/container.ts b/packages/ssz/src/type/container.ts index daa1911d..2d6505ea 100644 --- a/packages/ssz/src/type/container.ts +++ b/packages/ssz/src/type/container.ts @@ -130,6 +130,8 @@ export class ContainerType>> extends // Refactor this constructor to allow customization without pollutin the options this.TreeView = opts?.getContainerTreeViewClass?.(this) ?? getContainerTreeViewClass(this); this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? getContainerTreeViewDUClass(this); + const fieldBytes = this.fieldsEntries.length * 32; + this.blocksBuffer = new Uint8Array(Math.ceil(fieldBytes / 64) * 64); } static named>>( @@ -272,15 +274,13 @@ export class ContainerType>> extends // Merkleization - protected getRoots(struct: ValueOfFields): Uint8Array[] { - const roots = new Array(this.fieldsEntries.length); - + protected getBlocksBytes(struct: ValueOfFields): Uint8Array { for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType} = this.fieldsEntries[i]; - roots[i] = fieldType.hashTreeRoot(struct[fieldName]); + fieldType.hashTreeRootInto(struct[fieldName], this.blocksBuffer, i * 32); } - - return roots; + // remaining bytes are zeroed as we never write them + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/containerNodeStruct.ts b/packages/ssz/src/type/containerNodeStruct.ts index 8cefa381..76147f12 100644 --- a/packages/ssz/src/type/containerNodeStruct.ts +++ b/packages/ssz/src/type/containerNodeStruct.ts @@ -106,7 +106,6 @@ export class ContainerNodeStructType return new BranchNodeStruct(this.valueToTree.bind(this), value); } - // TODO: Optimize conversion private valueToTree(value: ValueOfFields): Node { const uint8Array = new Uint8Array(this.value_serializedSize(value)); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); diff --git a/packages/ssz/src/type/listBasic.ts b/packages/ssz/src/type/listBasic.ts index c9e397e6..9af933d7 100644 --- a/packages/ssz/src/type/listBasic.ts +++ b/packages/ssz/src/type/listBasic.ts @@ -1,4 +1,4 @@ -import {LeafNode, Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; +import {HashComputationLevel, LeafNode, Node, Tree, merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree"; import {ValueOf} from "./abstract"; import {BasicType} from "./basic"; import {ByteViews} from "./composite"; @@ -10,19 +10,14 @@ import { addLengthNode, setChunksNode, } from "./arrayBasic"; -import { - mixInLength, - maxChunksToDepth, - splitIntoRootChunks, - symbolCachedPermanentRoot, - ValueWithCachedPermanentRoot, -} from "../util/merkleize"; +import {cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ArrayBasicType} from "../view/arrayBasic"; import {ListBasicTreeView} from "../view/listBasic"; import {ListBasicTreeViewDU} from "../viewDU/listBasic"; import {ArrayType} from "./array"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -52,6 +47,12 @@ export class ListBasicType> readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthBlockBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + 
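/* Worked sizing for the ContainerType blocksBuffer above (illustrative field
   count): 11 fields need 11 * 32 = 352 root bytes, rounded up to
   ceil(352 / 64) * 64 = 384 bytes; the trailing 32 zero bytes double as the
   odd-chunk padding that merkleizeBlocksBytes expects.
*/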
this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength + ); protected readonly defaultLen = 0; constructor(readonly elementType: ElementType, readonly limit: number, opts?: ListBasicOpts) { @@ -174,20 +175,52 @@ export class ListBasicType> } } - const root = mixInLength(super.hashTreeRoot(value), value.length); + const root = allocUnsafe(32); + const safeCache = true; + this.hashTreeRootInto(value, root, 0, safeCache); + + // hashTreeRootInto will cache the root if cachePermanentRootStruct is true + return root; + } + + hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number, safeCache = false): void { if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; + if (cachedRoot) { + output.set(cachedRoot, offset); + return; + } } - return root; + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); + + if (this.cachePermanentRootStruct) { + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); + } } - protected getRoots(value: ValueOf[]): Uint8Array[] { - const uint8Array = new Uint8Array(this.value_serializedSize(value)); + protected getBlocksBytes(value: ValueOf[]): Uint8Array { + const byteLen = this.value_serializedSize(value); + const blockByteLen = Math.ceil(byteLen / 64) * 64; + // reallocate this.blocksBuffer if needed + if (byteLen > this.blocksBuffer.length) { + // pad 1 chunk if maxChunkCount is not even + this.blocksBuffer = new Uint8Array(blockByteLen); + } + const blockBytes = this.blocksBuffer.subarray(0, blockByteLen); + const uint8Array = blockBytes.subarray(0, byteLen); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, value.length, {uint8Array, dataView}, 0, value); - return splitIntoRootChunks(uint8Array); + + // all padding bytes must be zero, this is similar to set zeroHash(0) + this.blocksBuffer.subarray(byteLen, blockByteLen).fill(0); + return blockBytes; } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/listComposite.ts b/packages/ssz/src/type/listComposite.ts index dad8e77c..5487f700 100644 --- a/packages/ssz/src/type/listComposite.ts +++ b/packages/ssz/src/type/listComposite.ts @@ -1,10 +1,11 @@ -import {Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; import { - mixInLength, - maxChunksToDepth, - symbolCachedPermanentRoot, - ValueWithCachedPermanentRoot, -} from "../util/merkleize"; + HashComputationLevel, + Node, + Tree, + merkleizeBlocksBytes, + merkleizeBlockArray, +} from "@chainsafe/persistent-merkle-tree"; +import {cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot, ValueWithCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ValueOf, ByteViews} from "./abstract"; @@ -17,13 +18,13 @@ import { tree_serializedSizeArrayComposite, tree_deserializeFromBytesArrayComposite, tree_serializeToBytesArrayComposite, - value_getRootsArrayComposite, maxSizeArrayComposite, } from "./arrayComposite"; import {ArrayCompositeType} from "../view/arrayComposite"; import 
{ListCompositeTreeView} from "../view/listComposite"; import {ListCompositeTreeViewDU} from "../viewDU/listComposite"; import {ArrayType} from "./array"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -56,6 +57,13 @@ export class ListCompositeType< readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly blockArray: Uint8Array[] = []; + readonly mixInLengthBlockBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength + ); protected readonly defaultLen = 0; constructor(readonly elementType: ElementType, readonly limit: number, opts?: ListCompositeOpts) { @@ -180,17 +188,66 @@ export class ListCompositeType< } } - const root = mixInLength(super.hashTreeRoot(value), value.length); + const root = allocUnsafe(32); + const safeCache = true; + this.hashTreeRootInto(value, root, 0, safeCache); + // hashTreeRootInto will cache the root if cachePermanentRootStruct is true + + return root; + } + + hashTreeRootInto(value: ValueOf[], output: Uint8Array, offset: number, safeCache = false): void { if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; + if (cachedRoot) { + output.set(cachedRoot, offset); + return; + } } - return root; + // should not call super.hashTreeRootInto() here + // use merkleizeBlockArray() instead of merkleizeBlocksBytes() to avoid big memory allocation + // reallocate this.blockArray if needed + if (value.length > this.blockArray.length) { + const blockDiff = value.length - this.blockArray.length; + const newBlocksBytes = new Uint8Array(blockDiff * 64); + for (let i = 0; i < blockDiff; i++) { + this.blockArray.push(newBlocksBytes.subarray(i * 64, (i + 1) * 64)); + } + } + + // populate this.blockArray + for (let i = 0; i < value.length; i++) { + // 2 values share a block + const block = this.blockArray[Math.floor(i / 2)]; + const offset = i % 2 === 0 ? 
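/* Element-to-block mapping used in the loop here: element i is hashed straight
   into block floor(i / 2) at byte offset (i % 2) * 32, so two element roots
   share one 64-byte block; with an odd element count the upper half of the last
   block is zeroed before merkleizeBlockArray runs over the first blockLimit
   blocks.
*/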
0 : 32; + this.elementType.hashTreeRootInto(value[i], block, offset); + } + + const blockLimit = Math.ceil(value.length / 2); + // zero out the last block if needed + if (value.length % 2 === 1) { + this.blockArray[blockLimit - 1].fill(0, 32); + } + + // compute hashTreeRoot + merkleizeBlockArray(this.blockArray, blockLimit, this.maxChunkCount, this.mixInLengthBlockBytes, 0); + + // mixInLength + this.mixInLengthBuffer.writeUIntLE(value.length, 32, 6); + // one for hashTreeRoot(value), one for length + const chunkCount = 2; + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); + + if (this.cachePermanentRootStruct) { + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); + } } - protected getRoots(value: ValueOf[]): Uint8Array[] { - return value_getRootsArrayComposite(this.elementType, value.length, value); + protected getBlocksBytes(): Uint8Array { + // we use merkleizeBlockArray for hashTreeRoot() computation + throw Error("getBlockBytes should not be called for ListCompositeType"); } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/listUintNum64.ts b/packages/ssz/src/type/listUintNum64.ts index 663e637d..4ee4826b 100644 --- a/packages/ssz/src/type/listUintNum64.ts +++ b/packages/ssz/src/type/listUintNum64.ts @@ -1,24 +1,53 @@ -import {LeafNode, Node, packedUintNum64sToLeafNodes, subtreeFillToContents} from "@chainsafe/persistent-merkle-tree"; +import { + HashComputationGroup, + HashComputationLevel, + LeafNode, + Node, + executeHashComputations, + getNodesAtDepth, + levelAtIndex, + packedUintNum64sToLeafNodes, + setNodesAtDepth, + subtreeFillToContents, + zeroNode, +} from "@chainsafe/persistent-merkle-tree"; import {ListBasicTreeViewDU} from "../viewDU/listBasic"; import {ListBasicOpts, ListBasicType} from "./listBasic"; import {UintNumberType} from "./uint"; -import {addLengthNode} from "./arrayBasic"; +import {addLengthNode, getLengthFromRootNode} from "./arrayBasic"; /** * Specific implementation of ListBasicType for UintNumberType with some optimizations. */ export class ListUintNum64Type extends ListBasicType { + private hcGroup: HashComputationGroup | undefined; constructor(limit: number, opts?: ListBasicOpts) { super(new UintNumberType(8), limit, opts); } /** * Return a ListBasicTreeViewDU with nodes populated + * @param unusedViewDU optional, if provided we'll create ViewDU using the provided rootNode. Need to rehash the whole + * tree in this case to make it clean for consumers. 
*/ - toViewDU(value: number[]): ListBasicTreeViewDU { + toViewDU(value: number[], unusedViewDU?: ListBasicTreeViewDU): ListBasicTreeViewDU { // no need to serialize and deserialize like in the abstract class - const {treeNode, leafNodes} = this.packedUintNum64sToNode(value); + const {treeNode, leafNodes} = this.packedUintNum64sToNode(value, unusedViewDU?.node); + + if (unusedViewDU) { + const hcGroup = this.getHcGroup(); + hcGroup.reset(); + forceGetHashComputations(treeNode, this.chunkDepth + 1, 0, hcGroup.byLevel); + hcGroup.clean(); + + treeNode.h0 = null as unknown as number; + executeHashComputations(hcGroup.byLevel); + // This makes sure the root node is computed by batch + if (treeNode.h0 === null) { + throw Error("Root is not computed by batch"); + } + } // cache leaf nodes in the ViewDU return this.getViewDU(treeNode, { nodes: leafNodes, @@ -29,21 +58,86 @@ export class ListUintNum64Type extends ListBasicType { /** * No need to serialize and deserialize like in the abstract class + * This should be conformed to parent's signature so cannot provide an `unusedViewDU` parameter here */ value_toTree(value: number[]): Node { const {treeNode} = this.packedUintNum64sToNode(value); return treeNode; } - private packedUintNum64sToNode(value: number[]): {treeNode: Node; leafNodes: LeafNode[]} { + private packedUintNum64sToNode(value: number[], unusedRootNode?: Node): {treeNode: Node; leafNodes: LeafNode[]} { if (value.length > this.limit) { throw new Error(`Exceeds limit: ${value.length} > ${this.limit}`); } + if (unusedRootNode) { + // create new tree from unusedRootNode + const oldLength = getLengthFromRootNode(unusedRootNode); + if (oldLength > value.length) { + throw new Error(`Cannot decrease length: ${oldLength} > ${value.length}`); + } + + const oldNodeCount = Math.ceil(oldLength / 4); + const oldChunksNode = unusedRootNode.left; + const oldLeafNodes = getNodesAtDepth(oldChunksNode, this.chunkDepth, 0, oldNodeCount) as LeafNode[]; + if (oldLeafNodes.length !== oldNodeCount) { + throw new Error(`oldLeafNodes.length ${oldLeafNodes.length} !== oldNodeCount ${oldNodeCount}`); + } + + const newNodeCount = Math.ceil(value.length / 4); + const count = newNodeCount - oldNodeCount; + const newLeafNodes = Array.from({length: count}, () => new LeafNode(0, 0, 0, 0, 0, 0, 0, 0)); + const leafNodes = [...oldLeafNodes, ...newLeafNodes]; + packedUintNum64sToLeafNodes(value, leafNodes); + + // middle nodes are not changed so consumer must recompute parent hashes + const newChunksNode = setNodesAtDepth( + oldChunksNode, + this.chunkDepth, + Array.from({length: count}, (_, i) => oldNodeCount + i), + newLeafNodes + ); + const treeNode = addLengthNode(newChunksNode, value.length); + + return {treeNode, leafNodes}; + } + + // create new tree from scratch const leafNodes = packedUintNum64sToLeafNodes(value); // subtreeFillToContents mutates the leafNodes array - const rootNode = subtreeFillToContents([...leafNodes], this.chunkDepth); - const treeNode = addLengthNode(rootNode, value.length); + const chunksNode = subtreeFillToContents([...leafNodes], this.chunkDepth); + const treeNode = addLengthNode(chunksNode, value.length); return {treeNode, leafNodes}; } + + private getHcGroup(): HashComputationGroup { + if (!this.hcGroup) { + this.hcGroup = new HashComputationGroup(); + } + return this.hcGroup; + } +} + +/** + * Consider moving this to persistent-merkle-tree. + * For now this is the only flow to force get hash computations. 
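/* A hedged usage sketch for the unusedViewDU path above (hypothetical limit and
   values): the old tree's leaf nodes are reused and the whole tree is then
   rehashed in one batched executeHashComputations() pass:

   const Balances = new ListUintNum64Type(1_099_511_627_776);
   const oldView = Balances.toViewDU([32, 32, 32]);
   const newView = Balances.toViewDU([32, 32, 32, 32], oldView);
*/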
+ */ +function forceGetHashComputations( + node: Node, + nodeDepth: number, + index: number, + hcByLevel: HashComputationLevel[] +): void { + // very important: never mutate zeroNode + if (node === zeroNode(nodeDepth) || node.isLeaf()) { + return; + } + + // if (node.h0 === null) { + const hashComputations = levelAtIndex(hcByLevel, index); + const {left, right} = node; + hashComputations.push(left, right, node); + // leaf nodes should have h0 to stop the recursion + forceGetHashComputations(left, nodeDepth - 1, index + 1, hcByLevel); + forceGetHashComputations(right, nodeDepth - 1, index + 1, hcByLevel); } diff --git a/packages/ssz/src/type/optional.ts b/packages/ssz/src/type/optional.ts index 7c5f9baf..1473e6d7 100644 --- a/packages/ssz/src/type/optional.ts +++ b/packages/ssz/src/type/optional.ts @@ -1,18 +1,19 @@ import { concatGindices, Gindex, + merkleizeBlocksBytes, Node, Tree, zeroNode, - HashComputationLevel, getHashComputations, + HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; -import {mixInLength} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {Type, ByteViews, JsonPath, JsonPathProp} from "./abstract"; import {CompositeType, isCompositeType} from "./composite"; import {addLengthNode, getLengthFromRootNode} from "./arrayBasic"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ export type NonOptionalType> = T extends OptionalType ? U : T; @@ -47,6 +48,12 @@ export class OptionalType> extends CompositeTy readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthBlockBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength + ); constructor(readonly elementType: ElementType, opts?: OptionalOpts) { super(); @@ -59,6 +66,8 @@ export class OptionalType> extends CompositeTy this.minSize = 0; // Max size includes prepended 0x01 byte this.maxSize = elementType.maxSize + 1; + // maxChunkCount = 1 so this.blocksBuffer.length = 32 in this case + this.blocksBuffer = new Uint8Array(32); } static named>( @@ -171,13 +180,27 @@ export class OptionalType> extends CompositeTy // Merkleization hashTreeRoot(value: ValueOfType): Uint8Array { + const root = allocUnsafe(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: ValueOfType, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); const selector = value === null ? 0 : 1; - return mixInLength(super.hashTreeRoot(value), selector); + this.mixInLengthBuffer.writeUIntLE(selector, 32, 6); + // one for hashTreeRoot(value), one for selector + const chunkCount = 2; + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } - protected getRoots(value: ValueOfType): Uint8Array[] { - const valueRoot = value === null ? 
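/* The Optional merkleization above mirrors the length mix-in used by the list
   types: chunk 0 holds the element root (or 32 zero bytes when value is null),
   the selector 0/1 is written little-endian at byte 32, and one 2-chunk
   merkleizeBlocksBytes call computes hash(root(value) | selector).
*/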
new Uint8Array(32) : this.elementType.hashTreeRoot(value); - return [valueRoot]; + protected getBlocksBytes(value: ValueOfType): Uint8Array { + if (value === null) { + this.blocksBuffer.fill(0); + } else { + this.elementType.hashTreeRootInto(value, this.blocksBuffer, 0); + } + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/profile.ts b/packages/ssz/src/type/profile.ts index f9469fe0..06b2cf76 100644 --- a/packages/ssz/src/type/profile.ts +++ b/packages/ssz/src/type/profile.ts @@ -6,12 +6,13 @@ import { Gindex, toGindex, concatGindices, + merkleizeBlocksBytes, getNode, BranchNode, zeroHash, zeroNode, } from "@chainsafe/persistent-merkle-tree"; -import {ValueWithCachedPermanentRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; +import {ValueWithCachedPermanentRoot, cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {Type, ValueOf} from "./abstract"; @@ -87,6 +88,8 @@ export class ProfileType>> extends C protected readonly TreeView: ContainerTreeViewTypeConstructor; protected readonly TreeViewDU: ContainerTreeViewDUTypeConstructor; private optionalFieldsCount: number; + // temporary root to avoid memory allocation + private tempRoot = new Uint8Array(32); constructor(readonly fields: Fields, activeFields: BitArray, readonly opts?: ProfileOptions) { super(); @@ -154,6 +157,8 @@ // Refactor this constructor to allow customization without polluting the options this.TreeView = opts?.getProfileTreeViewClass?.(this) ?? getProfileTreeViewClass(this); this.TreeViewDU = opts?.getProfileTreeViewDUClass?.(this) ?? getProfileTreeViewDUClass(this); + const fieldBytes = this.activeFields.bitLen * 32; + this.blocksBuffer = new Uint8Array(Math.ceil(fieldBytes / 64) * 64); } static named>>( @@ -361,37 +366,38 @@ export class ProfileType>> extends C } // Merkleization - hashTreeRoot(value: ValueOfFields): Uint8Array { + // hashTreeRoot is the same as the parent's since it calls hashTreeRootInto() + hashTreeRootInto(value: ValueOfFields, output: Uint8Array, offset: number, safeCache = false): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; if (cachedRoot) { - return cachedRoot; + output.set(cachedRoot, offset); + return; } } - const root = mixInActiveFields(super.hashTreeRoot(value), this.activeFields); + const blocksBytes = this.getBlocksBytes(value); + merkleizeBlocksBytes(blocksBytes, this.maxChunkCount, this.tempRoot, 0); + mixInActiveFields(this.tempRoot, this.activeFields, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } - - return root; }
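A worked example of the buffer sizing above (the field count is illustrative):

const bitLen = 5; // a hypothetical Profile with 5 field positions
const fieldBytes = bitLen * 32; // 160: one 32-byte chunk slot per field position
const bufferBytes = Math.ceil(fieldBytes / 64) * 64; // 192: padded to whole 64-byte hash blocks (6 chunks)

Each field root is written at chunkIndex * 32 below, and slots that are never written stay zero:

- protected getRoots(struct: ValueOfFields): Uint8Array[] { - const roots = new Array(this.activeFields.bitLen).fill(zeroHash(0)); - - // already asserted that # of active fields in bitvector === # of fields + protected getBlocksBytes(struct: ValueOfFields): Uint8Array { + this.blocksBuffer.fill(0); for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType, chunkIndex, optional} = this.fieldsEntries[i]; if (optional && struct[fieldName] == null) { - continue; + this.blocksBuffer.set(zeroHash(0), chunkIndex * 32); + } else { + fieldType.hashTreeRootInto(struct[fieldName],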
this.blocksBuffer, chunkIndex * 32); } - roots[chunkIndex] = fieldType.hashTreeRoot(struct[fieldName]); } - - return roots; + // remaining bytes are zeroed as we never write them + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/stableContainer.ts b/packages/ssz/src/type/stableContainer.ts index bf8b94fa..b64ed331 100644 --- a/packages/ssz/src/type/stableContainer.ts +++ b/packages/ssz/src/type/stableContainer.ts @@ -11,19 +11,13 @@ import { getNode, zeroNode, zeroHash, + merkleizeBlocksBytes, countToDepth, getNodeH, setNode, setNodeWithFn, } from "@chainsafe/persistent-merkle-tree"; -import { - ValueWithCachedPermanentRoot, - hash64, - maxChunksToDepth, - merkleize, - splitIntoRootChunks, - symbolCachedPermanentRoot, -} from "../util/merkleize"; +import {ValueWithCachedPermanentRoot, cacheRoot, maxChunksToDepth, symbolCachedPermanentRoot} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {JsonPath, Type, ValueOf} from "./abstract"; @@ -99,6 +93,8 @@ export class StableContainerType>> e protected readonly TreeView: ContainerTreeViewTypeConstructor; protected readonly TreeViewDU: ContainerTreeViewDUTypeConstructor; private padActiveFields: boolean[]; + // temporary root to avoid memory allocation + private tempRoot = new Uint8Array(32); constructor(fields: Fields, readonly maxFields: number, readonly opts?: StableContainerOptions) { super(); @@ -153,6 +149,8 @@ // Refactor this constructor to allow customization without polluting the options this.TreeView = opts?.getContainerTreeViewClass?.(this) ?? getContainerTreeViewClass(this); this.TreeViewDU = opts?.getContainerTreeViewDUClass?.(this) ?? getContainerTreeViewDUClass(this); + const fieldBytes = this.fieldsEntries.length * 32; + this.blocksBuffer = new Uint8Array(Math.ceil(fieldBytes / 64) * 64); } static named>>( @@ -341,43 +339,43 @@ } // Merkleization - hashTreeRoot(value: ValueOfFields): Uint8Array { + // hashTreeRoot is the same as the parent's since it calls hashTreeRootInto() + hashTreeRootInto(value: ValueOfFields, output: Uint8Array, offset: number, safeCache = false): void { // Return cached mutable root if any if (this.cachePermanentRootStruct) { const cachedRoot = (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot]; if (cachedRoot) { - return cachedRoot; + output.set(cachedRoot, offset); + return; } } + const blockBytes = this.getBlocksBytes(value); + merkleizeBlocksBytes(blockBytes, this.maxChunkCount, this.tempRoot, 0); // compute active field bitvector const activeFields = BitArray.fromBoolArray([ ...this.fieldsEntries.map(({fieldName}) => value[fieldName] != null), ...this.padActiveFields, ]); - const root = mixInActiveFields(super.hashTreeRoot(value), activeFields); + mixInActiveFields(this.tempRoot, activeFields, output, offset); if (this.cachePermanentRootStruct) { - (value as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = root; + cacheRoot(value as ValueWithCachedPermanentRoot, output, offset, safeCache); } - - return root; } - protected getRoots(struct: ValueOfFields): Uint8Array[] { - const roots = new Array(this.fieldsEntries.length); - + protected getBlocksBytes(struct: ValueOfFields): Uint8Array { + this.blocksBuffer.fill(0); for (let i = 0; i < this.fieldsEntries.length; i++) { const {fieldName, fieldType, optional} = this.fieldsEntries[i]; if (optional && struct[fieldName] == null) { - roots[i] = zeroHash(0); - continue; +
this.blocksBuffer.set(zeroHash(0), i * 32); + } else { + fieldType.hashTreeRootInto(struct[fieldName], this.blocksBuffer, i * 32); } - - roots[i] = fieldType.hashTreeRoot(struct[fieldName]); } - return roots; + return this.blocksBuffer; } // Proofs @@ -751,12 +749,15 @@ export function getActiveFields(rootNode: Node, bitLen: number): BitArray { return new BitArray(activeFieldsBuf, bitLen); } +// This is a global buffer to avoid creating a new one for each call to setActiveFields +const singleChunkActiveFieldsBuf = new Uint8Array(32); + export function setActiveFields(rootNode: Node, activeFields: BitArray): Node { // fast path for depth 1, the bitvector fits in one chunk if (activeFields.bitLen <= 256) { - const activeFieldsBuf = new Uint8Array(32); - activeFieldsBuf.set(activeFields.uint8Array); - return new BranchNode(rootNode.left, LeafNode.fromRoot(activeFieldsBuf)); + singleChunkActiveFieldsBuf.fill(0); + singleChunkActiveFieldsBuf.set(activeFields.uint8Array); + return new BranchNode(rootNode.left, LeafNode.fromRoot(singleChunkActiveFieldsBuf)); } const activeFieldsChunkCount = Math.ceil(activeFields.bitLen / 256); @@ -815,15 +816,24 @@ export function setActiveField(rootNode: Node, bitLen: number, fieldIndex: numbe return new BranchNode(rootNode.left, newActiveFieldsNode); } -export function mixInActiveFields(root: Uint8Array, activeFields: BitArray): Uint8Array { +// This is a global buffer to avoid creating a new one for each call to mixInActiveFields +const mixInActiveFieldsBlockBytes = new Uint8Array(64); +const activeFieldsSingleChunk = mixInActiveFieldsBlockBytes.subarray(32); + +export function mixInActiveFields(root: Uint8Array, activeFields: BitArray, output: Uint8Array, offset: number): void { // fast path for depth 1, the bitvector fits in one chunk + mixInActiveFieldsBlockBytes.set(root, 0); if (activeFields.bitLen <= 256) { - const activeFieldsChunk = new Uint8Array(32); - activeFieldsChunk.set(activeFields.uint8Array); - return hash64(root, activeFieldsChunk); - } - - const activeFieldsChunks = splitIntoRootChunks(activeFields.uint8Array); - const activeFieldsRoot = merkleize(activeFieldsChunks, activeFieldsChunks.length); - return hash64(root, activeFieldsRoot); + activeFieldsSingleChunk.fill(0); + activeFieldsSingleChunk.set(activeFields.uint8Array); + // 1 chunk for root, 1 chunk for activeFields + const chunkCount = 2; + merkleizeBlocksBytes(mixInActiveFieldsBlockBytes, chunkCount, output, offset); + return; + } + + const chunkCount = Math.ceil(activeFields.uint8Array.length / 32); + merkleizeBlocksBytes(activeFields.uint8Array, chunkCount, activeFieldsSingleChunk, 0); + // 1 chunk for root, 1 chunk for activeFields + merkleizeBlocksBytes(mixInActiveFieldsBlockBytes, 2, output, offset); } diff --git a/packages/ssz/src/type/uint.ts b/packages/ssz/src/type/uint.ts index 910310f4..81b47a18 100644 --- a/packages/ssz/src/type/uint.ts +++ b/packages/ssz/src/type/uint.ts @@ -133,6 +133,12 @@ export class UintNumberType extends BasicType { } } + value_toTree(value: number): Node { + const node = LeafNode.fromZero(); + node.setUint(this.byteLength, 0, value, this.clipInfinity); + return node; + } + tree_serializeToBytes(output: ByteViews, offset: number, node: Node): number { const value = (node as LeafNode).getUint(this.byteLength, 0, this.clipInfinity); this.value_serializeToBytes(output, offset, value);
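Condensing the `mixInActiveFields` rewrite above into its two paths (a restatement for readers, not new behavior):

const block = new Uint8Array(64);
block.set(root, 0); // chunk 0: the container root
if (activeFields.bitLen <= 256) {
  block.set(activeFields.uint8Array, 32); // chunk 1: the bitvector fits in a single chunk
} else {
  const chunkCount = Math.ceil(activeFields.uint8Array.length / 32);
  merkleizeBlocksBytes(activeFields.uint8Array, chunkCount, block, 32); // chunk 1: the bitvector's own root
}
merkleizeBlocksBytes(block, 2, output, offset); // H(root || activeFieldsRoot)

diff --git a/packages/ssz/src/type/union.ts index fbd7f97a..908d9604 100644 --- a/packages/ssz/src/type/union.ts +++ 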
b/packages/ssz/src/type/union.ts @@ -4,16 +4,17 @@ import { Gindex, Node, Tree, - HashComputationLevel, + merkleizeBlocksBytes, getHashComputations, + HashComputationLevel, } from "@chainsafe/persistent-merkle-tree"; -import {mixInLength} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {Type, ByteViews} from "./abstract"; import {CompositeType, isCompositeType} from "./composite"; import {addLengthNode, getLengthFromRootNode} from "./arrayBasic"; import {NoneType} from "./none"; +import {allocUnsafe} from "@chainsafe/as-sha256"; /* eslint-disable @typescript-eslint/member-ordering */ @@ -48,6 +49,12 @@ export class UnionType[]> extends CompositeType< readonly maxSize: number; readonly isList = true; readonly isViewMutable = true; + readonly mixInLengthBlockBytes = new Uint8Array(64); + readonly mixInLengthBuffer = Buffer.from( + this.mixInLengthBlockBytes.buffer, + this.mixInLengthBlockBytes.byteOffset, + this.mixInLengthBlockBytes.byteLength + ); protected readonly maxSelector: number; @@ -85,6 +92,8 @@ export class UnionType[]> extends CompositeType< this.minSize = 1 + Math.min(...minLens); this.maxSize = 1 + Math.max(...maxLens); this.maxSelector = this.types.length - 1; + // maxChunkCount = 1 so this.blocksBuffer.length = 32 in this case + this.blocksBuffer = new Uint8Array(32); } static named[]>(types: Types, opts: Require): UnionType { @@ -170,12 +179,21 @@ export class UnionType[]> extends CompositeType< // Merkleization hashTreeRoot(value: ValueOfTypes): Uint8Array { - return mixInLength(super.hashTreeRoot(value), value.selector); + const root = allocUnsafe(32); + this.hashTreeRootInto(value, root, 0); + return root; + } + + hashTreeRootInto(value: ValueOfTypes, output: Uint8Array, offset: number): void { + super.hashTreeRootInto(value, this.mixInLengthBlockBytes, 0); + this.mixInLengthBuffer.writeUIntLE(value.selector, 32, 6); + const chunkCount = 2; + merkleizeBlocksBytes(this.mixInLengthBlockBytes, chunkCount, output, offset); } - protected getRoots(value: ValueOfTypes): Uint8Array[] { - const valueRoot = this.types[value.selector].hashTreeRoot(value.value); - return [valueRoot]; + protected getBlocksBytes(value: ValueOfTypes): Uint8Array { + this.types[value.selector].hashTreeRootInto(value.value, this.blocksBuffer, 0); + return this.blocksBuffer; } // Proofs diff --git a/packages/ssz/src/type/vectorBasic.ts b/packages/ssz/src/type/vectorBasic.ts index d52a9405..0c528c96 100644 --- a/packages/ssz/src/type/vectorBasic.ts +++ b/packages/ssz/src/type/vectorBasic.ts @@ -1,5 +1,5 @@ -import {Node, Tree, HashComputationLevel} from "@chainsafe/persistent-merkle-tree"; -import {maxChunksToDepth, splitIntoRootChunks} from "../util/merkleize"; +import {HashComputationLevel, Node, Tree} from "@chainsafe/persistent-merkle-tree"; +import {maxChunksToDepth} from "../util/merkleize"; import {Require} from "../util/types"; import {namedClass} from "../util/named"; import {ValueOf, ByteViews} from "./abstract"; @@ -59,6 +59,7 @@ export class VectorBasicType> this.minSize = this.fixedSize; this.maxSize = this.fixedSize; this.defaultLen = length; + this.blocksBuffer = new Uint8Array(Math.ceil(this.maxChunkCount / 2) * 64); } static named>( @@ -146,11 +147,13 @@ export class VectorBasicType> // Merkleization - protected getRoots(value: ValueOf[]): Uint8Array[] { - const uint8Array = new Uint8Array(this.fixedSize); + protected getBlocksBytes(value: ValueOf[]): Uint8Array { + const uint8Array = this.blocksBuffer.subarray(0, 
this.fixedSize); const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); value_serializeToBytesArrayBasic(this.elementType, this.length, {uint8Array, dataView}, 0, value); - return splitIntoRootChunks(uint8Array); + + // remaining bytes from this.fixedSize to this.blocksBuffer.length must be zeroed + return this.blocksBuffer; } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/type/vectorComposite.ts b/packages/ssz/src/type/vectorComposite.ts index e1af8dd4..908f4f9b 100644 --- a/packages/ssz/src/type/vectorComposite.ts +++ b/packages/ssz/src/type/vectorComposite.ts @@ -11,9 +11,9 @@ import { tree_serializedSizeArrayComposite, tree_deserializeFromBytesArrayComposite, tree_serializeToBytesArrayComposite, - value_getRootsArrayComposite, maxSizeArrayComposite, minSizeArrayComposite, + value_getBlocksBytesArrayComposite, } from "./arrayComposite"; import {ArrayCompositeType, ArrayCompositeTreeView} from "../view/arrayComposite"; import {ArrayCompositeTreeViewDU} from "../viewDU/arrayComposite"; @@ -65,6 +65,7 @@ export class VectorCompositeType< this.minSize = minSizeArrayComposite(elementType, length); this.maxSize = maxSizeArrayComposite(elementType, length); this.defaultLen = length; + this.blocksBuffer = new Uint8Array(Math.ceil(this.maxChunkCount / 2) * 64); } // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -153,8 +154,8 @@ export class VectorCompositeType< // Merkleization - protected getRoots(value: ValueOf[]): Uint8Array[] { - return value_getRootsArrayComposite(this.elementType, this.length, value); + protected getBlocksBytes(value: ValueOf[]): Uint8Array { + return value_getBlocksBytesArrayComposite(this.elementType, this.length, value, this.blocksBuffer); } // JSON: inherited from ArrayType diff --git a/packages/ssz/src/util/merkleize.ts b/packages/ssz/src/util/merkleize.ts index 073dea5d..932e80d7 100644 --- a/packages/ssz/src/util/merkleize.ts +++ b/packages/ssz/src/util/merkleize.ts @@ -1,5 +1,4 @@ -import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index"; -import {zeroHash} from "@chainsafe/persistent-merkle-tree"; +import {hasher, zeroHash} from "@chainsafe/persistent-merkle-tree"; /** Dedicated property to cache hashTreeRoot of immutable CompositeType values */ export const symbolCachedPermanentRoot = Symbol("ssz_cached_permanent_root"); @@ -9,6 +8,28 @@ export type ValueWithCachedPermanentRoot = { [symbolCachedPermanentRoot]?: Uint8Array; }; +/** + * Cache a root for a ValueWithCachedPermanentRoot instance + * - if safeCache is true and output is 32 bytes and offset is 0, use output directly + * - if safeCache, use output subarray + * - otherwise, need to clone the root at output offset + */ +export function cacheRoot( + value: ValueWithCachedPermanentRoot, + output: Uint8Array, + offset: number, + safeCache: boolean +): void { + const cachedRoot = + safeCache && output.length === 32 && offset === 0 + ? output + : safeCache + ? 
output.subarray(offset, offset + 32) + : // Buffer.prototype.slice does not copy memory, Enforce Uint8Array usage https://github.com/nodejs/node/issues/28087 + Uint8Array.prototype.slice.call(output, offset, offset + 32); + value[symbolCachedPermanentRoot] = cachedRoot; +} + export function hash64(bytes32A: Uint8Array, bytes32B: Uint8Array): Uint8Array { return hasher.digest64(bytes32A, bytes32B); } @@ -43,22 +64,6 @@ export function merkleize(chunks: Uint8Array[], padFor: number): Uint8Array { return chunks[0]; } -/** - * Split a long Uint8Array into Uint8Array of exactly 32 bytes - */ -export function splitIntoRootChunks(longChunk: Uint8Array): Uint8Array[] { - const chunkCount = Math.ceil(longChunk.length / 32); - const chunks = new Array(chunkCount); - - for (let i = 0; i < chunkCount; i++) { - const chunk = new Uint8Array(32); - chunk.set(longChunk.slice(i * 32, (i + 1) * 32)); - chunks[i] = chunk; - } - - return chunks; -} - /** @ignore */ export function mixInLength(root: Uint8Array, length: number): Uint8Array { const lengthBuf = Buffer.alloc(32); diff --git a/packages/ssz/src/view/arrayBasic.ts b/packages/ssz/src/view/arrayBasic.ts index 7f821a96..ccfdee15 100644 --- a/packages/ssz/src/view/arrayBasic.ts +++ b/packages/ssz/src/view/arrayBasic.ts @@ -88,13 +88,16 @@ export class ArrayBasicTreeView> extends /** * Get all values of this array as Basic element type values, from index zero to `this.length - 1` */ - getAll(): ValueOf[] { + getAll(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this.length) { + throw Error(`Expected ${this.length} values, got ${values.length}`); + } const length = this.length; const chunksNode = this.type.tree_getChunksNode(this.node); const chunkCount = Math.ceil(length / this.type.itemsPerChunk); const leafNodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, chunkCount) as LeafNode[]; - const values = new Array>(length); + values = values ?? new Array>(length); const itemsPerChunk = this.type.itemsPerChunk; // Prevent many access in for loop below const lenFullNodes = Math.floor(length / itemsPerChunk); const remainder = length % itemsPerChunk; diff --git a/packages/ssz/src/view/arrayComposite.ts b/packages/ssz/src/view/arrayComposite.ts index b74799b2..35c633df 100644 --- a/packages/ssz/src/view/arrayComposite.ts +++ b/packages/ssz/src/view/arrayComposite.ts @@ -74,11 +74,14 @@ export class ArrayCompositeTreeView< * The returned views don't have a parent hook to this View's Tree, so changes in the returned views won't be * propagated upwards. To get linked element Views use `this.get()` */ - getAllReadonly(): CompositeView[] { + getAllReadonly(views?: CompositeView[]): CompositeView[] { + if (views && views.length !== this.length) { + throw Error(`Expected ${this.length} views, got ${views.length}`); + } const length = this.length; const chunksNode = this.type.tree_getChunksNode(this.node); const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); - const views = new Array>(length); + views = views ?? new Array>(length); for (let i = 0; i < length; i++) { // TODO: Optimize views[i] = this.type.elementType.getView(new Tree(nodes[i])); @@ -91,11 +94,14 @@ export class ArrayCompositeTreeView< * The returned values are not Views so any changes won't be propagated upwards. 
* To get linked element Views use `this.get()` */ - getAllReadonlyValues(): ValueOf[] { + getAllReadonlyValues(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this.length) { + throw Error(`Expected ${this.length} values, got ${values.length}`); + } const length = this.length; const chunksNode = this.type.tree_getChunksNode(this.node); const nodes = getNodesAtDepth(chunksNode, this.type.chunkDepth, 0, length); - const values = new Array>(length); + values = values ?? new Array>(length); for (let i = 0; i < length; i++) { values[i] = this.type.elementType.tree_toValue(nodes[i]); } diff --git a/packages/ssz/src/viewDU/arrayBasic.ts b/packages/ssz/src/viewDU/arrayBasic.ts index 1a06d84a..0464b26c 100644 --- a/packages/ssz/src/viewDU/arrayBasic.ts +++ b/packages/ssz/src/viewDU/arrayBasic.ts @@ -110,7 +110,10 @@ export class ArrayBasicTreeViewDU> extend /** * Get all values of this array as Basic element type values, from index zero to `this.length - 1` */ - getAll(): ValueOf[] { + getAll(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this._length) { + throw Error(`Expected ${this._length} values, got ${values.length}`); + } if (!this.nodesPopulated) { const nodesPrev = this.nodes; const chunksNode = this.type.tree_getChunksNode(this.node); @@ -125,7 +128,7 @@ export class ArrayBasicTreeViewDU> extend this.nodesPopulated = true; } - const values = new Array>(this._length); + values = values ?? new Array>(this._length); const itemsPerChunk = this.type.itemsPerChunk; // Prevent many access in for loop below const lenFullNodes = Math.floor(this._length / itemsPerChunk); const remainder = this._length % itemsPerChunk; diff --git a/packages/ssz/src/viewDU/arrayComposite.ts b/packages/ssz/src/viewDU/arrayComposite.ts index 44c50375..bced3024 100644 --- a/packages/ssz/src/viewDU/arrayComposite.ts +++ b/packages/ssz/src/viewDU/arrayComposite.ts @@ -147,29 +147,55 @@ export class ArrayCompositeTreeViewDU< /** * WARNING: Returns all committed changes, if there are any pending changes commit them beforehand */ - getAllReadonly(): CompositeViewDU[] { + getAllReadonly(views?: CompositeViewDU[]): CompositeViewDU[] { + if (views && views.length !== this._length) { + throw Error(`Expected ${this._length} views, got ${views.length}`); + } this.populateAllNodes(); - const views = new Array>(this._length); + views = views ?? new Array>(this._length); for (let i = 0; i < this._length; i++) { views[i] = this.type.elementType.getViewDU(this.nodes[i], this.caches[i]); } return views; } + /** + * Apply `fn` to each ViewDU in the array + */ + forEach(fn: (viewDU: CompositeViewDU, index: number) => void): void { + this.populateAllNodes(); + for (let i = 0; i < this._length; i++) { + fn(this.type.elementType.getViewDU(this.nodes[i], this.caches[i]), i); + } + } + /** * WARNING: Returns all committed changes, if there are any pending changes commit them beforehand */ - getAllReadonlyValues(): ValueOf[] { + getAllReadonlyValues(values?: ValueOf[]): ValueOf[] { + if (values && values.length !== this._length) { + throw Error(`Expected ${this._length} values, got ${values.length}`); + } this.populateAllNodes();
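These accessors now take an optional preallocated array so hot paths can avoid a fresh allocation per call; a usage sketch (the view is a placeholder):

const values = new Array(view.length); // allocate once, e.g. reused across epoch transitions
view.getAllReadonlyValues(values); // fills in place; throws if values.length !== view.length

- const values = new Array>(this._length); + values = values ?? 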
new Array>(this._length); for (let i = 0; i < this._length; i++) { values[i] = this.type.elementType.tree_toValue(this.nodes[i]); } return values; } + /** + * Apply `fn` to each value in the array + */ + forEachValue(fn: (value: ValueOf, index: number) => void): void { + this.populateAllNodes(); + for (let i = 0; i < this._length; i++) { + fn(this.type.elementType.tree_toValue(this.nodes[i]), i); + } + } + /** * When we need to compute HashComputations (hcByLevel != null): * - if old _rootNode is hashed, then only need to put pending changes to hcByLevel @@ -193,9 +219,12 @@ export class ArrayCompositeTreeViewDU< for (const [index, view] of this.viewsChanged) { const node = this.type.elementType.commitViewDU(view, offsetView, byLevelView); - // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal - this.nodes[index] = node; - nodesChanged.push({index, node}); + // there's a chance the view is not changed, no need to rebind nodes in that case + if (this.nodes[index] !== node) { + // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal + this.nodes[index] = node; + nodesChanged.push({index, node}); + } // Cache the view's caches to preserve its data after 'this.viewsChanged.clear()' const cache = this.type.elementType.cacheOfViewDU(view); diff --git a/packages/ssz/src/viewDU/container.ts b/packages/ssz/src/viewDU/container.ts index 993ff602..68b0a00b 100644 --- a/packages/ssz/src/viewDU/container.ts +++ b/packages/ssz/src/viewDU/container.ts @@ -101,9 +101,12 @@ export class BasicContainerTreeViewDU { + constructor(limit: number) { + super(new ValidatorNodeStructType(), limit); + } + + getViewDU(node: Node, cache?: unknown): ListCompositeTreeViewDU { + return new ListValidatorTreeViewDU(this, node, cache as any); + } +} diff --git a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts index d3e6b4a3..f328aea9 100644 --- a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts +++ b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts @@ -2,7 +2,6 @@ import { BitListType, BitVectorType, ContainerType, - ContainerNodeStructType, ListBasicType, ListCompositeType, VectorBasicType, @@ -18,6 +17,10 @@ import { ATTESTATION_SUBNET_COUNT, } from "../params"; import * as primitiveSsz from "../primitive/sszTypes"; +import {ListValidatorType} from "./listValidator"; +import {ValidatorNodeStruct} from "./validator"; + +export {ValidatorNodeStruct}; const { EPOCHS_PER_ETH1_VOTING_PERIOD, @@ -261,12 +264,12 @@ export const ValidatorContainer = new ContainerType( {typeName: "Validator", jsonCase: "eth2"} ); -export const ValidatorNodeStruct = new ContainerNodeStructType(ValidatorContainer.fields, ValidatorContainer.opts); // The main Validator type is the 'ContainerNodeStructType' version export const Validator = ValidatorNodeStruct; // Export as stand-alone for direct tree optimizations -export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); +// export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); +export const Validators = new ListValidatorType(VALIDATOR_REGISTRY_LIMIT); export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT); export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICAL_VECTOR); export const Slashings = new VectorBasicType(Gwei, EPOCHS_PER_SLASHINGS_VECTOR);
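The effect of the type swap above, as a consumer sees it (`validatorValues` is a placeholder array of validator values):

const viewDU = Validators.toViewDU(validatorValues); // now a ListValidatorTreeViewDU
viewDU.batchHashTreeRoot(); // commit() computes changed validator roots in batch

diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts 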
b/packages/ssz/test/lodestarTypes/phase0/validator.ts new file mode 100644 index 00000000..4cd82a83 --- /dev/null +++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts @@ -0,0 +1,130 @@ +import {ByteViews} from "../../../src/type/abstract"; +import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct"; +import {ValueOfFields} from "../../../src/view/container"; +import * as primitiveSsz from "../primitive/sszTypes"; + +const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; + +// this is to work with uint32, see https://github.com/ChainSafe/ssz/blob/ssz-v0.15.1/packages/ssz/src/type/uint.ts +const NUMBER_2_POW_32 = 2 ** 32; + +/* + * Below constants correspond to their respective ssz types in `ValidatorType`. + */ +const UINT32_SIZE = 4; +const PUBKEY_SIZE = 48; +const WITHDRAWAL_CREDENTIALS_SIZE = 32; +const SLASHED_SIZE = 1; +const CHUNK_SIZE = 32; + +export const ValidatorType = { + pubkey: BLSPubkey, + withdrawalCredentials: Bytes32, + effectiveBalance: UintNum64, + slashed: Boolean, + activationEligibilityEpoch: EpochInf, + activationEpoch: EpochInf, + exitEpoch: EpochInf, + withdrawableEpoch: EpochInf, +}; + +/** + * Improve serialization performance for state.validators.serialize(); + */ +export class ValidatorNodeStructType extends ContainerNodeStructType { + constructor() { + super(ValidatorType, {typeName: "Validator", jsonCase: "eth2"}); + } + + value_serializeToBytes( + {uint8Array: output, dataView}: ByteViews, + offset: number, + validator: ValueOfFields + ): number { + output.set(validator.pubkey, offset); + offset += PUBKEY_SIZE; + output.set(validator.withdrawalCredentials, offset); + offset += WITHDRAWAL_CREDENTIALS_SIZE; + const {effectiveBalance, activationEligibilityEpoch, activationEpoch, exitEpoch, withdrawableEpoch} = validator; + // effectiveBalance is UintNum64 + dataView.setUint32(offset, effectiveBalance & 0xffffffff, true); + offset += UINT32_SIZE; + dataView.setUint32(offset, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true); + offset += UINT32_SIZE; + output[offset] = validator.slashed ? 1 : 0; + offset += SLASHED_SIZE; + offset = writeEpochInf(dataView, offset, activationEligibilityEpoch); + offset = writeEpochInf(dataView, offset, activationEpoch); + offset = writeEpochInf(dataView, offset, exitEpoch); + offset = writeEpochInf(dataView, offset, withdrawableEpoch); + + return offset; + } +} + +export const ValidatorNodeStruct = new ValidatorNodeStructType();
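A quick size check on the fixed layout written by `value_serializeToBytes` above (plain arithmetic over the constants, not code from this PR):

// 48 (pubkey) + 32 (withdrawalCredentials) + 8 (effectiveBalance) + 1 (slashed) + 4 * 8 (epochs) = 121
const VALIDATOR_SERIALIZED_SIZE =
  PUBKEY_SIZE + WITHDRAWAL_CREDENTIALS_SIZE + 2 * UINT32_SIZE + SLASHED_SIZE + 4 * 2 * UINT32_SIZE; // 121 bytes

+ +/** + * Write to level3 and level4 bytes to compute merkle root. Note that this is to compute + * merkle root and it's different from serialization (which is more compressed). + * pub0 + pub1 are at level4, they will be hashed into the 1st chunk of level 3 + * then use 8 chunks of level 3 to compute the root hash.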
+ * reserved withdr eff sla actElig act exit with + * level 3 |----------|----------|----------|----------|----------|----------|----------|----------| + * + * pub0 pub1 + * level4 |----------|----------| + * + */ +export function validatorToChunkBytes( + level3: ByteViews, + level4: Uint8Array, + value: ValueOfFields +): void { + const { + pubkey, + withdrawalCredentials, + effectiveBalance, + slashed, + activationEligibilityEpoch, + activationEpoch, + exitEpoch, + withdrawableEpoch, + } = value; + const {uint8Array: outputLevel3, dataView} = level3; + + // pubkey = 48 bytes which is 2 * CHUNK_SIZE + level4.set(pubkey, 0); + let offset = CHUNK_SIZE; + outputLevel3.set(withdrawalCredentials, offset); + offset += CHUNK_SIZE; + // effectiveBalance is UintNum64 + dataView.setUint32(offset, effectiveBalance & 0xffffffff, true); + dataView.setUint32(offset + 4, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true); + + offset += CHUNK_SIZE; + dataView.setUint32(offset, slashed ? 1 : 0, true); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, activationEligibilityEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, activationEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, exitEpoch); + offset += CHUNK_SIZE; + writeEpochInf(dataView, offset, withdrawableEpoch); +} + +function writeEpochInf(dataView: DataView, offset: number, value: number): number { + if (value === Infinity) { + dataView.setUint32(offset, 0xffffffff, true); + offset += UINT32_SIZE; + dataView.setUint32(offset, 0xffffffff, true); + offset += UINT32_SIZE; + } else { + dataView.setUint32(offset, value & 0xffffffff, true); + offset += UINT32_SIZE; + dataView.setUint32(offset, (value / NUMBER_2_POW_32) & 0xffffffff, true); + offset += UINT32_SIZE; + } + return offset; } diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts new file mode 100644 index 00000000..c9e0cb73 --- /dev/null +++ b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts @@ -0,0 +1,179 @@ +import {byteArrayIntoHashObject} from "@chainsafe/as-sha256"; +import {HashComputationLevel, Node, digestNLevel, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree"; +import {ListCompositeType} from "../../../../src/type/listComposite"; +import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite"; +import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite"; +import {ValidatorNodeStructType, ValidatorType, validatorToChunkBytes} from "../validator"; +import {ByteViews} from "../../../../src"; +import {ContainerNodeStructTreeViewDU} from "../../../../src/viewDU/containerNodeStruct"; +import {ValidatorIndex} from "../../primitive/types"; + +/** + * hashtree has a MAX_SIZE of 1024 bytes = 32 chunks + * Given a validator's level 3 takes 8 chunks, we can hash 4 validators at a time + */ +const PARALLEL_FACTOR = 4;
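Restating the arithmetic behind the constant (a sanity check, not new code):

const MAX_INPUT_CHUNKS = 1024 / 32; // 32: hashtree's optimal input measured in 32-byte chunks
const CHUNKS_PER_VALIDATOR = 8; // one validator's level-3 layout
const FACTOR = MAX_INPUT_CHUNKS / CHUNKS_PER_VALIDATOR; // 4 validators hashed per batch

+/** + * Allocate memory once for batch hashing validators.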
+ */ +// each level 3 of validator has 8 chunks, each chunk has 32 bytes +const batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32); +const level3ByteViewsArr: ByteViews[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + const uint8Array = batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32); + const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength); + level3ByteViewsArr.push({uint8Array, dataView}); +} +// each level 4 of validator has 2 chunks for pubkey, each chunk has 32 bytes +const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32); +const level4BytesArr: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32)); +} +const pubkeyRoots: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + pubkeyRoots.push(batchLevel4Bytes.subarray(i * 32, (i + 1) * 32)); +} + +const validatorRoots: Uint8Array[] = []; +for (let i = 0; i < PARALLEL_FACTOR; i++) { + validatorRoots.push(batchLevel3Bytes.subarray(i * 32, (i + 1) * 32)); +} +const validatorRoot = new Uint8Array(32); + +/** + * Similar to ListCompositeTreeViewDU with some differences: + * - if called without params, it's from hashTreeRoot() api call, no need to compute root + * - otherwise it's from batchHashTreeRoot() call, compute validator roots in batch + */ +export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU { + constructor( + readonly type: ListCompositeType, + protected _rootNode: Node, + cache?: ArrayCompositeTreeViewDUCache + ) { + super(type, _rootNode, cache); + } + + commit(hcOffset = 0, hcByLevel: HashComputationLevel[] | null = null): void { + if (hcByLevel === null) { + // this is not from batchHashTreeRoot() call, go with regular flow + return super.commit(); + } + + const isOldRootHashed = this._rootNode.h0 !== null; + if (this.viewsChanged.size === 0) { + if (!isOldRootHashed && hcByLevel !== null) { + // not possible to get HashComputations due to BranchNodeStruct + this._rootNode.root; + } + return; + } + + // TODO - batch: remove this type cast + const viewsChanged = this.viewsChanged as unknown as Map< + number, + ContainerNodeStructTreeViewDU + >; + + const indicesChanged: number[] = []; + for (const [index, viewChanged] of viewsChanged) { + // should not have any params here in order not to compute root + viewChanged.commit(); + // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal + this.nodes[index] = viewChanged.node; + // `validators.get(i)` was called but it may not modify any property, do not need to compute root + if (viewChanged.node.h0 === null) { + indicesChanged.push(index); + } + } + + // these validators don't have roots, we compute roots in batch + const sortedIndicesChanged = indicesChanged.sort((a, b) => a - b); + const nodesChanged: {index: ValidatorIndex; node: Node}[] = new Array<{index: ValidatorIndex; node: Node}>( + sortedIndicesChanged.length + ); + for (const [i, validatorIndex] of sortedIndicesChanged.entries()) { + nodesChanged[i] = {index: validatorIndex, node: this.nodes[validatorIndex]}; + } + doBatchHashTreeRootValidators(sortedIndicesChanged, viewsChanged); + + // do the remaining commit step the same to parent (ArrayCompositeTreeViewDU) + const indexes = nodesChanged.map((entry) => entry.index); + const nodes = nodesChanged.map((entry) => entry.node); + const chunksNode = this.type.tree_getChunksNode(this._rootNode); + const offsetThis = hcOffset + 
this.type.tree_chunksNodeOffset(); + const byLevelThis = hcByLevel != null && isOldRootHashed ? hcByLevel : null; + const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, offsetThis, byLevelThis); + + this._rootNode = this.type.tree_setChunksNode( + this._rootNode, + newChunksNode, + this.dirtyLength ? this._length : null, + hcOffset, + hcByLevel + ); + + if (!isOldRootHashed && hcByLevel !== null) { + // should never happen, handle just in case + // not possible to get HashComputations due to BranchNodeStruct + this._rootNode.root; + } + + this.viewsChanged.clear(); + this.dirtyLength = false; + } +} + +function doBatchHashTreeRootValidators( + indices: ValidatorIndex[], + validators: Map> +): void { + const endBatch = indices.length - (indices.length % PARALLEL_FACTOR); + + // commit every PARALLEL_FACTOR (4) validators in batch + for (let i = 0; i < endBatch; i++) { + if (i % PARALLEL_FACTOR === 0) { + batchLevel3Bytes.fill(0); + batchLevel4Bytes.fill(0); + } + const indexInBatch = i % PARALLEL_FACTOR; + const viewIndex = indices[i]; + const validator = validators.get(viewIndex); + if (validator) { + validatorToChunkBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch], validator.value); + } + + if (indexInBatch === PARALLEL_FACTOR - 1) { + // hash level 4, this is populated to pubkeyRoots + digestNLevel(batchLevel4Bytes, 1); + for (let j = 0; j < PARALLEL_FACTOR; j++) { + level3ByteViewsArr[j].uint8Array.set(pubkeyRoots[j], 0); + } + // hash level 3, this is populated to validatorRoots + digestNLevel(batchLevel3Bytes, 3); + // commit all validators in this batch + for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) { + const viewIndex = indices[i - j]; + const indexInBatch = (i - j) % PARALLEL_FACTOR; + const viewChanged = validators.get(viewIndex); + if (viewChanged) { + const branchNodeStruct = viewChanged.node; + byteArrayIntoHashObject(validatorRoots[indexInBatch], 0, branchNodeStruct); + } + } + } + } + + // commit the remaining validators; we could batch these too but don't want to create new Uint8Array views, + // and it's not much different from committing one by one + for (let i = endBatch; i < indices.length; i++) { + const viewIndex = indices[i]; + const viewChanged = validators.get(viewIndex); + if (viewChanged) { + // compute root for each validator + viewChanged.type.hashTreeRootInto(viewChanged.value, validatorRoot, 0); + byteArrayIntoHashObject(validatorRoot, 0, viewChanged.node); + } + } +} diff --git a/packages/ssz/test/perf/byType/listUintNum64.test.ts b/packages/ssz/test/perf/byType/listUintNum64.test.ts new file mode 100644 index 00000000..19e8b7f7 --- /dev/null +++ b/packages/ssz/test/perf/byType/listUintNum64.test.ts @@ -0,0 +1,25 @@ +import {itBench} from "@dapplion/benchmark"; +import {ListUintNum64Type} from "../../../src/type/listUintNum64"; + +describe("ListUintNum64Type.toViewDU", () => { + const balancesType = new ListUintNum64Type(1099511627776); + const seedLength = 1_900_000; + const seedViewDU = balancesType.toViewDU(Array.from({length: seedLength}, () => 0)); + + const vc = 2_000_000; + const value = Array.from({length: vc}, (_, i) => 32 * 1e9 + i); + + itBench({ + id: `ListUintNum64Type.toViewDU ${seedLength} -> ${vc}`, + fn: () => { + balancesType.toViewDU(value, seedViewDU); + }, + }); + + itBench({ + id: "ListUintNum64Type.toViewDU()", + fn: () => { + balancesType.toViewDU(value); + }, + }); +});
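For readers following the batch flow above, our reading of the `digestNLevel` usage (an interpretation from this file's usage; verify against persistent-merkle-tree before relying on it) is that it applies n in-place halving hash passes and returns a view over the leading bytes:

// level 4: 4 validators x 2 pubkey chunks (256 B) -> 1 pass -> 4 pubkey roots (128 B)
const pubkeyRootsView = digestNLevel(batchLevel4Bytes, 1);
// level 3: 4 validators x 8 chunks (1024 B) -> 3 passes -> 4 validator roots (128 B)
const validatorRootsView = digestNLevel(batchLevel3Bytes, 3);

diff --git a/packages/ssz/test/perf/eth2/beaconBlock.test.ts b/packages/ssz/test/perf/eth2/beaconBlock.test.ts new file mode 100644 index 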
00000000..523e94ea --- /dev/null +++ b/packages/ssz/test/perf/eth2/beaconBlock.test.ts @@ -0,0 +1,94 @@ +import {itBench, setBenchOpts} from "@dapplion/benchmark"; +import {ValueWithCachedPermanentRoot, symbolCachedPermanentRoot} from "../../../src/util/merkleize"; +import {deneb, ssz} from "../../lodestarTypes"; +import {preset} from "../../lodestarTypes/params"; +import {BitArray, toHexString} from "../../../src"; +const {MAX_ATTESTATIONS, MAX_DEPOSITS, MAX_VOLUNTARY_EXITS, MAX_BLS_TO_EXECUTION_CHANGES} = preset; + +describe("Benchmark BeaconBlock.hashTreeRoot()", function () { + setBenchOpts({ + minMs: 10_000, + }); + + const block = ssz.deneb.BeaconBlock.defaultValue(); + for (let i = 0; i < MAX_ATTESTATIONS; i++) { + block.body.attestations.push({ + aggregationBits: BitArray.fromBoolArray(Array.from({length: 64}, () => true)), + data: { + slot: 1, + index: 1, + beaconBlockRoot: Buffer.alloc(32, 1), + source: { + epoch: 1, + root: Buffer.alloc(32, 1), + }, + target: { + epoch: 1, + root: Buffer.alloc(32, 1), + }, + }, + signature: Buffer.alloc(96, 1), + }); + } + for (let i = 0; i < MAX_DEPOSITS; i++) { + block.body.deposits.push({ + proof: ssz.phase0.Deposit.fields.proof.defaultValue(), + data: { + pubkey: Buffer.alloc(48, 1), + withdrawalCredentials: Buffer.alloc(32, 1), + amount: 32 * 1e9, + signature: Buffer.alloc(96, 1), + }, + }); + } + for (let i = 0; i < MAX_VOLUNTARY_EXITS; i++) { + block.body.voluntaryExits.push({ + signature: Buffer.alloc(96, 1), + message: { + epoch: 1, + validatorIndex: 1, + }, + }); + } + // common data on mainnet as of Jun 2024 + const numTransaction = 200; + const transactionLen = 500; + for (let i = 0; i < numTransaction; i++) { + block.body.executionPayload.transactions.push(Buffer.alloc(transactionLen, 1)); + } + for (let i = 0; i < MAX_BLS_TO_EXECUTION_CHANGES; i++) { + block.body.blsToExecutionChanges.push({ + signature: Buffer.alloc(96, 1), + message: { + validatorIndex: 1, + fromBlsPubkey: Buffer.alloc(48, 1), + toExecutionAddress: Buffer.alloc(20, 1), + }, + }); + } + + const root = ssz.deneb.BeaconBlock.hashTreeRoot(block); + console.log("BeaconBlock.hashTreeRoot() root", toHexString(root)); + itBench({ + id: `Deneb BeaconBlock.hashTreeRoot(), numTransaction=${numTransaction}`, + beforeEach: () => { + clearCachedRoots(block); + return block; + }, + fn: (block: deneb.BeaconBlock) => { + ssz.deneb.BeaconBlock.hashTreeRoot(block); + }, + }); +}); + +function clearCachedRoots(block: deneb.BeaconBlock): void { + (block as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined; + (block.body as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined; + const attestations = block.body.attestations; + for (const attestation of attestations) { + (attestation.data as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined; + } + for (const exit of block.body.voluntaryExits) { + (exit as ValueWithCachedPermanentRoot)[symbolCachedPermanentRoot] = undefined; + } } diff --git a/packages/ssz/test/perf/eth2/beaconState.test.ts b/packages/ssz/test/perf/eth2/beaconState.test.ts index 32d8b031..f8e9951c 100.644 --- a/packages/ssz/test/perf/eth2/beaconState.test.ts +++ b/packages/ssz/test/perf/eth2/beaconState.test.ts @@ -6,9 +6,9 @@ import {preset} from "../../lodestarTypes/params"; const {SLOTS_PER_HISTORICAL_ROOT, EPOCHS_PER_ETH1_VOTING_PERIOD, SLOTS_PER_EPOCH} = preset; const vc = 200_000; -const numModified = vc / 20; +const numModified = vc / 2; // whenever we increase vc, we need to update this value from the "recursive hash"
test -const expectedRoot = "0x759d635af161ac1e4f4af11aa7721fd4996253af50f8a81e5003bbb4cbcaae42"; +const expectedRoot = "0xb0780ec0d44bff1ae8a351e98e37a9d8c3e28edb38c9d5a6312656e0cba915d9"; /** * This simulates a BeaconState being modified after an epoch transition in lodestar @@ -22,50 +22,50 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM minMs: 20_000, }); + const hc = new HashComputationGroup(); itBench({ - id: `BeaconState ViewDU hashTreeRoot() vc=${vc}`, + id: `BeaconState ViewDU batchHashTreeRoot vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.hashTreeRoot(); - if (toHexString(state.node.root) !== expectedRoot) { - throw new Error("hashTreeRoot does not match expectedRoot"); + // commit() step is inside hashTreeRoot(), reuse HashComputationGroup + if (toHexString(state.batchHashTreeRoot(hc)) !== expectedRoot) { + throw new Error( + `batchHashTreeRoot ${toHexString(state.batchHashTreeRoot(hc))} does not match expectedRoot ${expectedRoot}` + ); } + state.batchHashTreeRoot(hc); }, }); itBench({ - id: `BeaconState ViewDU recursive hash - commit step vc=${vc}`, + id: `BeaconState ViewDU batchHashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.commit(); + state.commit(0, []); }, }); itBench({ - id: `BeaconState ViewDU validator tree creation vc=${numModified}`, + id: `BeaconState ViewDU batchHashTreeRoot - hash step vc=${vc}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); - state.commit(); - return state; + const hcByLevel: HashComputationLevel[] = []; + state.commit(0, hcByLevel); + return hcByLevel; }, - fn: (state: CompositeViewDU) => { - const validators = state.validators; - for (let i = 0; i < numModified; i++) { - validators.getReadonly(i).node.left; - } + fn: (hcByLevel) => { + executeHashComputations(hcByLevel); }, }); - const hc = new HashComputationGroup(); itBench({ - id: `BeaconState ViewDU batchHashTreeRoot vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot() vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - // commit() step is inside hashTreeRoot(), reuse HashComputationGroup - if (toHexString(state.batchHashTreeRoot(hc)) !== expectedRoot) { - throw new Error("batchHashTreeRoot does not match expectedRoot"); + state.hashTreeRoot(); + if (toHexString(state.node.root) !== expectedRoot) { + throw new Error(`hashTreeRoot ${toHexString(state.node.root)} does not match expectedRoot ${expectedRoot}`); } - state.batchHashTreeRoot(hc); }, }); @@ -73,20 +73,22 @@ describe(`BeaconState ViewDU partially modified tree vc=${vc} numModified=${numM id: `BeaconState ViewDU hashTreeRoot - commit step vc=${vc}`, beforeEach: () => createPartiallyModifiedDenebState(), fn: (state: CompositeViewDU) => { - state.commit(0, []); + state.commit(); }, }); itBench({ - id: `BeaconState ViewDU hashTreeRoot - hash step vc=${vc}`, + id: `BeaconState ViewDU hashTreeRoot - validator tree creation vc=${numModified}`, beforeEach: () => { const state = createPartiallyModifiedDenebState(); - const hcByLevel: HashComputationLevel[] = []; - state.commit(0, hcByLevel); - return hcByLevel; + state.commit(); + return state; }, - fn: (hcByLevel) => { - executeHashComputations(hcByLevel); + fn: (state: CompositeViewDU) => { + const validators = state.validators; + for (let i = 0; i < numModified; i++) { + validators.getReadonly(i).node.left; + } }, }); }); @@ -116,7 
+118,12 @@ function createPartiallyModifiedDenebState(): CompositeViewDU 32e9)); + // remaining validators are accessed with no modification + for (let i = numModified; i < vc; i++) { + state.validators.get(i); + } state.eth1Data = BeaconState.fields.eth1Data.toViewDU({ depositRoot: Buffer.alloc(32, 0x02), diff --git a/packages/ssz/test/perf/eth2/validators.test.ts b/packages/ssz/test/perf/eth2/validators.test.ts index 2cd92faf..3e15ce24 100644 --- a/packages/ssz/test/perf/eth2/validators.test.ts +++ b/packages/ssz/test/perf/eth2/validators.test.ts @@ -1,7 +1,7 @@ import {itBench} from "@dapplion/benchmark"; import {Validator} from "../../lodestarTypes/phase0/types"; -import {ValidatorContainer, ValidatorNodeStruct} from "../../lodestarTypes/phase0/sszTypes"; -import {CompositeViewDU} from "../../../src"; +import {ValidatorContainer, ValidatorNodeStruct, Validators} from "../../lodestarTypes/phase0/sszTypes"; +import {BranchNodeStruct, CompositeViewDU} from "../../../src"; const validatorStruct: Validator = { pubkey: Buffer.alloc(48, 0xdd), @@ -49,3 +49,41 @@ describe("Validator vs ValidatorLeafNodeStruct", () => { } } }); + +describe("ContainerNodeStructViewDU vs ValidatorViewDU hashtreeroot", () => { + // ListValidatorTreeViewDU commits every 4 validators in batch + const listValidator = Validators.toViewDU(Array.from({length: 4}, () => validatorStruct)); + const nodes: BranchNodeStruct[] = []; + for (let i = 0; i < listValidator.length; i++) { + nodes.push(listValidator.get(i).node as BranchNodeStruct); + } + + // this does not create a validator tree every time, and it computes roots in batch + itBench({ + id: "ValidatorViewDU hashTreeRoot", + beforeEach: () => { + for (let i = 0; i < listValidator.length; i++) { + listValidator.get(i).exitEpoch = 20242024; + } + }, + fn: () => { + listValidator.commit(); + }, + }); + + // this needs to create a validator tree every time + itBench({ + id: "ContainerNodeStructViewDU hashTreeRoot", + beforeEach: () => { + for (const node of nodes) { + node.value.exitEpoch = 20242024; + node.h0 = null as unknown as number; + } + }, + fn: () => { + for (const node of nodes) { + node.root; + } + }, + }); +}); diff --git a/packages/ssz/test/perf/merkleize.test.ts b/packages/ssz/test/perf/merkleize.test.ts index b83a1f5d..af70868e 100644 --- a/packages/ssz/test/perf/merkleize.test.ts +++ b/packages/ssz/test/perf/merkleize.test.ts @@ -1,5 +1,6 @@ import {itBench} from "@dapplion/benchmark"; -import {bitLength} from "../../src/util/merkleize"; +import {bitLength, merkleize} from "../../src/util/merkleize"; +import {merkleizeBlockArray, merkleizeBlocksBytes} from "@chainsafe/persistent-merkle-tree"; describe("merkleize / bitLength", () => { for (const n of [50, 8000, 250000]) { @@ -13,6 +14,36 @@ } }); +describe("merkleize vs persistent-merkle-tree merkleizeBlocksBytes", () => { + const chunkCounts = [32, 128, 512, 1024]; + + for (const chunkCount of chunkCounts) { + const rootArr = Array.from({length: chunkCount}, (_, i) => Buffer.alloc(32, i)); + const blocksBytes = Buffer.concat(rootArr); + if (blocksBytes.length % 64 !== 0) { + throw new Error("blocksBytes length must be a multiple of 64"); + } + const blockArray: Uint8Array[] = []; + for (let i = 0; i < blocksBytes.length; i += 64) { + blockArray.push(blocksBytes.slice(i, i + 64)); + } + + const result = Buffer.alloc(32); + + itBench(`merkleize ${chunkCount} chunks`, () => { + merkleize(rootArr, chunkCount); + });
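The three benchmarked entry points differ mainly in input shape and allocation; side by side (inputs as prepared above):

const out = new Uint8Array(32);
const root = merkleize(rootArr, chunkCount); // legacy: array of 32-byte chunks, allocates the result
merkleizeBlocksBytes(blocksBytes, chunkCount, out, 0); // contiguous 64-byte blocks, writes into out
merkleizeBlockArray(blockArray, blockArray.length, chunkCount, out, 0); // array of 64-byte blocks, for data too large for one buffer

+ + itBench(`merkleizeBlocksBytes ${chunkCount} chunks`, 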
() => { + merkleizeBlocksBytes(blocksBytes, chunkCount, result, 0); + }); + + itBench(`merkleizeBlockArray ${chunkCount} chunks`, () => { + merkleizeBlockArray(blockArray, blockArray.length, chunkCount, result, 0); + }); + } +}); + // Previous implementation, replaced by bitLength function bitLengthStr(n: number): number { const bitstring = n.toString(2); diff --git a/packages/ssz/test/spec/runValidTest.ts b/packages/ssz/test/spec/runValidTest.ts index 1bac7760..eab25843 100644 --- a/packages/ssz/test/spec/runValidTest.ts +++ b/packages/ssz/test/spec/runValidTest.ts @@ -101,13 +101,10 @@ export function runValidSszTest(type: Type, testData: ValidTestCaseData // 0x0000000000000000000000000000000000000000000000000000000000000000 if (process.env.RENDER_ROOTS) { if (type.isBasic) { - console.log("ROOTS Basic", toHexString(type.serialize(testDataValue))); + console.log("Chunk Bytes Basic", toHexString(type.serialize(testDataValue))); } else { - const roots = (type as CompositeType)["getRoots"](testDataValue); - console.log( - "ROOTS Composite", - roots.map((root) => toHexString(root)) - ); + const blocksBytes = (type as CompositeType)["getBlocksBytes"](testDataValue); + console.log("Chunk Bytes Composite", toHexString(blocksBytes)); } } diff --git a/packages/ssz/test/unit/byType/byteList/value.test.ts b/packages/ssz/test/unit/byType/byteList/value.test.ts new file mode 100644 index 00000000..0033443f --- /dev/null +++ b/packages/ssz/test/unit/byType/byteList/value.test.ts @@ -0,0 +1,28 @@ +import {expect} from "chai"; +import {ByteListType} from "../../../../src"; + +describe("ByteListValue", () => { + const type = new ByteListType(1024); + + it("should zero out the last sha256 block if it's over value.length", () => { + const value = Buffer.alloc(65, 1); + const expectedRoot = type.hashTreeRoot(value); + // now hash another value which makes the cached blocks non zero + type.hashTreeRoot(Buffer.alloc(1024, 2)); + const actualRoot = type.hashTreeRoot(value); + expect(actualRoot).to.deep.equal(expectedRoot); + }); + + it("should increase blockArray size if needed", () => { + const value0 = Buffer.alloc(65, 1); + const expectedRoot0 = type.hashTreeRoot(value0); + const value1 = Buffer.alloc(1024, 3); + const expectedRoot1 = type.hashTreeRoot(value1); + // now increase block array size + type.hashTreeRoot(Buffer.alloc(1024, 2)); + + // hash again + expect(type.hashTreeRoot(value0)).to.deep.equal(expectedRoot0); + expect(type.hashTreeRoot(value1)).to.deep.equal(expectedRoot1); + }); +}); diff --git a/packages/ssz/test/unit/byType/container/tree.test.ts b/packages/ssz/test/unit/byType/container/tree.test.ts index 6b545792..5c3bbd54 100644 --- a/packages/ssz/test/unit/byType/container/tree.test.ts +++ b/packages/ssz/test/unit/byType/container/tree.test.ts @@ -241,7 +241,7 @@ describe("ContainerViewDU batchHashTreeRoot", function () { a: uint64NumType, b: new BooleanType(), c: unionType, - d: new ByteListType(64), + d: new ByteListType(1024), e: new ByteVectorType(64), // a child container type f: childContainerType, @@ -259,7 +259,8 @@ describe("ContainerViewDU batchHashTreeRoot", function () { a: 10, b: true, c: {selector: 1, value: 100}, - d: Buffer.alloc(64, 2), + // make this not divisible by 64 to test edge case + d: Buffer.alloc(65, 2), e: Buffer.alloc(64, 1), f: {f0: 100, f1: 101}, g: {g0: 100, g1: 101}, @@ -271,6 +272,7 @@ describe("ContainerViewDU batchHashTreeRoot", function () { m: BitArray.fromSingleBit(4, 1), }; const expectedRoot = parentContainerType.toView(value).hashTreeRoot(); +
expect(parentContainerType.hashTreeRoot(value)).to.be.deep.equal(expectedRoot); it("fresh ViewDU", () => { expect(parentContainerType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot); @@ -327,9 +329,10 @@ describe("ContainerViewDU batchHashTreeRoot", function () { it("full hash then modify ByteListType", () => { const viewDU = parentContainerType.toViewDU(value); + viewDU.d = Buffer.alloc(1024, 3); viewDU.batchHashTreeRoot(); - // this takes more than 1 chunk so the resulting node is a branch node - viewDU.d = viewDU.d.slice(); + // set back to the original value, this takes more than 1 chunk so the resulting node is a branch node + viewDU.d = Buffer.alloc(65, 2); expect(viewDU.batchHashTreeRoot()).to.be.deep.equal(expectedRoot); // assign again but commit before batchHashTreeRoot() diff --git a/packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts b/packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts new file mode 100644 index 00000000..ae867a5e --- /dev/null +++ b/packages/ssz/test/unit/byType/listBasic/listUintNum64.test.ts @@ -0,0 +1,21 @@ +import {expect} from "chai"; +import {ListUintNum64Type} from "../../../../src/type/listUintNum64"; + +describe("ListUintNum64Type.toViewDU", () => { + const type = new ListUintNum64Type(1024); + // seed ViewDU contains 16 leaf nodes = 64 uint64 + // but we test all cases + for (const seedLength of [61, 62, 63, 64]) { + const value = Array.from({length: seedLength}, (_, i) => i); + const unusedViewDU = type.toViewDU(value); + + it(`should create ViewDU from a seedViewDU with ${seedLength} uint64`, () => { + for (let i = seedLength; i < 1024; i++) { + const newValue = Array.from({length: i + 1}, (_, j) => j); + const expectedRoot = type.toViewDU(newValue).hashTreeRoot(); + const viewDUFromExistingTree = type.toViewDU(newValue, unusedViewDU); + expect(viewDUFromExistingTree.hashTreeRoot()).to.deep.equal(expectedRoot); + } + }); + } +}); diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts index 95b39746..f428365b 100644 --- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts +++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts @@ -226,7 +226,21 @@ describe("ListCompositeType.sliceFrom", () => { } }); -describe("ListCompositeType batchHashTreeRoot", () => { +describe("ListCompositeType hashTreeRoot", () => { + it("should zero out the last sha256 block", () => { + const listType = new ListCompositeType(ssz.Root, 1024); + const value0 = Array.from({length: 65}, (_, i) => Buffer.alloc(32, i)); + const value1 = Array.from({length: 120}, (_, i) => Buffer.alloc(32, i)); + const expectedRoot0 = listType.hashTreeRoot(value0); + const expectedRoot1 = listType.hashTreeRoot(value1); + // now increase block array size + listType.hashTreeRoot(Array.from({length: 1024}, () => Buffer.alloc(32, 3))); + expect(listType.hashTreeRoot(value0)).to.deep.equal(expectedRoot0); + expect(listType.hashTreeRoot(value1)).to.deep.equal(expectedRoot1); + }); +}); + +describe("ListCompositeType ViewDU batchHashTreeRoot", () => { const value = [ {a: 1, b: 2}, {a: 3, b: 4}, @@ -242,6 +256,7 @@ for (const list of [listOfContainersType, listOfContainersType2]) { const typeName = list.typeName; const expectedRoot = list.toView(value).hashTreeRoot(); + expect(listOfContainersType2.hashTreeRoot(value)).to.be.deep.equal(expectedRoot); it(`${typeName} - fresh ViewDU`, () => {
diff --git a/packages/ssz/test/unit/byType/listComposite/tree.test.ts b/packages/ssz/test/unit/byType/listComposite/tree.test.ts
index 95b39746..f428365b 100644
--- a/packages/ssz/test/unit/byType/listComposite/tree.test.ts
+++ b/packages/ssz/test/unit/byType/listComposite/tree.test.ts
@@ -226,7 +226,21 @@ describe("ListCompositeType.sliceFrom", () => {
   }
 });

-describe("ListCompositeType batchHashTreeRoot", () => {
+describe("ListCompositeType hashTreeRoot", () => {
+  it("should zero out the last sha256 block", () => {
+    const listType = new ListCompositeType(ssz.Root, 1024);
+    const value0 = Array.from({length: 65}, (_, i) => Buffer.alloc(32, i));
+    const value1 = Array.from({length: 120}, (_, i) => Buffer.alloc(32, i));
+    const expectedRoot0 = listType.hashTreeRoot(value0);
+    const expectedRoot1 = listType.hashTreeRoot(value1);
+    // now increase the block array size
+    listType.hashTreeRoot(Array.from({length: 1024}, () => Buffer.alloc(32, 3)));
+    expect(listType.hashTreeRoot(value0)).to.deep.equal(expectedRoot0);
+    expect(listType.hashTreeRoot(value1)).to.deep.equal(expectedRoot1);
+  });
+});
+
+describe("ListCompositeType ViewDU batchHashTreeRoot", () => {
   const value = [
     {a: 1, b: 2},
     {a: 3, b: 4},
@@ -242,6 +256,7 @@ describe("ListCompositeType batchHashTreeRoot", () => {
   for (const list of [listOfContainersType, listOfContainersType2]) {
     const typeName = list.typeName;
     const expectedRoot = list.toView(value).hashTreeRoot();
+    expect(listOfContainersType2.hashTreeRoot(value)).to.be.deep.equal(expectedRoot);

     it(`${typeName} - fresh ViewDU`, () => {
       expect(listOfContainersType.toViewDU(value).batchHashTreeRoot()).to.be.deep.equal(expectedRoot);
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts
new file mode 100644
index 00000000..ff3ae2a2
--- /dev/null
+++ b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts
@@ -0,0 +1,89 @@
+import {ListCompositeType} from "../../../../src/type/listComposite";
+import {ValidatorType} from "../../../lodestarTypes/phase0/validator";
+import {preset} from "../../../lodestarTypes/params";
+import {ssz} from "../../../lodestarTypes";
+import {expect} from "chai";
+import {ContainerType} from "../../../../src/type/container";
+import {Validator} from "../../../lodestarTypes/phase0";
+const {VALIDATOR_REGISTRY_LIMIT} = preset;
+
+describe("ListValidator ssz type", function () {
+  const seedValidator = {
+    activationEligibilityEpoch: 10,
+    activationEpoch: 11,
+    exitEpoch: Infinity,
+    slashed: false,
+    withdrawableEpoch: 13,
+    pubkey: Buffer.alloc(48, 100),
+    withdrawalCredentials: Buffer.alloc(32, 100),
+    effectiveBalance: 32000000000,
+  };
+
+  const testCases = [32, 33, 34, 35];
+  const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"});
+  const oldValidatorsType = new ListCompositeType(ValidatorContainer, VALIDATOR_REGISTRY_LIMIT);
+  for (const numValidators of testCases) {
+    it(`should commit ${numValidators} validators`, () => {
+      const validators = Array.from({length: numValidators}, (_, i) => ({
+        ...seedValidator,
+        withdrawableEpoch: seedValidator.withdrawableEpoch + i,
+      }));
+      const oldViewDU = oldValidatorsType.toViewDU(validators);
+      const newViewDU = ssz.phase0.Validators.toViewDU(validators);
+      // modify all validators
+      for (let i = 0; i < numValidators; i++) {
+        oldViewDU.get(i).activationEpoch = 2024;
+        newViewDU.get(i).activationEpoch = 2024;
+      }
+      expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.batchHashTreeRoot());
+      expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
+    });
+  }
+
+  const testCases2 = [[1], [3, 5], [1, 9, 7]];
+  const numValidator = 33;
+  for (const modifiedIndices of testCases2) {
+    it(`should modify ${modifiedIndices.length} validators`, () => {
+      const validators = Array.from({length: numValidator}, (_, i) => ({
+        ...seedValidator,
+        withdrawableEpoch: seedValidator.withdrawableEpoch + i,
+      }));
+      const oldViewDU = oldValidatorsType.toViewDU(validators);
+      const newViewDU = ssz.phase0.Validators.toViewDU(validators);
+      for (const index of modifiedIndices) {
+        oldViewDU.get(index).activationEpoch = 2024;
+        newViewDU.get(index).activationEpoch = 2024;
+      }
+      expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.batchHashTreeRoot());
+      expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
+    });
+  }
+
+  const testCases3 = [1, 3, 5, 7];
+  for (const numPush of testCases3) {
+    it(`should push ${numPush} validators`, () => {
+      const validators = Array.from({length: numValidator}, (_, i) => ({
+        ...seedValidator,
+        withdrawableEpoch: seedValidator.withdrawableEpoch + i,
+      }));
+      const oldViewDU = oldValidatorsType.toViewDU(validators);
+      const newViewDU = ssz.phase0.Validators.toViewDU(validators);
+      const newValidators: Validator[] = [];
+      // this ensures commit() updates the nodes array
+      newViewDU.getAllReadonlyValues();
+      for (let i = 0; i < numPush; i++) {
+        const validator = {...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + numValidator + i};
+        newValidators.push(validator);
+        oldViewDU.push(ValidatorContainer.toViewDU(validator));
+        newViewDU.push(ssz.phase0.Validator.toViewDU(validator));
+      }
+      oldViewDU.commit();
+      expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.node.root);
+      expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
+      const allValidators = newViewDU.getAllReadonlyValues();
+      for (let i = 0; i < numPush; i++) {
+        expect(allValidators[numValidator + i]).to.be.deep.equal(newValidators[i]);
+      }
+    });
+  }
+});
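Every assertion in listValidator.test.ts reduces to one property: the optimized ssz.phase0.Validators list must be observationally identical to the reference ContainerType-based list. Extracted as a hypothetical helper (expectEquivalent is not part of this patch):

import {expect} from "chai";

// after any mutation, both views must agree on merkle root and serialization
function expectEquivalent(
  oldViewDU: {batchHashTreeRoot(): Uint8Array; serialize(): Uint8Array},
  newViewDU: {batchHashTreeRoot(): Uint8Array; serialize(): Uint8Array}
): void {
  expect(newViewDU.batchHashTreeRoot()).to.be.deep.equal(oldViewDU.batchHashTreeRoot());
  expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
}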
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
new file mode 100644
index 00000000..6725a92b
--- /dev/null
+++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
@@ -0,0 +1,80 @@
+import {digestNLevel} from "@chainsafe/persistent-merkle-tree";
+import {ContainerType} from "../../../../../ssz/src/type/container";
+import {ssz} from "../../../lodestarTypes";
+import {ValidatorNodeStruct, ValidatorType, validatorToChunkBytes} from "../../../lodestarTypes/phase0/validator";
+import {expect} from "chai";
+import {Validator} from "../../../lodestarTypes/phase0/sszTypes";
+
+const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"});
+
+describe("Validator ssz types", function () {
+  const seedValidator = {
+    activationEligibilityEpoch: 10,
+    activationEpoch: 11,
+    exitEpoch: Infinity,
+    slashed: false,
+    withdrawableEpoch: 13,
+    pubkey: Buffer.alloc(48, 100),
+    withdrawalCredentials: Buffer.alloc(32, 100),
+    effectiveBalance: 32000000000,
+  };
+
+  const validators = [
+    {...seedValidator, effectiveBalance: 31000000000, slashed: false},
+    {...seedValidator, effectiveBalance: 32000000000, slashed: true},
+  ];
+
+  it("should serialize and hash to the same value", () => {
+    for (const validator of validators) {
+      const serialized = ValidatorContainer.serialize(validator);
+      const serialized2 = ssz.phase0.Validator.serialize(validator);
+      const serialized3 = ssz.phase0.Validator.toViewDU(validator).serialize();
+      expect(serialized2).to.be.deep.equal(serialized);
+      expect(serialized3).to.be.deep.equal(serialized);
+
+      const root = ValidatorContainer.hashTreeRoot(validator);
+      const root2 = ssz.phase0.Validator.hashTreeRoot(validator);
+      const root3 = ssz.phase0.Validator.toViewDU(validator).hashTreeRoot();
+      const root4 = ssz.phase0.Validator.toViewDU(validator).batchHashTreeRoot();
+      expect(root2).to.be.deep.equal(root);
+      expect(root3).to.be.deep.equal(root);
+      expect(root4).to.be.deep.equal(root);
+    }
+  });
+});
+
+describe("validatorToChunkBytes", function () {
+  const seedValidator = {
+    activationEligibilityEpoch: 10,
+    activationEpoch: 11,
+    exitEpoch: Infinity,
+    slashed: false,
+    withdrawableEpoch: 13,
+    pubkey: Buffer.alloc(48, 100),
+    withdrawalCredentials: Buffer.alloc(32, 100),
+  };
+
+  const validators = [
+    {...seedValidator, effectiveBalance: 31000000000, slashed: false},
+    {...seedValidator, effectiveBalance: 32000000000, slashed: true},
+  ];
+
+  it("should populate validator value to merkle bytes", () => {
+    for (const validator of validators) {
+      const expectedRoot0 = ValidatorNodeStruct.hashTreeRoot(validator);
+      // a validator has 8 fields
+      const level3 = new Uint8Array(32 * 8);
+      const dataView = new DataView(level3.buffer, level3.byteOffset, level3.byteLength);
+      // pubkey takes 2 chunks, so it has to go to another level
+      const level4 = new Uint8Array(32 * 2);
+      validatorToChunkBytes({uint8Array: level3, dataView}, level4, validator);
+      // the additional slice() calls make it easier to debug
+      const pubkeyRoot = digestNLevel(level4, 1).slice();
+      level3.set(pubkeyRoot, 0);
+      const root = digestNLevel(level3, 3).slice();
+      const expectedRootNode2 = Validator.value_toTree(validator);
+      expect(root).to.be.deep.equals(expectedRoot0);
+      expect(root).to.be.deep.equals(expectedRootNode2.root);
+    }
+  });
+});
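The two-level layout in validator.test.ts follows from chunk counts: a Validator has 8 field chunks, which take a depth-3 subtree (2^3 = 8), while the 48-byte pubkey spans 2 chunks one level deeper, whose root is folded back into slot 0 of level 3. The arithmetic, as a sketch:

// levels needed to reduce n chunks (n >= 1) to a single root
const depth = (chunks: number): number => Math.ceil(Math.log2(chunks));
// depth(8) === 3: the Validator's 8 field chunks (level3 above)
// depth(2) === 1: the pubkey's two 32-byte chunks (level4 above)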
diff --git a/packages/ssz/test/unit/merkleize.test.ts b/packages/ssz/test/unit/merkleize.test.ts
index 6b996c8c..6626d183 100644
--- a/packages/ssz/test/unit/merkleize.test.ts
+++ b/packages/ssz/test/unit/merkleize.test.ts
@@ -1,5 +1,6 @@
 import {expect} from "chai";
-import {bitLength, maxChunksToDepth, nextPowerOf2} from "../../src/util/merkleize";
+import {bitLength, maxChunksToDepth, merkleize, mixInLength, nextPowerOf2} from "../../src/util/merkleize";
+import {merkleizeBlocksBytes, LeafNode, zeroHash, merkleizeBlockArray} from "@chainsafe/persistent-merkle-tree";

 describe("util / merkleize / bitLength", () => {
   const bitLengthByIndex = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4];
@@ -30,3 +31,63 @@ describe("util / merkleize / nextPowerOf2", () => {
     });
   }
 });
+
+describe("util / merkleize / mixInLength", () => {
+  const root = Buffer.alloc(32, 1);
+  const lengths = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
+  for (const length of lengths) {
+    it(`mixInLength(${length})`, () => {
+      const mixInLengthBuffer = Buffer.alloc(64);
+      mixInLengthBuffer.set(root, 0);
+      mixInLengthBuffer.writeUIntLE(length, 32, 6);
+      const finalRoot = new Uint8Array(32);
+      merkleizeBlocksBytes(mixInLengthBuffer, 2, finalRoot, 0);
+      const expectedRoot = mixInLength(root, length);
+      expect(finalRoot).to.be.deep.equal(expectedRoot);
+    });
+  }
+});
+
+describe("merkleize should be equal to merkleizeBlocksBytes of hasher", () => {
+  const numNodes = [0, 1, 2, 3, 4, 5, 6, 7, 8];
+  for (const numNode of numNodes) {
+    it(`merkleize for ${numNode} nodes`, () => {
+      const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+      const data = Buffer.concat(nodes.map((node) => node.root));
+      const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
+      const roots = nodes.map((node) => node.root);
+      const expectedRoot = Buffer.alloc(32);
+      const chunkCount = Math.max(numNode, 1);
+      merkleizeBlocksBytes(padData, chunkCount, expectedRoot, 0);
+      expect(merkleize(roots, chunkCount)).to.be.deep.equal(expectedRoot);
+    });
+  }
+});
+
+// same as the above but with the merkleizeBlockArray() method
+describe("merkleize should be equal to merkleizeBlockArray of hasher", () => {
+  // hashtree has a buffer of 16 * 64 bytes = 32 nodes
+  const numNodes = [64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79];
+  for (const numNode of numNodes) {
+    it(`merkleize for ${numNode} nodes`, () => {
+      const nodes = Array.from({length: numNode}, (_, i) => LeafNode.fromRoot(Buffer.alloc(32, i)));
+      const data = Buffer.concat(nodes.map((node) => node.root));
+      const padData = numNode % 2 === 1 ? Buffer.concat([data, zeroHash(0)]) : data;
+      expect(padData.length % 64).to.equal(0);
+      const blocks: Uint8Array[] = [];
+      for (let i = 0; i < padData.length; i += 64) {
+        blocks.push(padData.slice(i, i + 64));
+      }
+      const expectedRoot = Buffer.alloc(32);
+      // depth of 79 nodes is 7, make it 10 to test the padding
+      const chunkCount = Math.max(numNode, 10);
+      // add redundant blocks, they should not affect the result
+      const blockLimit = blocks.length;
+      blocks.push(Buffer.alloc(64, 1));
+      blocks.push(Buffer.alloc(64, 2));
+      merkleizeBlockArray(blocks, blockLimit, chunkCount, expectedRoot, 0);
+      const roots = nodes.map((node) => node.root);
+      expect(merkleize(roots, chunkCount)).to.be.deep.equal(expectedRoot);
+    });
+  }
+});
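The mixInLength test above pins down the definition: mix_in_length(root, length) is SHA256 over a single 64-byte block holding the root followed by the length as a little-endian integer (the test writes 6 length bytes; the rest stay zero). An equivalent sketch using @noble/hashes, assuming lengths below 2^32:

import {sha256} from "@noble/hashes/sha256";

// mix_in_length per the SSZ spec: hash(root ++ length_le), one 64-byte block
function mixInLengthSketch(root: Uint8Array, length: number): Uint8Array {
  const block = new Uint8Array(64);
  block.set(root, 0);
  new DataView(block.buffer).setUint32(32, length, true); // little-endian, low 32 bits
  return sha256(block); // bytes 36..63 remain zero
}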
diff --git a/packages/ssz/test/unit/unchangedViewDUs.test.ts b/packages/ssz/test/unit/unchangedViewDUs.test.ts
index f1e57a36..aa61dfe2 100644
--- a/packages/ssz/test/unit/unchangedViewDUs.test.ts
+++ b/packages/ssz/test/unit/unchangedViewDUs.test.ts
@@ -5,7 +5,7 @@ import {getRandomState} from "../utils/generateEth2Objs";
 describe("Unchanged ViewDUs", () => {
   const state = sszAltair.BeaconState.toViewDU(getRandomState(100));

-  it.skip("should not recompute batchHashTreeRoot() when no fields is changed", () => {
+  it("should not recompute batchHashTreeRoot() when no fields is changed", () => {
     const root = state.batchHashTreeRoot();
     // this causes viewsChanged inside BeaconState container
     state.validators.length;
diff --git a/yarn.lock b/yarn.lock
index fef8f425..e6d96d22 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -9008,6 +9008,14 @@ nth-check@^2.0.1:
   dependencies:
     boolbase "^1.0.0"

+null-loader@^4.0.1:
+  version "4.0.1"
+  resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-4.0.1.tgz#8e63bd3a2dd3c64236a4679428632edd0a6dbc6a"
+  integrity sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg==
+  dependencies:
+    loader-utils "^2.0.0"
+    schema-utils "^3.0.0"
+
 nx@19.0.4, "nx@>=17.1.2 < 20":
   version "19.0.4"
   resolved "https://registry.yarnpkg.com/nx/-/nx-19.0.4.tgz#c39803f6186f6b009c39f5f30f902ce8e136dcde"