From f9825bbeb16fb245d7ff194c338c9243a5ffab81 Mon Sep 17 00:00:00 2001
From: Tuyen Nguyen
Date: Wed, 26 Jun 2024 13:16:34 +0700
Subject: [PATCH] fix: remove hashtree

---
 .../src/hasher/hashtree.ts                    | 179 ------------
 .../test/unit/hasher.test.ts                  |   9 +-
 .../lodestarTypes/phase0/listValidator.ts     |  15 -
 .../ssz/test/lodestarTypes/phase0/sszTypes.ts |   4 +-
 .../test/lodestarTypes/phase0/validator.ts    |   6 -
 .../phase0/viewDU/listValidator.ts            | 131 ---------
 .../lodestarTypes/phase0/viewDU/validator.ts  | 275 ------------------
 .../phase0/listValidator.test.ts              |  82 ------
 .../lodestarTypes/phase0/validator.test.ts    |  32 --
 .../phase0/viewDU/validatorNodeStruct.test.ts |  53 ----
 setHasher.mjs                                 |   2 +-
 11 files changed, 3 insertions(+), 785 deletions(-)
 delete mode 100644 packages/persistent-merkle-tree/src/hasher/hashtree.ts
 delete mode 100644 packages/ssz/test/lodestarTypes/phase0/listValidator.ts
 delete mode 100644 packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
 delete mode 100644 packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
 delete mode 100644 packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts
 delete mode 100644 packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts
diff --git a/packages/persistent-merkle-tree/src/hasher/hashtree.ts b/packages/persistent-merkle-tree/src/hasher/hashtree.ts
deleted file mode 100644
index 39b9dcd6..00000000
--- a/packages/persistent-merkle-tree/src/hasher/hashtree.ts
+++ /dev/null
@@ -1,179 +0,0 @@
-import {hash, hashInto} from "@chainsafe/hashtree";
-import {Hasher, HashObject} from "./types";
-import {HashComputation, Node} from "../node";
-import { byteArrayToHashObject, hashObjectToByteArray } from "@chainsafe/as-sha256";
-
-/**
- * Best SIMD implementation is in 512 bits = 64 bytes
- * If not, hashtree will make a loop inside
- * Given sha256 operates on a block of 4 bytes, we can hash 16 inputs at once
- * Each input is 64 bytes
- */
-const PARALLEL_FACTOR = 16;
-const MAX_INPUT_SIZE = PARALLEL_FACTOR * 64;
-const uint8Input = new Uint8Array(MAX_INPUT_SIZE);
-const uint32Input = new Uint32Array(uint8Input.buffer);
-const uint8Output = new Uint8Array(PARALLEL_FACTOR * 32);
-const uint32Output = new Uint32Array(uint8Output.buffer);
-
-
-export const hasher: Hasher = {
-  name: "hashtree",
-  digest64(obj1: Uint8Array, obj2: Uint8Array): Uint8Array {
-    if (obj1.length !== 32 || obj2.length !== 32) {
-      throw new Error("Invalid input length");
-    }
-    uint8Input.set(obj1, 0);
-    uint8Input.set(obj2, 32);
-    const hashInput = uint8Input.subarray(0, 64);
-    const hashOutput = uint8Output.subarray(0, 32);
-    hashInto(hashInput, hashOutput);
-    return hashOutput.slice();
-  },
-  digest64HashObjects(obj1: HashObject, obj2: HashObject): HashObject {
-    hashObjectToUint32Array(obj1, uint32Input, 0);
-    hashObjectToUint32Array(obj2, uint32Input, 8);
-    const hashInput = uint8Input.subarray(0, 64);
-    const hashOutput = uint8Output.subarray(0, 32);
-    hashInto(hashInput, hashOutput);
-    return uint32ArrayToHashObject(uint32Output, 0);
-  },
-  // given nLevel = 3
-  // digest multiple of 8 chunks = 256 bytes
-  // the result is multiple of 1 chunk = 32 bytes
-  // this is the same to hashTreeRoot() of multiple validators
-  // TODO - batch: data, offset, length to avoid subarray call
-  digestNLevelUnsafe(data: Uint8Array, nLevel: number): Uint8Array {
-    let inputLength = data.length;
-    const bytesInBatch = Math.pow(2, nLevel) * 32;
-    if (nLevel < 1) {
-      throw new Error(`Invalid nLevel, expect to be greater than 0, got ${nLevel}`);
-    }
-    if (inputLength % bytesInBatch !== 0) {
-      throw new Error(`Invalid input length, expect to be multiple of ${bytesInBatch} for nLevel ${nLevel}, got ${inputLength}`);
-    }
-    if (inputLength > MAX_INPUT_SIZE) {
-      throw new Error(`Invalid input length, expect to be less than ${MAX_INPUT_SIZE}, got ${inputLength}`);
-    }
-
-    let outputLength = Math.floor(inputLength / 2);
-    let hashOutput: Uint8Array | null = null;
-    for (let i = nLevel; i > 0; i--) {
-      uint8Input.set(hashOutput ?? data, 0);
-      const hashInput = uint8Input.subarray(0, inputLength);
-      hashOutput = uint8Output.subarray(0, outputLength);
-      hashInto(hashInput, hashOutput);
-      inputLength = outputLength;
-      outputLength = Math.floor(inputLength / 2);
-    }
-
-    if (hashOutput === null) {
-      throw new Error("hashOutput is null");
-    }
-    // the result is unsafe as it will be modified later, consumer should save the result if needed
-    return hashOutput;
-  },
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  batchHashObjects(inputs: HashObject[]): HashObject[] {
-    if (inputs.length === 0) {
-      return [];
-    }
-    if (inputs.length % 2 !== 0) {
-      throw new Error("inputs length must be even");
-    }
-
-    const batch = PARALLEL_FACTOR * 2;
-    const outHashObjects: HashObject[] = [];
-    for (const [i, hashInput] of inputs.entries()) {
-      const indexInBatch = i % batch;
-      hashObjectToUint32Array(hashInput, uint32Input, indexInBatch * 8);
-      if (indexInBatch === batch - 1) {
-        hashInto(uint8Input, uint8Output);
-        for (let j = 0; j < batch / 2; j++) {
-          outHashObjects.push(uint32ArrayToHashObject(uint32Output, j * 8));
-        }
-      }
-    }
-
-    // hash remaining
-    const remaining = inputs.length % batch;
-    if (remaining > 0) {
-      const remainingInput = uint8Input.subarray(0, remaining * 32);
-      const remainingOutput = uint8Output.subarray(0, remaining * 16);
-      hashInto(remainingInput, remainingOutput);
-      for (let i = 0; i < remaining / 2; i++) {
-        outHashObjects.push(uint32ArrayToHashObject(uint32Output, i * 8));
-      }
-    }
-
-    return outHashObjects;
-  },
-  executeHashComputations(hashComputations: Array<HashComputation[]>): void {
-    for (let level = hashComputations.length - 1; level >= 0; level--) {
-      const hcArr = hashComputations[level];
-      if (!hcArr) {
-        // should not happen
-        throw Error(`no hash computations for level ${level}`);
-      }
-
-      // size input array to 2 HashObject per computation * 32 bytes per object
-      // const input: Uint8Array = Uint8Array.from(new Array(hcArr.length * 2 * 32));
-      let destNodes: Node[] = [];
-
-      // hash every 16 inputs at once to avoid memory allocation
-      for (const [i, {src0, src1, dest}] of hcArr.entries()) {
-        const indexInBatch = i % PARALLEL_FACTOR;
-        const offset = indexInBatch * 16;
-
-        hashObjectToUint32Array(src0, uint32Input, offset);
-        hashObjectToUint32Array(src1, uint32Input, offset + 8);
-        destNodes.push(dest);
-        if (indexInBatch === PARALLEL_FACTOR - 1) {
-          hashInto(uint8Input, uint8Output);
-          for (const [j, destNode] of destNodes.entries()) {
-            const outputOffset = j * 8;
-            destNode.applyHash(uint32ArrayToHashObject(uint32Output, outputOffset));
-          }
-          destNodes = [];
-        }
-      }
-
-      const remaining = hcArr.length % PARALLEL_FACTOR;
-      // we prepared data in input, now hash the remaining
-      if (remaining > 0) {
-        const remainingInput = uint8Input.subarray(0, remaining * 64);
-        const remainingOutput = uint8Output.subarray(0, remaining * 32);
-        hashInto(remainingInput, remainingOutput);
-        // destNodes was prepared above
-        for (const [i, destNode] of destNodes.entries()) {
-          const offset = i * 8;
-          destNode.applyHash(uint32ArrayToHashObject(uint32Output, offset));
-        }
-      }
-    }
-  },
-};
-
-function hashObjectToUint32Array(obj: HashObject, arr: Uint32Array, offset: number): void {
-  arr[offset] = obj.h0;
-  arr[offset + 1] = obj.h1;
-  arr[offset + 2] = obj.h2;
-  arr[offset + 3] = obj.h3;
-  arr[offset + 4] = obj.h4;
-  arr[offset + 5] = obj.h5;
-  arr[offset + 6] = obj.h6;
-  arr[offset + 7] = obj.h7;
-}
-
-function uint32ArrayToHashObject(arr: Uint32Array, offset: number): HashObject {
-  return {
-    h0: arr[offset],
-    h1: arr[offset + 1],
-    h2: arr[offset + 2],
-    h3: arr[offset + 3],
-    h4: arr[offset + 4],
-    h5: arr[offset + 5],
-    h6: arr[offset + 6],
-    h7: arr[offset + 7],
-  };
-}
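[Editor's note] For reference, the deleted hasher's core pattern packs PARALLEL_FACTOR = 16 two-root inputs into one 1024-byte buffer so a single native call digests all of them. A minimal sketch of that pattern, assuming only the hashInto(input, output) API from @chainsafe/hashtree used above (input holds N 64-byte blocks, output receives N 32-byte digests):

  import {hashInto} from "@chainsafe/hashtree";

  const PARALLEL_FACTOR = 16;
  const input = new Uint8Array(PARALLEL_FACTOR * 64);
  const output = new Uint8Array(PARALLEL_FACTOR * 32);

  // Digest 16 (left, right) pairs of 32-byte roots in one native call.
  function digest64Batch(pairs: [Uint8Array, Uint8Array][]): Uint8Array[] {
    if (pairs.length !== PARALLEL_FACTOR) throw new Error("expect exactly 16 pairs");
    for (const [i, [left, right]] of pairs.entries()) {
      input.set(left, i * 64);
      input.set(right, i * 64 + 32);
    }
    hashInto(input, output);
    // copy out, since the shared output buffer is reused on the next call
    return pairs.map((_, i) => output.slice(i * 32, (i + 1) * 32));
  }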
diff --git a/packages/persistent-merkle-tree/test/unit/hasher.test.ts b/packages/persistent-merkle-tree/test/unit/hasher.test.ts
index a2edf1d2..ca728150 100644
--- a/packages/persistent-merkle-tree/test/unit/hasher.test.ts
+++ b/packages/persistent-merkle-tree/test/unit/hasher.test.ts
@@ -2,11 +2,10 @@ import {expectEqualHex} from "../utils/expectHex";
 import {uint8ArrayToHashObject, hashObjectToUint8Array} from "../../src/hasher/util";
 import {hasher as nobleHasher} from "../../src/hasher/noble";
 import {hasher as asSha256Hasher} from "../../src/hasher/as-sha256";
-import {hasher as hashtreeHasher} from "../../src/hasher/hashtree";
 import {linspace} from "../utils/misc";
 import {buildComparisonTrees} from "../utils/tree";
 
-const hashers = [hashtreeHasher, asSha256Hasher, nobleHasher];
+const hashers = [asSha256Hasher, nobleHasher];
 
 describe("hashers", function () {
   describe("digest64 vs digest64HashObjects methods should be the same", () => {
@@ -30,9 +29,7 @@ describe("hashers", function () {
     const root2 = Buffer.alloc(32, 0xff);
     const hash1 = nobleHasher.digest64(root1, root2);
     const hash2 = asSha256Hasher.digest64(root1, root2);
-    const hash3 = hashtreeHasher.digest64(root1, root2);
     expectEqualHex(hash1, hash2);
-    expectEqualHex(hash1, hash3);
   });
 
   it("all hashers should return the same values from digest64HashObjects", () => {
@@ -42,9 +39,7 @@ describe("hashers", function () {
     const hashObject2 = uint8ArrayToHashObject(root2);
     const hash1 = hashObjectToUint8Array(nobleHasher.digest64HashObjects(hashObject1, hashObject2));
     const hash2 = hashObjectToUint8Array(asSha256Hasher.digest64HashObjects(hashObject1, hashObject2));
-    const hash3 = hashObjectToUint8Array(hashtreeHasher.digest64HashObjects(hashObject1, hashObject2));
     expectEqualHex(hash1, hash2);
-    expectEqualHex(hash1, hash3);
   });
 
   it("all hashers should return the same values from batchHashObjects", () => {
@@ -53,10 +48,8 @@ describe("hashers", function () {
       .map(uint8ArrayToHashObject);
     const results1 = nobleHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array);
     const results2 = asSha256Hasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array);
-    const results3 = hashtreeHasher.batchHashObjects(hashObjects).map(hashObjectToUint8Array);
     Object.values(results1).forEach((result1, i) => {
       expectEqualHex(result1, results2[i]);
-      expectEqualHex(result1, results3[i]);
     });
   });
"../../../src/viewDU/listComposite"; -import { ValidatorNodeStructType } from "./validator"; -import { ListValidatorTreeViewDU } from "./viewDU/listValidator"; - -export class ListValidatorType extends ListCompositeType { - constructor(limit: number) { - super(new ValidatorNodeStructType(), limit); - } - - getViewDU(node: Node, cache?: unknown): ListCompositeTreeViewDU { - return new ListValidatorTreeViewDU(this, node, cache as any); - } -} \ No newline at end of file diff --git a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts index a2e7e8ab..5a9c84df 100644 --- a/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts +++ b/packages/ssz/test/lodestarTypes/phase0/sszTypes.ts @@ -16,7 +16,6 @@ import { ATTESTATION_SUBNET_COUNT, } from "../params"; import * as primitiveSsz from "../primitive/sszTypes"; -import {ListValidatorType} from "./listValidator"; import {ValidatorNodeStruct} from "./validator"; export {ValidatorNodeStruct}; @@ -252,8 +251,7 @@ export const ValidatorContainer = new ContainerType( export const Validator = ValidatorNodeStruct; // Export as stand-alone for direct tree optimizations -// export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); -export const Validators = new ListValidatorType(VALIDATOR_REGISTRY_LIMIT); +export const Validators = new ListCompositeType(ValidatorNodeStruct, VALIDATOR_REGISTRY_LIMIT); export const Balances = new ListUintNum64Type(VALIDATOR_REGISTRY_LIMIT); export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICAL_VECTOR); export const Slashings = new VectorBasicType(Gwei, EPOCHS_PER_SLASHINGS_VECTOR); diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts b/packages/ssz/test/lodestarTypes/phase0/validator.ts index 33d4cc3d..21fe18ad 100644 --- a/packages/ssz/test/lodestarTypes/phase0/validator.ts +++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts @@ -2,8 +2,6 @@ import {ByteViews} from "../../../src/type/abstract"; import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct"; import {ValueOfFields} from "../../../src/view/container"; import * as primitiveSsz from "../primitive/sszTypes"; -import { ValidatorTreeViewDU } from "./viewDU/validator"; -import {Node} from "@chainsafe/persistent-merkle-tree"; const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz; @@ -37,10 +35,6 @@ export class ValidatorNodeStructType extends ContainerNodeStructType { - constructor( - readonly type: ListCompositeType, - protected _rootNode: Node, - cache?: ArrayCompositeTreeViewDUCache - ) { - super(type, _rootNode, cache); - } - - commit(hashComps: HashComputationGroup | null = null): void { - const isOldRootHashed = this._rootNode.h0 !== null; - if (this.viewsChanged.size === 0) { - if (!isOldRootHashed && hashComps !== null) { - // not possible to get HashComputations due to BranchNodeStruct - this._rootNode.root; - } - return; - } - - // TODO - batch: remove this type cast - const indicesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b); - const endBatch = indicesChanged.length - (indicesChanged.length % PARALLEL_FACTOR); - // nodesChanged is sorted by index - const nodesChanged: {index: number; node: Node}[] = []; - // commit every 16 validators in batch - for (let i = 0; i < endBatch; i++) { - const indexInBatch = i % PARALLEL_FACTOR; - const viewIndex = indicesChanged[i]; - const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU; - 
diff --git a/packages/ssz/test/lodestarTypes/phase0/validator.ts b/packages/ssz/test/lodestarTypes/phase0/validator.ts
index 33d4cc3d..21fe18ad 100644
--- a/packages/ssz/test/lodestarTypes/phase0/validator.ts
+++ b/packages/ssz/test/lodestarTypes/phase0/validator.ts
@@ -2,8 +2,6 @@ import {ByteViews} from "../../../src/type/abstract";
 import {ContainerNodeStructType} from "../../../src/type/containerNodeStruct";
 import {ValueOfFields} from "../../../src/view/container";
 import * as primitiveSsz from "../primitive/sszTypes";
-import { ValidatorTreeViewDU } from "./viewDU/validator";
-import {Node} from "@chainsafe/persistent-merkle-tree";
 
 const {Boolean, Bytes32, UintNum64, BLSPubkey, EpochInf} = primitiveSsz;
 
@@ -37,10 +35,6 @@ export class ValidatorNodeStructType extends ContainerNodeStructType<typeof validatorType> {
-
-  getViewDU(node: Node): ValidatorTreeViewDU {
-    return new ValidatorTreeViewDU(this, node);
-  }
 }
diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
deleted file mode 100644
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/listValidator.ts
+++ /dev/null
@@ -1,131 +0,0 @@
-import {byteArrayToHashObject} from "@chainsafe/as-sha256";
-import {HashComputationGroup, Node, digestNLevelUnsafe, setNodesAtDepth} from "@chainsafe/persistent-merkle-tree";
-import {ListCompositeType} from "../../../../src/type/listComposite";
-import {ByteViews} from "../../../../src/type/abstract";
-import {ArrayCompositeTreeViewDUCache} from "../../../../src/viewDU/arrayComposite";
-import {ListCompositeTreeViewDU} from "../../../../src/viewDU/listComposite";
-import {ValidatorNodeStructType} from "../validator";
-import {ValidatorTreeViewDU} from "./validator";
-
-const PARALLEL_FACTOR = 16;
-const batchLevel3Bytes = new Uint8Array(PARALLEL_FACTOR * 8 * 32);
-const batchLevel4Bytes = new Uint8Array(PARALLEL_FACTOR * 2 * 32);
-const level3ByteViewsArr: ByteViews[] = [];
-const level4BytesArr: Uint8Array[] = [];
-for (let i = 0; i < PARALLEL_FACTOR; i++) {
-  const uint8Array = batchLevel3Bytes.subarray(i * 8 * 32, (i + 1) * 8 * 32);
-  const dataView = new DataView(uint8Array.buffer, uint8Array.byteOffset, uint8Array.byteLength);
-  level3ByteViewsArr.push({uint8Array, dataView});
-  level4BytesArr.push(batchLevel4Bytes.subarray(i * 2 * 32, (i + 1) * 2 * 32));
-}
-
-export class ListValidatorTreeViewDU extends ListCompositeTreeViewDU<ValidatorNodeStructType> {
-  constructor(
-    readonly type: ListCompositeType<ValidatorNodeStructType>,
-    protected _rootNode: Node,
-    cache?: ArrayCompositeTreeViewDUCache
-  ) {
-    super(type, _rootNode, cache);
-  }
-
-  commit(hashComps: HashComputationGroup | null = null): void {
-    const isOldRootHashed = this._rootNode.h0 !== null;
-    if (this.viewsChanged.size === 0) {
-      if (!isOldRootHashed && hashComps !== null) {
-        // not possible to get HashComputations due to BranchNodeStruct
-        this._rootNode.root;
-      }
-      return;
-    }
-
-    // TODO - batch: remove this type cast
-    const indicesChanged = Array.from(this.viewsChanged.keys()).sort((a, b) => a - b);
-    const endBatch = indicesChanged.length - (indicesChanged.length % PARALLEL_FACTOR);
-    // nodesChanged is sorted by index
-    const nodesChanged: {index: number; node: Node}[] = [];
-    // commit every 16 validators in batch
-    for (let i = 0; i < endBatch; i++) {
-      const indexInBatch = i % PARALLEL_FACTOR;
-      const viewIndex = indicesChanged[i];
-      const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU;
-      viewChanged.valueToMerkleBytes(level3ByteViewsArr[indexInBatch], level4BytesArr[indexInBatch]);
-
-      if (indexInBatch === PARALLEL_FACTOR - 1) {
-        // hash level 4
-        const pubkeyRoots = digestNLevelUnsafe(batchLevel4Bytes, 1);
-        if (pubkeyRoots.length !== PARALLEL_FACTOR * 32) {
-          throw new Error(`Invalid pubkeyRoots length, expect ${PARALLEL_FACTOR * 32}, got ${pubkeyRoots.length}`);
-        }
-        for (let j = 0; j < PARALLEL_FACTOR; j++) {
-          level3ByteViewsArr[j].uint8Array.set(pubkeyRoots.subarray(j * 32, (j + 1) * 32), 0);
-        }
-        const validatorRoots = digestNLevelUnsafe(batchLevel3Bytes, 3);
-        if (validatorRoots.length !== PARALLEL_FACTOR * 32) {
-          throw new Error(`Invalid validatorRoots length, expect ${PARALLEL_FACTOR * 32}, got ${validatorRoots.length}`);
-        }
-        // commit all validators in this batch
-        for (let j = PARALLEL_FACTOR - 1; j >= 0; j--) {
-          const viewIndex = indicesChanged[i - j];
-          const indexInBatch = (i - j) % PARALLEL_FACTOR;
-          const hashObject = byteArrayToHashObject(validatorRoots.subarray(indexInBatch * 32, (indexInBatch + 1) * 32));
-          const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU;
-          viewChanged.commitToHashObject(hashObject);
-          nodesChanged.push({index: viewIndex, node: viewChanged.node});
-          // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
-          this.nodes[viewIndex] = viewChanged.node;
-        }
-      }
-    }
-
-    // commit the remaining validators, we can do in batch too but don't want to create new Uint8Array views
-    // it's not much different to commit one by one
-    for (let i = endBatch; i < indicesChanged.length; i++) {
-      const viewIndex = indicesChanged[i];
-      const viewChanged = this.viewsChanged.get(viewIndex) as ValidatorTreeViewDU;
-      viewChanged.commit();
-      nodesChanged.push({index: viewIndex, node: viewChanged.node});
-      // Set new node in nodes array to ensure data represented in the tree and fast nodes access is equal
-      this.nodes[viewIndex] = viewChanged.node;
-    }
-
-    // do the remaining commit step the same to parent (ArrayCompositeTreeViewDU)
-    const indexes = nodesChanged.map((entry) => entry.index);
-    const nodes = nodesChanged.map((entry) => entry.node);
-    const chunksNode = this.type.tree_getChunksNode(this._rootNode);
-    const hashCompsThis =
-      hashComps != null && isOldRootHashed
-        ? {
-            byLevel: hashComps.byLevel,
-            offset: hashComps.offset + this.type.tree_chunksNodeOffset(),
-          }
-        : null;
-    const newChunksNode = setNodesAtDepth(chunksNode, this.type.chunkDepth, indexes, nodes, hashCompsThis);
-
-    this._rootNode = this.type.tree_setChunksNode(
-      this._rootNode,
-      newChunksNode,
-      this.dirtyLength ? this._length : null,
-      hashComps
-    );
-
-    if (!isOldRootHashed && hashComps !== null) {
-      // should never happen, handle just in case
-      // not possible to get HashComputations due to BranchNodeStruct
-      this._rootNode.root;
-    }
-
-    this.viewsChanged.clear();
-    this.dirtyLength = false;
-  }
-}
diff --git a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts b/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
deleted file mode 100644
index 63086944..00000000
--- a/packages/ssz/test/lodestarTypes/phase0/viewDU/validator.ts
+++ /dev/null
@@ -1,275 +0,0 @@
-import { HashObject, byteArrayToHashObject } from "@chainsafe/as-sha256";
-import { BranchNodeStruct } from "../../../../src/branchNodeStruct";
-import { ContainerTypeGeneric } from "../../../../src/view/container";
-import { TreeViewDU } from "../../../../src/viewDU/abstract";
-import { ValidatorType } from "../validator";
-import {
-  Node,
-  digestNLevelUnsafe,
-} from "@chainsafe/persistent-merkle-tree";
-import { ByteViews } from "../../../../src/type/abstract";
-type Validator = {
-  pubkey: Uint8Array;
-  withdrawalCredentials: Uint8Array;
-  effectiveBalance: number;
-  slashed: boolean;
-  activationEligibilityEpoch: number;
-  activationEpoch: number;
-  exitEpoch: number;
-  withdrawableEpoch: number;
-};
-
-const numFields = 8;
-const NUMBER_2_POW_32 = 2 ** 32;
-/*
- * Below constants are respective to their ssz type in `ValidatorType`.
- */
-const UINT32_SIZE = 4;
-const CHUNK_SIZE = 32;
-
-// validator has 8 nodes at level 3
-const singleLevel3Bytes = new Uint8Array(8 * 32);
-const singleLevel3ByteView = {uint8Array: singleLevel3Bytes, dataView: new DataView(singleLevel3Bytes.buffer)};
-// validator has 2 nodes at level 4 (pubkey has 48 bytes = 2 * nodes)
-const singleLevel4Bytes = new Uint8Array(2 * 32);
-
-/**
- * A specific ViewDU for validator designed to be efficient to batch hash and efficient to create tree
- * because it uses prepopulated nodes to do that.
- */
-export class ValidatorTreeViewDU extends TreeViewDU<ContainerTypeGeneric<typeof ValidatorType>> {
-  protected valueChanged: Validator | null = null;
-  protected _rootNode: BranchNodeStruct<Validator>;
-
-  constructor(readonly type: ContainerTypeGeneric<typeof ValidatorType>, node: Node) {
-    super();
-    this._rootNode = node as BranchNodeStruct<Validator>;
-  }
-
-  get node(): Node {
-    return this._rootNode;
-  }
-
-  get cache(): void {
-    return;
-  }
-
-  commit(): void {
-    if (this.valueChanged !== null) {
-      this._rootNode = this.type.value_toTree(this.valueChanged) as BranchNodeStruct<Validator>;
-    }
-
-    if (this._rootNode.h0 === null) {
-      this.valueToMerkleBytes(singleLevel3ByteView, singleLevel4Bytes);
-      // level 4 hash
-      const pubkeyRoot = digestNLevelUnsafe(singleLevel4Bytes, 1);
-      if (pubkeyRoot.length !== 32) {
-        throw new Error(`Invalid pubkeyRoot length, expect 32, got ${pubkeyRoot.length}`);
-      }
-      singleLevel3ByteView.uint8Array.set(pubkeyRoot, 0);
-      // level 3 hash
-      const validatorRoot = digestNLevelUnsafe(singleLevel3ByteView.uint8Array, 3);
-      if (validatorRoot.length !== 32) {
-        throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`);
-      }
-      const hashObject = byteArrayToHashObject(validatorRoot);
-      this._rootNode.applyHash(hashObject);
-    }
-    this.valueChanged = null;
-  }
-
-  get pubkey(): Uint8Array {
-    return (this.valueChanged || this._rootNode.value).pubkey;
-  }
-
-  set pubkey(value: Uint8Array) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.pubkey = value.slice();
-  }
-
-  get withdrawalCredentials(): Uint8Array {
-    return (this.valueChanged || this._rootNode.value).withdrawalCredentials;
-  }
-
-  set withdrawalCredentials(value: Uint8Array) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.withdrawalCredentials = value.slice();
-  }
-
-  get effectiveBalance(): number {
-    return (this.valueChanged || this._rootNode.value).effectiveBalance;
-  }
-
-  set effectiveBalance(value: number) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.effectiveBalance = value;
-  }
-
-  get slashed(): boolean {
-    return (this.valueChanged || this._rootNode.value).slashed;
-  }
-
-  set slashed(value: boolean) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.slashed = value;
-  }
-
-  get activationEligibilityEpoch(): number {
-    return (this.valueChanged || this._rootNode.value).activationEligibilityEpoch;
-  }
-
-  set activationEligibilityEpoch(value: number) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.activationEligibilityEpoch = value;
-  }
-
-  get activationEpoch(): number {
-    return (this.valueChanged || this._rootNode.value).activationEpoch;
-  }
-
-  set activationEpoch(value: number) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.activationEpoch = value;
-  }
-
-  get exitEpoch(): number {
-    return (this.valueChanged || this._rootNode.value).exitEpoch;
-  }
-
-  set exitEpoch(value: number) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.exitEpoch = value;
-  }
-
-  get withdrawableEpoch(): number {
-    return (this.valueChanged || this._rootNode.value).withdrawableEpoch;
-  }
-
-  set withdrawableEpoch(value: number) {
-    if (this.valueChanged === null) {
-      this.valueChanged = this.type.clone(this._rootNode.value);
-    }
-
-    this.valueChanged.withdrawableEpoch = value;
-  }
-
-  /**
-   * Write to level3 and level4 bytes to compute merkle root. Note that this is to compute
-   * merkle root and it's different from serialization (which is more compressed).
-   * pub0 + pub1 are at level4, they will be hashed to 1st chunked of level 3
-   * then use 8 chunks of level 3 to compute the root hash.
-   *           reserved     withdr      eff         sla         actElig     act         exit        with
-   * level 3 |----------|----------|----------|----------|----------|----------|----------|----------|
-   *
-   *           pub0        pub1
-   * level4  |----------|----------|
-   *
-   */
-  valueToMerkleBytes(level3: ByteViews, level4: Uint8Array): void {
-    if (level3.uint8Array.byteLength !== 8 * CHUNK_SIZE) {
-      throw Error(`Expected level3 to be 8 * CHUNK_SIZE bytes, got ${level3.uint8Array.byteLength}`);
-    }
-    if (level4.length !== 2 * CHUNK_SIZE) {
-      throw Error(`Expected level4 to be 2 * CHUNK_SIZE bytes, got ${level4.length}`);
-    }
-    // in case pushing a new validator to array, valueChanged could be null
-    const value = this.valueChanged ?? this._rootNode.value;
-    validatorToMerkleBytes(level3, level4, value);
-  }
-
-  /**
-   * Batch hash flow: parent will compute hash and call this function
-   */
-  commitToHashObject(ho: HashObject): void {
-    // (this.valueChanged === null means this viewDU is new
-    if (this.valueChanged !== null) {
-      this._rootNode = this.type.value_toTree(this.valueChanged) as BranchNodeStruct<Validator>;
-    }
-    this._rootNode.applyHash(ho);
-    this.valueChanged = null;
-  }
-
-  protected clearCache(): void {
-    this.valueChanged = null;
-  }
-
-  get name(): string {
-    return this.type.typeName;
-  }
-}
-
- /**
-  * Write to level3 and level4 bytes to compute merkle root. Note that this is to compute
-  * merkle root and it's different from serialization (which is more compressed).
-  * pub0 + pub1 are at level4, they will be hashed to 1st chunked of level 3
-  * then use 8 chunks of level 3 to compute the root hash.
-  *           reserved     withdr      eff         sla         actElig     act         exit        with
-  * level 3 |----------|----------|----------|----------|----------|----------|----------|----------|
-  *
-  *           pub0        pub1
-  * level4  |----------|----------|
-  *
-  */
-export function validatorToMerkleBytes(level3: ByteViews, level4: Uint8Array, value: Validator): void {
-  const { pubkey, withdrawalCredentials, effectiveBalance, slashed, activationEligibilityEpoch, activationEpoch, exitEpoch, withdrawableEpoch } = value;
-  const { uint8Array: outputLevel3, dataView } = level3;
-
-  // pubkey = 48 bytes which is 2 * CHUNK_SIZE
-  level4.set(pubkey, 0);
-  let offset = CHUNK_SIZE;
-  outputLevel3.set(withdrawalCredentials, offset);
-  offset += CHUNK_SIZE;
-  // effectiveBalance is UintNum64
-  dataView.setUint32(offset, effectiveBalance & 0xffffffff, true);
-  dataView.setUint32(offset + 4, (effectiveBalance / NUMBER_2_POW_32) & 0xffffffff, true);
-
-  offset += CHUNK_SIZE;
-  // output[offset] = validator.slashed ? 1 : 0;
-  dataView.setUint32(offset, slashed ? 1 : 0, true);
-  offset += CHUNK_SIZE;
-  writeEpochInf(dataView, offset, activationEligibilityEpoch);
-  offset += CHUNK_SIZE;
-  writeEpochInf(dataView, offset, activationEpoch);
-  offset += CHUNK_SIZE;
-  writeEpochInf(dataView, offset, exitEpoch);
-  offset += CHUNK_SIZE;
-  writeEpochInf(dataView, offset, withdrawableEpoch);
-}
-
-/**
- * Write an epoch to DataView at offset.
- */
-function writeEpochInf(dataView: DataView, offset: number, value: number): void {
-  if (value === Infinity) {
-    dataView.setUint32(offset, 0xffffffff, true);
-    offset += UINT32_SIZE;
-    dataView.setUint32(offset, 0xffffffff, true);
-    offset += UINT32_SIZE;
-  } else {
-    dataView.setUint32(offset, value & 0xffffffff, true);
-    offset += UINT32_SIZE;
-    dataView.setUint32(offset, (value / NUMBER_2_POW_32) & 0xffffffff, true);
-    offset += UINT32_SIZE;
-  }
-}
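[Editor's note] The removed view computed a validator root in two fixed steps, which the tests later in this patch replay: hash the two pubkey chunks down one level, splice that root into chunk 0, then hash the eight level-3 chunks. In outline, assuming digestNLevelUnsafe(data, nLevel) as defined in the deleted hashtree.ts:

  // 8 chunks at level 3; pubkey (48 bytes) spills into 2 chunks at level 4
  const level3 = new Uint8Array(8 * 32);
  const level4 = new Uint8Array(2 * 32);
  // ... fields written by validatorToMerkleBytes(...) as shown above ...
  const pubkeyRoot = digestNLevelUnsafe(level4, 1);            // 2 chunks -> 32 bytes
  level3.set(pubkeyRoot, 0);                                   // becomes level-3 chunk 0
  const validatorRoot = digestNLevelUnsafe(level3, 3).slice(); // 8 chunks -> root (copy: buffer is reused)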
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts
deleted file mode 100644
index 438ebe7f..00000000
--- a/packages/ssz/test/unit/lodestarTypes/phase0/listValidator.test.ts
+++ /dev/null
@@ -1,82 +0,0 @@
-import { ListCompositeType } from "../../../../src/type/listComposite";
-import { ValidatorType } from "../../../lodestarTypes/phase0/validator";
-import {
-  preset,
-} from "../../../lodestarTypes/params";
-import { ssz } from "../../../lodestarTypes";
-import { expect } from "chai";
-import { ContainerType } from "../../../../src/type/container";
-import { Validator } from "../../../lodestarTypes/phase0";
-const {VALIDATOR_REGISTRY_LIMIT} = preset;
-
-describe("ListValidator ssz type", function () {
-  const seedValidator = {
-    activationEligibilityEpoch: 10,
-    activationEpoch: 11,
-    exitEpoch: Infinity,
-    slashed: false,
-    withdrawableEpoch: 13,
-    pubkey: Buffer.alloc(48, 100),
-    withdrawalCredentials: Buffer.alloc(32, 100),
-    effectiveBalance: 32000000000,
-  };
-
-  const testCases = [32, 33, 34, 35];
-  const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"});
-  const oldValidatorsType = new ListCompositeType(ValidatorContainer, VALIDATOR_REGISTRY_LIMIT);
-  for (const numValidators of testCases) {
-    it (`should commit ${numValidators} validators`, () => {
-      const validators = Array.from({length: numValidators}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i}));
-      const oldViewDU = oldValidatorsType.toViewDU(validators);
-      const newViewDU = ssz.phase0.Validators.toViewDU(validators);
-      // modify all validators
-      for (let i = 0; i < numValidators; i++) {
-        oldViewDU.get(i).activationEpoch = 2024;
-        newViewDU.get(i).activationEpoch = 2024;
-      }
-      expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot());
-      expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
-    });
-  }
-
-  const testCases2 = [[1], [3, 5], [1,9, 7]];
-  const numValidator = 33;
-  for (const modifiedIndices of testCases2) {
-    it(`should modify ${modifiedIndices.length} validators`, () => {
-      const validators = Array.from({length: numValidator}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i}));
-      const oldViewDU = oldValidatorsType.toViewDU(validators);
-      const newViewDU = ssz.phase0.Validators.toViewDU(validators);
-      for (const index of modifiedIndices) {
-        oldViewDU.get(index).activationEpoch = 2024;
-        newViewDU.get(index).activationEpoch = 2024;
-      }
-      expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.hashTreeRoot());
-      expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
-    })
-  }
-
-  const testCases3 = [1, 3, 5, 7];
-  for (const numPush of testCases3) {
-    it(`should push ${numPush} validators`, () => {
-      const validators = Array.from({length: numValidator}, (_, i) => ({...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + i}));
-      const oldViewDU = oldValidatorsType.toViewDU(validators);
-      const newViewDU = ssz.phase0.Validators.toViewDU(validators);
-      const newValidators: Validator[] = [];
-      // this ensure the commit() should update nodes array
-      newViewDU.getAllReadonlyValues();
-      for (let i = 0; i < numPush; i++) {
-        const validator = {...seedValidator, withdrawableEpoch: seedValidator.withdrawableEpoch + numValidator + i};
-        newValidators.push(validator);
-        oldViewDU.push(ValidatorContainer.toViewDU(validator));
-        newViewDU.push(ssz.phase0.Validator.toViewDU(validator));
-      }
-      oldViewDU.commit();
-      expect(newViewDU.hashTreeRoot()).to.be.deep.equal(oldViewDU.node.root);
-      expect(newViewDU.serialize()).to.be.deep.equal(oldViewDU.serialize());
-      const allValidators = newViewDU.getAllReadonlyValues();
-      for (let i = 0; i < numPush; i++) {
-        expect(allValidators[numValidator + i]).to.be.deep.equal(newValidators[i]);
-      }
-    })
-  }
-});
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
index b6564d43..a7d44e6b 100644
--- a/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
+++ b/packages/ssz/test/unit/lodestarTypes/phase0/validator.test.ts
@@ -2,9 +2,7 @@ import { BranchNode, LeafNode, Node, digestNLevelUnsafe, subtreeFillToContents } from "@chainsafe/persistent-merkle-tree";
 import {ContainerType} from "../../../../../ssz/src/type/container";
 import {ssz} from "../../../lodestarTypes";
 import {ValidatorType} from "../../../lodestarTypes/phase0/validator";
-import {ValidatorTreeViewDU} from "../../../lodestarTypes/phase0/viewDU/validator";
 import { expect } from "chai";
-import { byteArrayToHashObject } from "@chainsafe/as-sha256";
 
 const ValidatorContainer = new ContainerType(ValidatorType, {typeName: "Validator", jsonCase: "eth2"});
 
@@ -41,34 +39,4 @@ describe("Validator ssz types", function () {
     }
   });
 
-  it("ViewDU.commitToHashObject()", () => {
-    // transform validator from 0 to 1
-    // TODO - batch: avoid this type casting
-    const viewDU = ssz.phase0.Validator.toViewDU(validators[0]) as ValidatorTreeViewDU;
-    viewDU.effectiveBalance = validators[1].effectiveBalance;
-    viewDU.slashed = validators[1].slashed;
-    // same logic to viewDU.commit();
-    // validator has 8 nodes at level 3
-    const singleLevel3Bytes = new Uint8Array(8 * 32);
-    const singleLevel3ByteView = {uint8Array: singleLevel3Bytes, dataView: new DataView(singleLevel3Bytes.buffer)};
-    // validator has 2 nodes at level 4 (pubkey has 48 bytes = 2 * nodes)
-    const singleLevel4Bytes = new Uint8Array(2 * 32);
-    viewDU.valueToMerkleBytes(singleLevel3ByteView, singleLevel4Bytes);
-    // level 4 hash
-    const pubkeyRoot = digestNLevelUnsafe(singleLevel4Bytes, 1);
-    if (pubkeyRoot.length !== 32) {
-      throw new Error(`Invalid pubkeyRoot length, expect 32, got ${pubkeyRoot.length}`);
-    }
-    singleLevel3ByteView.uint8Array.set(pubkeyRoot, 0);
-    // level 3 hash
-    const validatorRoot = digestNLevelUnsafe(singleLevel3ByteView.uint8Array, 3);
-    if (validatorRoot.length !== 32) {
-      throw new Error(`Invalid validatorRoot length, expect 32, got ${validatorRoot.length}`);
-    }
-    const hashObject = byteArrayToHashObject(validatorRoot);
-    viewDU.commitToHashObject(hashObject);
-    const expectedRoot = ValidatorContainer.hashTreeRoot(validators[1]);
-    expect(viewDU.node.root).to.be.deep.equal(expectedRoot);
-    expect(viewDU.hashTreeRoot()).to.be.deep.equal(expectedRoot);
-  });
 });
diff --git a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts b/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts
deleted file mode 100644
index 96085875..00000000
--- a/packages/ssz/test/unit/lodestarTypes/phase0/viewDU/validatorNodeStruct.test.ts
+++ /dev/null
@@ -1,53 +0,0 @@
-import { digestNLevelUnsafe } from "@chainsafe/persistent-merkle-tree";
-import { validatorToMerkleBytes } from "../../../../lodestarTypes/phase0/viewDU/validator";
-import { HashObject } from "@chainsafe/as-sha256";
-import { ValidatorNodeStruct } from "../../../../lodestarTypes/phase0/validator";
-import { expect } from "chai";
-import { Validator } from "../../../../lodestarTypes/phase0/sszTypes";
-
-describe("validatorNodeStruct", () => {
-  const seedValidator = {
-    activationEligibilityEpoch: 10,
-    activationEpoch: 11,
-    exitEpoch: Infinity,
-    slashed: false,
-    withdrawableEpoch: 13,
-    pubkey: Buffer.alloc(48, 100),
-    withdrawalCredentials: Buffer.alloc(32, 100),
-  };
-
-  const validators = [
-    {...seedValidator, effectiveBalance: 31000000000, slashed: false},
-    {...seedValidator, effectiveBalance: 32000000000, slashed: true},
-  ];
-
-  it("should populate validator value to merkle bytes", () => {
-    for (const validator of validators) {
-      const expectedRoot0 = ValidatorNodeStruct.hashTreeRoot(validator);
-      // validator has 8 fields
-      const level3 = new Uint8Array(32 * 8);
-      const dataView = new DataView(level3.buffer, level3.byteOffset, level3.byteLength);
-      // pubkey takes 2 chunks, has to go to another level
-      const level4 = new Uint8Array(32 * 2);
-      validatorToMerkleBytes({uint8Array: level3, dataView}, level4, validator);
-      // additional slice() call make it easier to debug
-      const pubkeyRoot = digestNLevelUnsafe(level4, 1).slice();
-      level3.set(pubkeyRoot, 0);
-      const root = digestNLevelUnsafe(level3, 3).slice();
-      const expectedRootNode2 = Validator.value_toTree(validator);
-      expect(root).to.be.deep.equals(expectedRoot0);
-      expect(root).to.be.deep.equals(expectedRootNode2.root);
-    }
-  })
-});
-
-function expectEqualNode(node1: HashObject, node2: HashObject, message: string) {
-  expect(node1.h0 >>> 0).to.be.equal(node2.h0 >>> 0, `${message} h0`);
-  expect(node1.h1 >>> 0).to.be.equal(node2.h1 >>> 0, `${message} h1`);
-  expect(node1.h2 >>> 0).to.be.equal(node2.h2 >>> 0, `${message} h2`);
-  expect(node1.h3 >>> 0).to.be.equal(node2.h3 >>> 0, `${message} h3`);
-  expect(node1.h4 >>> 0).to.be.equal(node2.h4 >>> 0, `${message} h4`);
-  expect(node1.h5 >>> 0).to.be.equal(node2.h5 >>> 0, `${message} h5`);
-  expect(node1.h6 >>> 0).to.be.equal(node2.h6 >>> 0, `${message} h6`);
-  expect(node1.h7 >>> 0).to.be.equal(node2.h7 >>> 0, `${message} h7`);
-}
diff --git a/setHasher.mjs b/setHasher.mjs
index c210cdba..821986ea 100644
--- a/setHasher.mjs
+++ b/setHasher.mjs
@@ -1,5 +1,5 @@
 // Set the hasher to hashtree
 // Used to run benchmarks with visibility into hashtree performance, useful for Lodestar
 import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js";
-import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/hashtree.js";
+import {hasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/as-sha256.js";
 setHasher(hasher);
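[Editor's note] The benchmark entry point pins the hasher at startup; after this patch it selects as-sha256. The same mechanism selects any remaining implementation, e.g. (a sketch, assuming the noble hasher ships at the analogous lib path to the ones imported above):

  import {setHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/index.js";
  import {hasher as nobleHasher} from "@chainsafe/persistent-merkle-tree/lib/hasher/noble.js";
  setHasher(nobleHasher); // or lib/hasher/as-sha256.js, as the patched file does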