From 99671a2626552fcbbcb91954e188aa4fcdad76cd Mon Sep 17 00:00:00 2001
From: harkamal
Date: Wed, 24 Jan 2024 18:40:25 +0530
Subject: [PATCH] feat: implement peerDAS on electra

add some presets
add further params and types
add data column to types repo and network
move max request data columns to preset
add the data columns data in block input and fix breaking errors in seen gossip block input
handle data columns in gossip and the seen gossip block input
further propagate fork-aware block data and resolve build/type issues
further handle data columns sync by range and by root and fork-aware data handling
fix issues
chore: update c-kzg to peerDAS version
feat: add peerDAS c-kzg functions to interface
fix the lookups
handle the publishing flow
various sync fix attempts
fix compute blob sidecar
various misc debugging and fixes
debug and apply fixes and get by-range and by-root sync to work with full custody
enable syncing with lower custody requirement
use node peer id rather than a dummy string
get and use the node id from the ENR and correctly compute subnets and column indexes
filter out and connect to peers only matching our custody requirement
try adding custody requirement
add protection for subnet calc
get the sync working with devnet 0
correctly set the ENR with custody subnet info
rebase fixes
---
 packages/beacon-node/package.json | 2 +-
 .../src/api/impl/beacon/blocks/index.ts | 50 ++-
 .../src/chain/blocks/importBlock.ts | 31 +-
 .../beacon-node/src/chain/blocks/types.ts | 58 +++-
 .../blocks/verifyBlocksDataAvailability.ts | 47 ++-
 .../src/chain/blocks/writeBlockInputToDb.ts | 69 +++-
 packages/beacon-node/src/chain/chain.ts | 6 +-
 .../chain/errors/dataColumnSidecarError.ts | 17 +
 .../beacon-node/src/chain/errors/index.ts | 1 +
 .../chain/seenCache/seenGossipBlockInput.ts | 294 +++++++++++++-----
 .../src/chain/validation/dataColumnSidecar.ts | 66 ++++
 packages/beacon-node/src/db/beacon.ts | 6 +
 packages/beacon-node/src/db/buckets.ts | 3 +
 packages/beacon-node/src/db/interface.ts | 4 +
 .../src/db/repositories/dataColumnSidecars.ts | 50 +++
 .../repositories/dataColumnSidecarsArchive.ts | 28 ++
 .../beacon-node/src/db/repositories/index.ts | 2 +
 packages/beacon-node/src/network/events.ts | 3 +-
 .../src/network/gossip/interface.ts | 6 +-
 .../beacon-node/src/network/gossip/topic.ts | 20 ++
 packages/beacon-node/src/network/interface.ts | 14 +-
 packages/beacon-node/src/network/metadata.ts | 1 +
 packages/beacon-node/src/network/network.ts | 55 +++-
 .../beacon-node/src/network/peers/discover.ts | 44 ++-
 .../src/network/peers/peerManager.ts | 8 +-
 .../src/network/peers/peersData.ts | 2 +
 .../network/processor/extractSlotRootFns.ts | 9 +
 .../src/network/processor/gossipHandlers.ts | 130 +++++++-
 .../network/processor/gossipQueues/index.ts | 5 +
 .../src/network/processor/index.ts | 8 +-
 .../src/network/reqresp/ReqRespBeaconNode.ts | 7 +
 .../reqresp/beaconBlocksMaybeBlobsByRange.ts | 144 ++++++++-
 .../reqresp/beaconBlocksMaybeBlobsByRoot.ts | 208 ++++++++++---
 .../reqresp/handlers/beaconBlocksByRange.ts | 2 +-
 .../handlers/dataColumnSidecarsByRange.ts | 125 ++++++++
 .../handlers/dataColumnSidecarsByRoot.ts | 90 ++++++
 .../src/network/reqresp/handlers/index.ts | 11 +
 .../src/network/reqresp/protocols.ts | 12 +
 .../src/network/reqresp/rateLimit.ts | 12 +
 .../beacon-node/src/network/reqresp/types.ts | 16 +-
 packages/beacon-node/src/node/nodejs.ts | 5 +
 packages/beacon-node/src/sync/range/chain.ts | 19 +-
 packages/beacon-node/src/sync/range/range.ts | 3 +-
 packages/beacon-node/src/sync/sync.ts | 2 +-
packages/beacon-node/src/sync/unknownBlock.ts | 19 +- packages/beacon-node/src/util/blobs.ts | 57 +++- packages/beacon-node/src/util/dataColumns.ts | 70 +++++ packages/beacon-node/src/util/kzg.ts | 18 ++ packages/beacon-node/src/util/sszBytes.ts | 20 ++ .../db/api/repositories/dataColumn.test.ts | 103 ++++++ .../test/unit/network/gossip/topic.test.ts | 6 + .../test/unit/util/dataColumn.test.ts | 14 + .../beacon-node/test/utils/node/beacon.ts | 7 +- packages/cli/src/cmds/beacon/handler.ts | 8 +- .../cli/src/cmds/beacon/initPeerIdAndEnr.ts | 21 +- packages/cli/src/cmds/bootnode/handler.ts | 2 +- .../config/src/chainConfig/configs/mainnet.ts | 4 + .../config/src/chainConfig/configs/minimal.ts | 4 + packages/config/src/chainConfig/types.ts | 6 + packages/params/src/forkName.ts | 6 + packages/params/src/index.ts | 15 + packages/params/src/presets/mainnet.ts | 8 + packages/params/src/presets/minimal.ts | 8 + packages/params/src/types.ts | 16 + packages/types/src/electra/sszTypes.ts | 58 +++- packages/types/src/electra/types.ts | 12 +- packages/types/src/primitive/sszTypes.ts | 1 + packages/types/src/primitive/types.ts | 1 + packages/validator/src/util/params.ts | 12 + yarn.lock | 12 +- 70 files changed, 1967 insertions(+), 236 deletions(-) create mode 100644 packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts create mode 100644 packages/beacon-node/src/chain/validation/dataColumnSidecar.ts create mode 100644 packages/beacon-node/src/db/repositories/dataColumnSidecars.ts create mode 100644 packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts create mode 100644 packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts create mode 100644 packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts create mode 100644 packages/beacon-node/src/util/dataColumns.ts create mode 100644 packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts create mode 100644 packages/beacon-node/test/unit/util/dataColumn.test.ts diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index 16bb5ce5ad28..f5e02a5a1084 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -132,7 +132,7 @@ "@lodestar/utils": "^1.18.1", "@lodestar/validator": "^1.18.1", "@multiformats/multiaddr": "^12.1.3", - "c-kzg": "^2.1.2", + "c-kzg": "matthewkeil/c-kzg-4844#67bf9367817f0fa5ebd390aeb8c3ae88bdbc170e", "datastore-core": "^9.1.1", "datastore-level": "^10.1.1", "deepmerge": "^4.3.1", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index c568274177e6..83d27b3faab5 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,9 +1,9 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {routes, ServerApi, ResponseFormat} from "@lodestar/api"; import {computeEpochAtSlot, computeTimeAtSlot, reconstructFullBlockOrContents} from "@lodestar/state-transition"; -import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; +import {SLOTS_PER_HISTORICAL_ROOT, ForkName} from "@lodestar/params"; import {sleep, toHex} from "@lodestar/utils"; -import {allForks, deneb, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; +import {allForks, deneb, electra, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; import { BlockSource, getBlockInput, @@ -11,10 +11,13 @@ import { BlockInput, BlobsSource, BlockInputDataBlobs, + 
BlockInputDataDataColumns, + DataColumnsSource, + BlockInputData, } from "../../../../chain/blocks/types.js"; import {promiseAllMaybeAsync} from "../../../../util/promises.js"; import {isOptimisticBlock} from "../../../../util/forkChoice.js"; -import {computeBlobSidecars} from "../../../../util/blobs.js"; +import {computeBlobSidecars, computeDataColumnSidecars} from "../../../../util/blobs.js"; import {BlockError, BlockErrorCode, BlockGossipError} from "../../../../chain/errors/index.js"; import {OpSource} from "../../../../metrics/validatorMonitor.js"; import {NetworkEvent} from "../../../../network/index.js"; @@ -49,17 +52,40 @@ export function getBeaconBlockApi({ opts: PublishBlockOpts = {} ) => { const seenTimestampSec = Date.now() / 1000; - let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, blobSidecars: deneb.BlobSidecars; + let blockForImport: BlockInput, + signedBlock: allForks.SignedBeaconBlock, + blobSidecars: deneb.BlobSidecars, + dataColumnSidecars: electra.DataColumnSidecars; if (isSignedBlockContents(signedBlockOrContents)) { ({signedBlock} = signedBlockOrContents); - blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents); - const blockData = { - fork: config.getForkName(signedBlock.message.slot), - blobs: blobSidecars, - blobsSource: BlobsSource.api, - blobsBytes: blobSidecars.map(() => null), - } as BlockInputDataBlobs; + const fork = config.getForkName(signedBlock.message.slot); + let blockData: BlockInputData; + if (fork === ForkName.electra) { + dataColumnSidecars = computeDataColumnSidecars(config, signedBlock, signedBlockOrContents); + blockData = { + fork, + numColumns: dataColumnSidecars.length, + // custodyColumns is a 1 based index of ith column present in dataColumns[custodyColumns[i-1]] + custodyColumns: new Uint8Array(Array.from({length: dataColumnSidecars.length}, (_, j) => 1 + j)), + dataColumns: dataColumnSidecars, + dataColumnsBytes: dataColumnSidecars.map(() => null), + dataColumnsSource: DataColumnsSource.api, + } as BlockInputDataDataColumns; + blobSidecars = []; + } else if (fork === ForkName.deneb) { + blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents); + blockData = { + fork, + blobs: blobSidecars, + blobsSource: BlobsSource.api, + blobsBytes: blobSidecars.map(() => null), + } as BlockInputDataBlobs; + dataColumnSidecars = []; + } else { + throw Error(`Invalid data fork=${fork} for publish`); + } + blockForImport = getBlockInput.availableData( config, signedBlock, @@ -71,6 +97,7 @@ export function getBeaconBlockApi({ } else { signedBlock = signedBlockOrContents; blobSidecars = []; + dataColumnSidecars = []; // TODO: Once API supports submitting data as SSZ, replace null with blockBytes blockForImport = getBlockInput.preData(config, signedBlock, BlockSource.api, null); } @@ -206,6 +233,7 @@ export function getBeaconBlockApi({ // b) they might require more hops to reach recipients in peerDAS kind of setup where // blobs might need to hop between nodes because of partial subnet subscription ...blobSidecars.map((blobSidecar) => () => network.publishBlobSidecar(blobSidecar)), + ...dataColumnSidecars.map((dataColumnSidecar) => () => network.publishDataColumnSidecar(dataColumnSidecar)), () => network.publishBeaconBlock(signedBlock) as Promise, () => // there is no rush to persist block since we published it to gossip anyway diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 9c467c26ca50..f18745561add 100644 --- 
a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -1,6 +1,6 @@ import {toHexString} from "@chainsafe/ssz"; import {capella, ssz, allForks, altair} from "@lodestar/types"; -import {ForkSeq, INTERVALS_PER_SLOT, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {ForkName, ForkSeq, INTERVALS_PER_SLOT, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; import { CachedBeaconStateAltair, computeEpochAtSlot, @@ -113,18 +113,23 @@ export async function importBlock( // out of data range blocks and import then in forkchoice although one would not be able to // attest and propose with such head similar to optimistic sync if (blockInput.type === BlockInputType.availableData) { - const {blobsSource, blobs} = blockInput.blockData; - - this.metrics?.importBlock.blobsBySource.inc({blobsSource}); - for (const blobSidecar of blobs) { - const {index, kzgCommitment} = blobSidecar; - this.emitter.emit(routes.events.EventType.blobSidecar, { - blockRoot: blockRootHex, - slot: blockSlot, - index, - kzgCommitment: toHexString(kzgCommitment), - versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)), - }); + const {blockData} = blockInput; + if (blockData.fork === ForkName.deneb) { + const {blobsSource, blobs} = blockData; + + this.metrics?.importBlock.blobsBySource.inc({blobsSource}); + for (const blobSidecar of blobs) { + const {index, kzgCommitment} = blobSidecar; + this.emitter.emit(routes.events.EventType.blobSidecar, { + blockRoot: blockRootHex, + slot: blockSlot, + index, + kzgCommitment: toHexString(kzgCommitment), + versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)), + }); + } + } else if (blockData.fork === ForkName.electra) { + // TODO peerDAS build and emit the event for the datacolumns } } }); diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts index ec9c2037a2c5..f349d081b62c 100644 --- a/packages/beacon-node/src/chain/blocks/types.ts +++ b/packages/beacon-node/src/chain/blocks/types.ts @@ -1,6 +1,6 @@ import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition"; import {MaybeValidExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice"; -import {allForks, deneb, Slot, RootHex} from "@lodestar/types"; +import {allForks, deneb, Slot, RootHex, electra, ColumnIndex} from "@lodestar/types"; import {ForkSeq, ForkName} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; @@ -28,26 +28,52 @@ export enum BlobsSource { byRoot = "req_resp_by_root", } +export enum DataColumnsSource { + gossip = "gossip", + api = "api", + byRange = "req_resp_by_range", + byRoot = "req_resp_by_root", +} + export enum GossipedInputType { block = "block", blob = "blob", + dataColumn = "dataColumn", } export type BlobsCache = Map; +export type DataColumnsCache = Map< + number, + {dataColumnSidecar: electra.DataColumnSidecar; dataColumnBytes: Uint8Array | null} +>; type ForkBlobsInfo = {fork: ForkName.deneb}; type BlobsData = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource}; export type BlockInputDataBlobs = ForkBlobsInfo & BlobsData; -export type BlockInputData = BlockInputDataBlobs; -type BlobsInputCache = {blobsCache: BlobsCache}; -export type BlockInputCacheBlobs = ForkBlobsInfo & BlobsInputCache; +type ForkDataColumnsInfo = {fork: ForkName.electra}; +type DataColumnsData = { + // marker of that columns are to be custodied + numColumns: 
number; + custodyColumns: Uint8Array; + dataColumns: electra.DataColumnSidecars; + dataColumnsBytes: (Uint8Array | null)[]; + dataColumnsSource: DataColumnsSource; +}; +export type BlockInputDataDataColumns = ForkDataColumnsInfo & DataColumnsData; + +export type BlockInputData = BlockInputDataBlobs | BlockInputDataDataColumns; -export type BlockInputBlobs = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource}; type Availability = {availabilityPromise: Promise; resolveAvailability: (data: T) => void}; +type BlobsInputCache = {blobsCache: BlobsCache}; +export type BlockInputCacheBlobs = ForkBlobsInfo & BlobsInputCache; type CachedBlobs = BlobsInputCache & Availability; -export type CachedData = ForkBlobsInfo & CachedBlobs; + +type DataColumnsInputCache = {dataColumnsCache: DataColumnsCache}; +type CachedDataColumns = DataColumnsInputCache & Availability; + +export type CachedData = (ForkBlobsInfo & CachedBlobs) | (ForkDataColumnsInfo & CachedDataColumns); export type BlockInput = {block: allForks.SignedBeaconBlock; source: BlockSource; blockBytes: Uint8Array | null} & ( | {type: BlockInputType.preData | BlockInputType.outOfRangeData} @@ -157,6 +183,26 @@ export function getBlockInputBlobs(blobsCache: BlobsCache): Omit { + const dataColumns = []; + const dataColumnsBytes = []; + + for (const index of columnIndexes) { + const dataColumnCache = dataColumnsCache.get(index); + if (dataColumnCache === undefined) { + // check if the index is correct as per the custody columns + throw Error(`Missing dataColumnCache at index=${index}`); + } + const {dataColumnSidecar, dataColumnBytes} = dataColumnCache; + dataColumns.push(dataColumnSidecar); + dataColumnsBytes.push(dataColumnBytes); + } + return {dataColumns, dataColumnsBytes}; +} + export enum AttestationImportOpt { Skip, Force, diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts index 8393c91063de..9147ecbd82d1 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts @@ -3,10 +3,19 @@ import {DataAvailabilityStatus} from "@lodestar/fork-choice"; import {ChainForkConfig} from "@lodestar/config"; import {deneb, UintNum64} from "@lodestar/types"; import {Logger} from "@lodestar/utils"; +import {ForkName} from "@lodestar/params"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {validateBlobSidecars} from "../validation/blobSidecar.js"; +import {validateDataColumnsSidecars} from "../validation/dataColumnSidecar.js"; import {Metrics} from "../../metrics/metrics.js"; -import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation, getBlockInput} from "./types.js"; +import { + BlockInput, + BlockInputType, + ImportBlockOpts, + BlobSidecarValidation, + getBlockInput, + BlockInputData, +} from "./types.js"; // we can now wait for full 12 seconds because unavailable block sync will try pulling // the blobs from the network anyway after 500ms of seeing the block @@ -88,27 +97,37 @@ async function maybeValidateBlobs( // run full validation const {block} = blockInput; const blockSlot = block.message.slot; - - const blobsData = - blockInput.type === BlockInputType.availableData - ? 
blockInput.blockData - : await raceWithCutoff(chain, blockInput, blockInput.cachedData.availabilityPromise); - const {blobs} = blobsData; - const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body; const beaconBlockRoot = chain.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); - - // if the blob siddecars have been individually verified then we can skip kzg proof check - // but other checks to match blobs with block data still need to be performed - const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; - validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); + const blockData = + blockInput.type === BlockInputType.availableData + ? blockInput.blockData + : await raceWithCutoff( + chain, + blockInput, + blockInput.cachedData.availabilityPromise as Promise + ); + + if (blockData.fork === ForkName.deneb) { + const {blobs} = blockData; + + // if the blob siddecars have been individually verified then we can skip kzg proof check + // but other checks to match blobs with block data still need to be performed + const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; + validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); + } else if (blockData.fork === ForkName.electra) { + const {dataColumns} = blockData; + const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; + // might require numColumns, custodyColumns from blockData as input to below + validateDataColumnsSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, dataColumns, {skipProofsCheck}); + } const availableBlockInput = getBlockInput.availableData( chain.config, blockInput.block, blockInput.source, blockInput.blockBytes, - blobsData + blockData ); return {dataAvailabilityStatus: DataAvailabilityStatus.Available, availableBlockInput: availableBlockInput}; } diff --git a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts index b0f5ab159591..4601a1c5ebbd 100644 --- a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts +++ b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts @@ -1,4 +1,6 @@ +import {ForkName} from "@lodestar/params"; import {toHex} from "@lodestar/utils"; +import {electra, ssz} from "@lodestar/types"; import {BeaconChain} from "../chain.js"; import {BlockInput, BlockInputType} from "./types.js"; @@ -30,19 +32,44 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI }); if (blockInput.type === BlockInputType.availableData || blockInput.type === BlockInputType.dataPromise) { - const blobSidecars = - blockInput.type == BlockInputType.availableData - ? blockInput.blockData.blobs - : // At this point of import blobs are available and can be safely awaited - (await blockInput.cachedData.availabilityPromise).blobs; + const blockData = + blockInput.type === BlockInputType.availableData + ? 
blockInput.blockData + : await blockInput.cachedData.availabilityPromise; - // NOTE: Old blobs are pruned on archive - fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars})); - this.logger.debug("Persisted blobSidecars to hot DB", { - blobsLen: blobSidecars.length, - slot: block.message.slot, - root: blockRootHex, - }); + // NOTE: Old data is pruned on archive + if (blockData.fork === ForkName.deneb) { + const blobSidecars = blockData.blobs; + fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars})); + this.logger.debug("Persisted blobSidecars to hot DB", { + blobsLen: blobSidecars.length, + slot: block.message.slot, + root: blockRootHex, + }); + } else { + const {numColumns, custodyColumns, dataColumns: dataColumnSidecars} = blockData; + const blobsLen = (block.message as electra.BeaconBlock).body.blobKzgCommitments.length; + + const columnsSize = + ssz.electra.DataColumnSidecar.minSize + + blobsLen * (ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize); + const slot = block.message.slot; + const writeData = { + blockRoot, + slot, + numColumns, + columnsSize, + custodyColumns, + dataColumnSidecars, + }; + fnPromises.push(this.db.dataColumnSidecars.add(writeData)); + + this.logger.debug("Persisted dataColumnSidecars to hot DB", { + dataColumnsLen: dataColumnSidecars.length, + slot: block.message.slot, + root: blockRootHex, + }); + } } } @@ -55,17 +82,28 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI export async function removeEagerlyPersistedBlockInputs(this: BeaconChain, blockInputs: BlockInput[]): Promise { const blockToRemove = []; const blobsToRemove = []; + const dataColumnsToRemove = []; for (const blockInput of blockInputs) { const {block, type} = blockInput; - const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message); + const slot = block.message.slot; + const blockRoot = this.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); const blockRootHex = toHex(blockRoot); if (!this.forkChoice.hasBlockHex(blockRootHex)) { blockToRemove.push(block); if (type === BlockInputType.availableData) { - const blobSidecars = blockInput.blockData.blobs; - blobsToRemove.push({blockRoot, slot: block.message.slot, blobSidecars}); + const {blockData} = blockInput; + if (blockData.fork === ForkName.deneb) { + const blobSidecars = blockData.blobs; + blobsToRemove.push({blockRoot, slot, blobSidecars}); + } else { + const {numColumns, custodyColumns, dataColumns: dataColumnSidecars} = blockData; + const blobsLen = (block.message as electra.BeaconBlock).body.blobKzgCommitments.length; + const columnsSize = ssz.electra.Cell.fixedSize * blobsLen; + + dataColumnsToRemove.push({blockRoot, slot, numColumns, columnsSize, custodyColumns, dataColumnSidecars}); + } } } } @@ -74,5 +112,6 @@ export async function removeEagerlyPersistedBlockInputs(this: BeaconChain, block // TODO: Batch DB operations not with Promise.all but with level db ops this.db.block.batchRemove(blockToRemove), this.db.blobSidecars.batchRemove(blobsToRemove), + this.db.dataColumnSidecars.batchRemove(dataColumnsToRemove), ]); } diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 31171101747f..2ca69465a1a3 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -43,6 +43,7 @@ import {Clock, ClockEvent, IClock} from "../util/clock.js"; import 
{ensureDir, writeIfNotExist} from "../util/file.js"; import {isOptimisticBlock} from "../util/forkChoice.js"; import {BufferPool} from "../util/bufferPool.js"; +import {NodeId} from "../network/subnets/interface.js"; import {BlockProcessor, ImportBlockOpts} from "./blocks/index.js"; import {ChainEventEmitter, ChainEvent} from "./emitter.js"; import {IBeaconChain, ProposerPreparationData, BlockHash, StateGetOpts, CommonBlockBody} from "./interface.js"; @@ -133,7 +134,7 @@ export class BeaconChain implements IBeaconChain { readonly seenSyncCommitteeMessages = new SeenSyncCommitteeMessages(); readonly seenContributionAndProof: SeenContributionAndProof; readonly seenAttestationDatas: SeenAttestationDatas; - readonly seenGossipBlockInput = new SeenGossipBlockInput(); + readonly seenGossipBlockInput: SeenGossipBlockInput; // Seen cache for liveness checks readonly seenBlockAttesters = new SeenBlockAttesters(); @@ -164,6 +165,7 @@ export class BeaconChain implements IBeaconChain { constructor( opts: IChainOptions, { + nodeId, config, db, logger, @@ -175,6 +177,7 @@ export class BeaconChain implements IBeaconChain { executionEngine, executionBuilder, }: { + nodeId: NodeId; config: BeaconConfig; db: IBeaconDb; logger: Logger; @@ -220,6 +223,7 @@ export class BeaconChain implements IBeaconChain { this.seenAggregatedAttestations = new SeenAggregatedAttestations(metrics); this.seenContributionAndProof = new SeenContributionAndProof(metrics); this.seenAttestationDatas = new SeenAttestationDatas(metrics, this.opts?.attDataCacheSlotDistance); + this.seenGossipBlockInput = new SeenGossipBlockInput(nodeId); this.beaconProposerCache = new BeaconProposerCache(opts); this.checkpointBalancesCache = new CheckpointBalancesCache(); diff --git a/packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts b/packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts new file mode 100644 index 000000000000..cc3d27e4652c --- /dev/null +++ b/packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts @@ -0,0 +1,17 @@ +import {Slot, RootHex} from "@lodestar/types"; +import {GossipActionError} from "./gossipValidation.js"; + +export enum DataColumnSidecarErrorCode { + INVALID_INDEX = "DATA_COLUMN_SIDECAR_ERROR_INVALID_INDEX", + + // following errors are adapted from the block errors + FUTURE_SLOT = "DATA_COLUMN_SIDECAR_ERROR_FUTURE_SLOT", + PARENT_UNKNOWN = "DATA_COLUMN_SIDECAR_ERROR_PARENT_UNKNOWN", +} + +export type DataColumnSidecarErrorType = + | {code: DataColumnSidecarErrorCode.INVALID_INDEX; columnIndex: number; gossipIndex: number} + | {code: DataColumnSidecarErrorCode.FUTURE_SLOT; blockSlot: Slot; currentSlot: Slot} + | {code: DataColumnSidecarErrorCode.PARENT_UNKNOWN; parentRoot: RootHex}; + +export class DataColumnSidecarGossipError extends GossipActionError {} diff --git a/packages/beacon-node/src/chain/errors/index.ts b/packages/beacon-node/src/chain/errors/index.ts index 1bd8f8577305..2159b1562e2d 100644 --- a/packages/beacon-node/src/chain/errors/index.ts +++ b/packages/beacon-node/src/chain/errors/index.ts @@ -1,6 +1,7 @@ export * from "./attestationError.js"; export * from "./attesterSlashingError.js"; export * from "./blobSidecarError.js"; +export * from "./dataColumnSidecarError.js"; export * from "./blockError.js"; export * from "./gossipValidation.js"; export * from "./proposerSlashingError.js"; diff --git a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts index 2af62d2844fa..e4eb6b6ded08 100644 --- 
a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts +++ b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts @@ -1,8 +1,8 @@ import {toHexString} from "@chainsafe/ssz"; -import {deneb, RootHex, ssz, allForks} from "@lodestar/types"; +import {deneb, RootHex, ssz, allForks, electra} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {pruneSetToMax} from "@lodestar/utils"; -import {BLOBSIDECAR_FIXED_SIZE, isForkBlobs, ForkName} from "@lodestar/params"; +import {BLOBSIDECAR_FIXED_SIZE, isForkBlobs, ForkName, NUMBER_OF_COLUMNS} from "@lodestar/params"; import { BlockInput, @@ -14,8 +14,13 @@ import { GossipedInputType, getBlockInputBlobs, BlobsSource, + DataColumnsSource, + getBlockInputDataColumns, + BlockInputDataDataColumns, } from "../blocks/types.js"; import {Metrics} from "../../metrics/index.js"; +import {getCustodyColumns} from "../../util/dataColumns.js"; +import {NodeId} from "../../network/subnets/interface.js"; export enum BlockInputAvailabilitySource { GOSSIP = "gossip", @@ -24,7 +29,12 @@ export enum BlockInputAvailabilitySource { type GossipedBlockInput = | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null} - | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null}; + | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null} + | { + type: GossipedInputType.dataColumn; + dataColumnSidecar: electra.DataColumnSidecar; + dataColumnBytes: Uint8Array | null; + }; type BlockInputCacheType = { fork: ForkName; @@ -51,6 +61,8 @@ const MAX_GOSSIPINPUT_CACHE = 5; */ export class SeenGossipBlockInput { private blockInputCache = new Map(); + // TODO assign via constructor + constructor(private nodeId: NodeId) {} prune(): void { pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE); @@ -67,11 +79,16 @@ export class SeenGossipBlockInput { ): | { blockInput: BlockInput; - blockInputMeta: {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number}; + blockInputMeta: + | {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number} + | {pending: GossipedInputType.dataColumn | null; haveColumns: number; expectedColumns: number}; } | { blockInput: NullBlockInput; - blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}; + blockInputMeta: {pending: GossipedInputType.block} & ( + | {haveBlobs: number; expectedBlobs: null} + | {haveColumns: number; expectedColumns: null} + ); } { let blockHex; let blockCache; @@ -88,13 +105,16 @@ export class SeenGossipBlockInput { blockCache.block = signedBlock; blockCache.blockBytes = blockBytes; - } else { + } else if (gossipedInput.type === GossipedInputType.blob) { const {blobSidecar, blobBytes} = gossipedInput; const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); fork = config.getForkName(blobSidecar.signedBlockHeader.message.slot); blockHex = toHexString(blockRoot); blockCache = this.blockInputCache.get(blockHex) ?? 
getEmptyBlockInputCacheEntry(fork); + if (blockCache.cachedData?.fork !== ForkName.deneb) { + throw Error(`blob data at non deneb fork=${blockCache.fork}`); + } // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions blockCache.cachedData?.blobsCache.set(blobSidecar.index, { @@ -102,6 +122,26 @@ export class SeenGossipBlockInput { // easily splice out the unsigned message as blob is a fixed length type blobBytes: blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? null, }); + } else if (gossipedInput.type === GossipedInputType.dataColumn) { + const {dataColumnSidecar, dataColumnBytes} = gossipedInput; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(dataColumnSidecar.signedBlockHeader.message); + fork = config.getForkName(dataColumnSidecar.signedBlockHeader.message.slot); + + blockHex = toHexString(blockRoot); + blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(fork); + if (blockCache.cachedData?.fork !== ForkName.electra) { + throw Error(`blob data at non electra fork=${blockCache.fork}`); + } + + // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions + blockCache.cachedData?.dataColumnsCache.set(dataColumnSidecar.index, { + dataColumnSidecar, + // easily splice out the unsigned message as blob is a fixed length type + dataColumnBytes: dataColumnBytes?.slice(0, dataColumnBytes.length) ?? null, + }); + } else { + // somehow helps resolve typescript that all types have been exausted + throw Error("Invalid gossipedInput type"); } if (!this.blockInputCache.has(blockHex)) { @@ -121,72 +161,164 @@ export class SeenGossipBlockInput { if (cachedData === undefined || !isForkBlobs(cachedData.fork)) { throw Error("Missing or Invalid fork cached Data for deneb+ block"); } - const {blobsCache, resolveAvailability} = cachedData; - // block is available, check if all blobs have shown up - const {slot, body} = signedBlock.message; - const {blobKzgCommitments} = body as deneb.BeaconBlockBody; - const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + if (cachedData.fork === ForkName.deneb) { + const {blobsCache} = cachedData; - if (blobKzgCommitments.length < blobsCache.size) { - throw Error( - `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` - ); + // block is available, check if all blobs have shown up + const {slot, body} = signedBlock.message; + const {blobKzgCommitments} = body as deneb.BeaconBlockBody; + const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + + if (blobKzgCommitments.length < blobsCache.size) { + throw Error( + `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` + ); + } + + if (blobKzgCommitments.length === blobsCache.size) { + const allBlobs = getBlockInputBlobs(blobsCache); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.GOSSIP}); + const {blobs} = allBlobs; + const blockData = { + fork: cachedData.fork, + ...allBlobs, + blobsSource: BlobsSource.gossip, + } as BlockInputDataBlobs; + const blockInput = getBlockInput.availableData( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + blockData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: {pending: null, haveBlobs: blobs.length, expectedBlobs: blobKzgCommitments.length}, + }; + } else { + const blockInput = getBlockInput.dataPromise( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? 
null, + cachedData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: { + pending: GossipedInputType.blob, + haveBlobs: blobsCache.size, + expectedBlobs: blobKzgCommitments.length, + }, + }; + } + } else if (cachedData.fork === ForkName.electra) { + const {dataColumnsCache} = cachedData; + + // block is available, check if all blobs have shown up + const {slot} = signedBlock.message; + const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + + if (NUMBER_OF_COLUMNS < dataColumnsCache.size) { + throw Error( + `Received more dataColumns=${dataColumnsCache.size} than columns=${NUMBER_OF_COLUMNS} for ${blockInfo}` + ); + } + + // get the custody columns and see if we have got all the requisite columns + const blobKzgCommitmentsLen = (signedBlock.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + const { + custodyColumns, + numColumns, + custodyColumnIndexes: columnIndexes, + } = getCustodyColumns(this.nodeId, config, blobKzgCommitmentsLen); + const custodyIndexesPresent = + dataColumnsCache.size >= numColumns && + columnIndexes.reduce((acc, columnIndex) => acc && dataColumnsCache.has(columnIndex), true); + if (custodyIndexesPresent) { + const allDataColumns = getBlockInputDataColumns(dataColumnsCache, columnIndexes); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.GOSSIP}); + const {dataColumns} = allDataColumns; + const blockData = { + fork: cachedData.fork, + ...allDataColumns, + numColumns, + custodyColumns, + dataColumnsSource: DataColumnsSource.gossip, + } as BlockInputDataDataColumns; + const blockInput = getBlockInput.availableData( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + blockData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: {pending: null, haveColumns: dataColumns.length, expectedColumns: numColumns}, + }; + } else { + const blockInput = getBlockInput.dataPromise( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + cachedData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: { + pending: GossipedInputType.dataColumn, + haveColumns: dataColumnsCache.size, + expectedColumns: numColumns, + }, + }; + } + } else { + throw Error(`Invalid fork=${fork}`); + } + } else { + // will need to wait for the block to showup + if (cachedData === undefined) { + throw Error("Missing cachedData for deneb+ blobs"); } - if (blobKzgCommitments.length === blobsCache.size) { - const allBlobs = getBlockInputBlobs(blobsCache); - const blockData = {...allBlobs, blobsSource: BlobsSource.gossip, fork: cachedData.fork} as BlockInputDataBlobs; - resolveAvailability(blockData); - metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.GOSSIP}); - const blockInput = getBlockInput.availableData( - config, - signedBlock, - BlockSource.gossip, - blockBytes ?? null, - blockData - ); + if (cachedData.fork === ForkName.deneb) { + const {blobsCache} = cachedData; - resolveBlockInput(blockInput); return { - blockInput, - blockInputMeta: {pending: null, haveBlobs: allBlobs.blobs.length, expectedBlobs: blobKzgCommitments.length}, + blockInput: { + block: null, + blockRootHex: blockHex, + cachedData, + blockInputPromise, + }, + blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null}, }; - } else { - const blockInput = getBlockInput.dataPromise( - config, - signedBlock, - BlockSource.gossip, - blockBytes ?? 
null, - cachedData - ); + } else if (fork === ForkName.electra) { + const {dataColumnsCache} = cachedData; - resolveBlockInput(blockInput); return { - blockInput, - blockInputMeta: { - pending: GossipedInputType.blob, - haveBlobs: blobsCache.size, - expectedBlobs: blobKzgCommitments.length, + blockInput: { + block: null, + blockRootHex: blockHex, + cachedData, + blockInputPromise, }, + blockInputMeta: {pending: GossipedInputType.block, haveColumns: dataColumnsCache.size, expectedColumns: null}, }; + } else { + throw Error(`invalid fork=${fork} data not implemented`); } - } else { - // will need to wait for the block to showup - if (cachedData === undefined) { - throw Error("Missing cachedData for deneb+ blobs"); - } - const {blobsCache} = cachedData; - - return { - blockInput: { - block: null, - blockRootHex: blockHex, - cachedData, - blockInputPromise, - }, - blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null}, - }; } } } @@ -205,16 +337,38 @@ function getEmptyBlockInputCacheEntry(fork: ForkName): BlockInputCacheType { return {fork, blockInputPromise, resolveBlockInput}; } - let resolveAvailability: ((blobs: BlockInputDataBlobs) => void) | null = null; - const availabilityPromise = new Promise((resolveCB) => { - resolveAvailability = resolveCB; - }); + if (fork === ForkName.deneb) { + let resolveAvailability: ((blobs: BlockInputDataBlobs) => void) | null = null; + const availabilityPromise = new Promise((resolveCB) => { + resolveAvailability = resolveCB; + }); - if (resolveAvailability === null) { - throw Error("Promise Constructor was not executed immediately"); - } + if (resolveAvailability === null) { + throw Error("Promise Constructor was not executed immediately"); + } + + const blobsCache = new Map(); + const cachedData: CachedData = {fork, blobsCache, availabilityPromise, resolveAvailability}; + return {fork, blockInputPromise, resolveBlockInput, cachedData}; + } else if (fork === ForkName.electra) { + let resolveAvailability: ((blobs: BlockInputDataDataColumns) => void) | null = null; + const availabilityPromise = new Promise((resolveCB) => { + resolveAvailability = resolveCB; + }); - const blobsCache = new Map(); - const cachedData: CachedData = {fork, blobsCache, availabilityPromise, resolveAvailability}; - return {fork, blockInputPromise, resolveBlockInput, cachedData}; + if (resolveAvailability === null) { + throw Error("Promise Constructor was not executed immediately"); + } + + const dataColumnsCache = new Map(); + const cachedData: CachedData = { + fork, + dataColumnsCache, + availabilityPromise, + resolveAvailability, + }; + return {fork, blockInputPromise, resolveBlockInput, cachedData}; + } else { + throw Error(`Invalid fork=${fork} for getEmptyBlockInputCacheEntry`); + } } diff --git a/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts b/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts new file mode 100644 index 000000000000..004a4a57c25e --- /dev/null +++ b/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts @@ -0,0 +1,66 @@ +import { + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + KZG_COMMITMENTS_SUBTREE_INDEX, + DATA_COLUMN_SIDECAR_SUBNET_COUNT, + NUMBER_OF_COLUMNS, +} from "@lodestar/params"; +import {ssz, deneb, electra, Slot, Root} from "@lodestar/types"; +import {verifyMerkleBranch} from "@lodestar/utils"; + +import {DataColumnSidecarGossipError, DataColumnSidecarErrorCode} from "../errors/dataColumnSidecarError.js"; +import {GossipAction} from "../errors/gossipValidation.js"; +import 
{IBeaconChain} from "../interface.js"; + +export async function validateGossipDataColumnSidecar( + chain: IBeaconChain, + dataColumnSideCar: electra.DataColumnSidecar, + gossipIndex: number +): Promise { + const dataColumnSlot = dataColumnSideCar.signedBlockHeader.message.slot; + + if ( + dataColumnSideCar.index > NUMBER_OF_COLUMNS || + dataColumnSideCar.index % DATA_COLUMN_SIDECAR_SUBNET_COUNT !== gossipIndex + ) { + throw new DataColumnSidecarGossipError(GossipAction.REJECT, { + code: DataColumnSidecarErrorCode.INVALID_INDEX, + columnIndex: dataColumnSideCar.index, + gossipIndex, + }); + } + + // [IGNORE] The sidecar is not from a future slot (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance) -- + // i.e. validate that sidecar.slot <= current_slot (a client MAY queue future blocks for processing at + // the appropriate slot). + const currentSlotWithGossipDisparity = chain.clock.currentSlotWithGossipDisparity; + if (currentSlotWithGossipDisparity < dataColumnSlot) { + throw new DataColumnSidecarGossipError(GossipAction.IGNORE, { + code: DataColumnSidecarErrorCode.FUTURE_SLOT, + currentSlot: currentSlotWithGossipDisparity, + blockSlot: dataColumnSlot, + }); + } + + validateInclusionProof(dataColumnSideCar); +} + +export function validateDataColumnsSidecars( + _blockSlot: Slot, + _blockRoot: Root, + _expectedKzgCommitments: deneb.BlobKzgCommitments, + _dataColumnSidecars: electra.DataColumnSidecars, + _opts: {skipProofsCheck: boolean} = {skipProofsCheck: false} +): void { + // stubbed + return; +} + +function validateInclusionProof(dataColumnSidecar: electra.DataColumnSidecar): boolean { + return verifyMerkleBranch( + ssz.deneb.BlobKzgCommitments.hashTreeRoot(dataColumnSidecar.kzgCommitments), + dataColumnSidecar.kzgCommitmentsInclusionProof, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + KZG_COMMITMENTS_SUBTREE_INDEX, + dataColumnSidecar.signedBlockHeader.message.bodyRoot + ); +} diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 07cc47fa54d8..41b6daccd42d 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ b/packages/beacon-node/src/db/beacon.ts @@ -18,6 +18,8 @@ import { BackfilledRanges, BlobSidecarsRepository, BlobSidecarsArchiveRepository, + DataColumnSidecarsRepository, + DataColumnSidecarsArchiveRepository, BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; @@ -34,6 +36,8 @@ export class BeaconDb implements IBeaconDb { blobSidecars: BlobSidecarsRepository; blobSidecarsArchive: BlobSidecarsArchiveRepository; + dataColumnSidecars: DataColumnSidecarsRepository; + dataColumnSidecarsArchive: DataColumnSidecarsArchiveRepository; stateArchive: StateArchiveRepository; checkpointState: CheckpointStateRepository; @@ -67,6 +71,8 @@ export class BeaconDb implements IBeaconDb { this.blobSidecars = new BlobSidecarsRepository(config, db); this.blobSidecarsArchive = new BlobSidecarsArchiveRepository(config, db); + this.dataColumnSidecars = new DataColumnSidecarsRepository(config, db); + this.dataColumnSidecarsArchive = new DataColumnSidecarsArchiveRepository(config, db); this.stateArchive = new StateArchiveRepository(config, db); this.checkpointState = new CheckpointStateRepository(config, db); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 9dffd0608d52..5f6a08df18b7 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -61,6 +61,9 @@ export enum Bucket { // 
54 was for bestPartialLightClientUpdate, allocate a fresh one // lightClient_bestLightClientUpdate = 55, // SyncPeriod -> LightClientUpdate // DEPRECATED on v1.5.0 lightClient_bestLightClientUpdate = 56, // SyncPeriod -> [Slot, LightClientUpdate] + + allForks_dataColumnSidecars = 57, // ELECTRA BeaconBlockRoot -> DataColumnSidecars + allForks_dataColumnSidecarsArchive = 58, // ELECTRA BeaconBlockSlot -> DataColumnSidecars } export function getBucketNameByValue(enumValue: T): keyof typeof Bucket { diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index 6ffb8992f635..cf55d3d95a44 100644 --- a/packages/beacon-node/src/db/interface.ts +++ b/packages/beacon-node/src/db/interface.ts @@ -16,6 +16,8 @@ import { BackfilledRanges, BlobSidecarsRepository, BlobSidecarsArchiveRepository, + DataColumnSidecarsRepository, + DataColumnSidecarsArchiveRepository, BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; @@ -34,6 +36,8 @@ export interface IBeaconDb { blobSidecars: BlobSidecarsRepository; blobSidecarsArchive: BlobSidecarsArchiveRepository; + dataColumnSidecars: DataColumnSidecarsRepository; + dataColumnSidecarsArchive: DataColumnSidecarsArchiveRepository; // finalized states stateArchive: StateArchiveRepository; diff --git a/packages/beacon-node/src/db/repositories/dataColumnSidecars.ts b/packages/beacon-node/src/db/repositories/dataColumnSidecars.ts new file mode 100644 index 000000000000..f41afd7a4195 --- /dev/null +++ b/packages/beacon-node/src/db/repositories/dataColumnSidecars.ts @@ -0,0 +1,50 @@ +import {ValueOf, ContainerType, ByteVectorType} from "@chainsafe/ssz"; +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {ssz} from "@lodestar/types"; +import {NUMBER_OF_COLUMNS} from "@lodestar/params"; + +import {Bucket, getBucketNameByValue} from "../buckets.js"; + +export const dataColumnSidecarsWrapperSsz = new ContainerType( + { + blockRoot: ssz.Root, + slot: ssz.Slot, + numColumns: ssz.Uint16, + columnsSize: ssz.UintNum64, + // // each byte[i] tells what index (1 based) the column i is stored, 0 means not custodied + // max value to represent will be 128 which can be represented in a byte + custodyColumns: new ByteVectorType(NUMBER_OF_COLUMNS), + dataColumnSidecars: ssz.electra.DataColumnSidecars, + }, + {typeName: "DataColumnSidecarsWrapper", jsonCase: "eth2"} +); + +export type DataColumnSidecarsWrapper = ValueOf; +export const BLOCK_ROOT_IN_WRAPPER_INDEX = 0; +export const BLOCK_SLOT_IN_WRAPPER_INDEX = 32; +export const NUM_COLUMNS_IN_WRAPPER_INDEX = 40; +export const COLUMN_SIZE_IN_WRAPPER_INDEX = 42; +export const CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX = 50; +export const DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX = + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + NUMBER_OF_COLUMNS + 4 + 4 * NUMBER_OF_COLUMNS; + +/** + * dataColumnSidecarsWrapper by block root (= hash_tree_root(SignedBeaconBlock.message)) + * + * Used to store unfinalized DataColumnSidecars + */ +export class DataColumnSidecarsRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + const bucket = Bucket.allForks_dataColumnSidecars; + super(config, db, bucket, dataColumnSidecarsWrapperSsz, getBucketNameByValue(bucket)); + } + + /** + * Id is hashTreeRoot of unsigned BeaconBlock + */ + getId(value: DataColumnSidecarsWrapper): Uint8Array { + const {blockRoot} = value; + return blockRoot; + } +} diff --git 
a/packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts b/packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts new file mode 100644 index 000000000000..08a71dcbf646 --- /dev/null +++ b/packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts @@ -0,0 +1,28 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {Slot} from "@lodestar/types"; +import {bytesToInt} from "@lodestar/utils"; +import {Bucket, getBucketNameByValue} from "../buckets.js"; +import {dataColumnSidecarsWrapperSsz, DataColumnSidecarsWrapper} from "./dataColumnSidecars.js"; + +/** + * dataColumnSidecarsWrapper by slot + * + * Used to store finalized DataColumnSidecars + */ +export class DataColumnSidecarsArchiveRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + const bucket = Bucket.allForks_dataColumnSidecarsArchive; + super(config, db, bucket, dataColumnSidecarsWrapperSsz, getBucketNameByValue(bucket)); + } + + // Handle key as slot + + getId(value: DataColumnSidecarsWrapper): Slot { + return value.slot; + } + + decodeKey(data: Uint8Array): number { + return bytesToInt(super.decodeKey(data) as unknown as Uint8Array, "be"); + } +} diff --git a/packages/beacon-node/src/db/repositories/index.ts b/packages/beacon-node/src/db/repositories/index.ts index 4a66a0ba9876..72e0c7224148 100644 --- a/packages/beacon-node/src/db/repositories/index.ts +++ b/packages/beacon-node/src/db/repositories/index.ts @@ -1,5 +1,7 @@ export {BlobSidecarsRepository} from "./blobSidecars.js"; export {BlobSidecarsArchiveRepository} from "./blobSidecarsArchive.js"; +export {DataColumnSidecarsRepository} from "./dataColumnSidecars.js"; +export {DataColumnSidecarsArchiveRepository} from "./dataColumnSidecarsArchive.js"; export {BlockRepository} from "./block.js"; export {BlockArchiveRepository} from "./blockArchive.js"; diff --git a/packages/beacon-node/src/network/events.ts b/packages/beacon-node/src/network/events.ts index 45759de61073..49b64f78d5f1 100644 --- a/packages/beacon-node/src/network/events.ts +++ b/packages/beacon-node/src/network/events.ts @@ -7,6 +7,7 @@ import {PeerIdStr} from "../util/peerId.js"; import {EventDirection} from "../util/workerEvents.js"; import {RequestTypedContainer} from "./reqresp/ReqRespBeaconNode.js"; import {PendingGossipsubMessage} from "./processor/types.js"; +import {NodeId} from "./subnets/interface.js"; export enum NetworkEvent { /** A relevant peer has connected or has been re-STATUS'd */ @@ -27,7 +28,7 @@ export enum NetworkEvent { } export type NetworkEventData = { - [NetworkEvent.peerConnected]: {peer: PeerIdStr; status: phase0.Status}; + [NetworkEvent.peerConnected]: {nodeId: NodeId; peer: PeerIdStr; status: phase0.Status}; [NetworkEvent.peerDisconnected]: {peer: PeerIdStr}; [NetworkEvent.reqRespRequest]: {request: RequestTypedContainer; peer: PeerId}; [NetworkEvent.unknownBlockParent]: {blockInput: BlockInput; peer: PeerIdStr}; diff --git a/packages/beacon-node/src/network/gossip/interface.ts b/packages/beacon-node/src/network/gossip/interface.ts index df26c2328c70..88d1d8d40776 100644 --- a/packages/beacon-node/src/network/gossip/interface.ts +++ b/packages/beacon-node/src/network/gossip/interface.ts @@ -2,7 +2,7 @@ import {Libp2p} from "libp2p"; import {Message, TopicValidatorResult} from "@libp2p/interface"; import {PeerIdStr} from "@chainsafe/libp2p-gossipsub/types"; import {ForkName} from "@lodestar/params"; -import {allForks, altair, capella, deneb, phase0, Slot} from 
"@lodestar/types"; +import {allForks, altair, capella, deneb, phase0, Slot, electra} from "@lodestar/types"; import {BeaconConfig} from "@lodestar/config"; import {Logger} from "@lodestar/utils"; import {IBeaconChain} from "../../chain/index.js"; @@ -13,6 +13,7 @@ import {GossipActionError} from "../../chain/errors/gossipValidation.js"; export enum GossipType { beacon_block = "beacon_block", blob_sidecar = "blob_sidecar", + data_column_sidecar = "data_column_sidecar", beacon_aggregate_and_proof = "beacon_aggregate_and_proof", beacon_attestation = "beacon_attestation", voluntary_exit = "voluntary_exit", @@ -41,6 +42,7 @@ export interface IGossipTopic { export type GossipTopicTypeMap = { [GossipType.beacon_block]: {type: GossipType.beacon_block}; [GossipType.blob_sidecar]: {type: GossipType.blob_sidecar; index: number}; + [GossipType.data_column_sidecar]: {type: GossipType.data_column_sidecar; index: number}; [GossipType.beacon_aggregate_and_proof]: {type: GossipType.beacon_aggregate_and_proof}; [GossipType.beacon_attestation]: {type: GossipType.beacon_attestation; subnet: number}; [GossipType.voluntary_exit]: {type: GossipType.voluntary_exit}; @@ -71,6 +73,7 @@ export type SSZTypeOfGossipTopic = T extends {type: infer export type GossipTypeMap = { [GossipType.beacon_block]: allForks.SignedBeaconBlock; [GossipType.blob_sidecar]: deneb.BlobSidecar; + [GossipType.data_column_sidecar]: electra.DataColumnSidecar; [GossipType.beacon_aggregate_and_proof]: phase0.SignedAggregateAndProof; [GossipType.beacon_attestation]: phase0.Attestation; [GossipType.voluntary_exit]: phase0.SignedVoluntaryExit; @@ -86,6 +89,7 @@ export type GossipTypeMap = { export type GossipFnByType = { [GossipType.beacon_block]: (signedBlock: allForks.SignedBeaconBlock) => Promise | void; [GossipType.blob_sidecar]: (blobSidecar: deneb.BlobSidecar) => Promise | void; + [GossipType.data_column_sidecar]: (blobSidecar: electra.DataColumnSidecar) => Promise | void; [GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: phase0.SignedAggregateAndProof) => Promise | void; [GossipType.beacon_attestation]: (attestation: phase0.Attestation) => Promise | void; [GossipType.voluntary_exit]: (voluntaryExit: phase0.SignedVoluntaryExit) => Promise | void; diff --git a/packages/beacon-node/src/network/gossip/topic.ts b/packages/beacon-node/src/network/gossip/topic.ts index c5cd68ffa1de..7896d9abed25 100644 --- a/packages/beacon-node/src/network/gossip/topic.ts +++ b/packages/beacon-node/src/network/gossip/topic.ts @@ -7,6 +7,7 @@ import { SYNC_COMMITTEE_SUBNET_COUNT, isForkLightClient, MAX_BLOBS_PER_BLOCK, + DATA_COLUMN_SIDECAR_SUBNET_COUNT, } from "@lodestar/params"; import {GossipAction, GossipActionError, GossipErrorCode} from "../../chain/errors/gossipValidation.js"; @@ -75,6 +76,8 @@ function stringifyGossipTopicType(topic: GossipTopic): string { return `${topic.type}_${topic.subnet}`; case GossipType.blob_sidecar: return `${topic.type}_${topic.index}`; + case GossipType.data_column_sidecar: + return `${topic.type}_${topic.index}`; } } @@ -86,6 +89,8 @@ export function getGossipSSZType(topic: GossipTopic) { return ssz[topic.fork].SignedBeaconBlock; case GossipType.blob_sidecar: return ssz.deneb.BlobSidecar; + case GossipType.data_column_sidecar: + return ssz.electra.DataColumnSidecar; case GossipType.beacon_aggregate_and_proof: return ssz.phase0.SignedAggregateAndProof; case GossipType.beacon_attestation: @@ -189,6 +194,13 @@ export function parseGossipTopic(forkDigestContext: ForkDigestContext, topicStr: return {type: 
GossipType.blob_sidecar, index, fork, encoding}; } + if (gossipTypeStr.startsWith(GossipType.data_column_sidecar)) { + const indexStr = gossipTypeStr.slice(GossipType.data_column_sidecar.length + 1); // +1 for '_' concatenating the topic name and the index + const index = parseInt(indexStr, 10); + if (Number.isNaN(index)) throw Error(`index ${indexStr} is not a number`); + return {type: GossipType.data_column_sidecar, index, fork, encoding}; + } + throw Error(`Unknown gossip type ${gossipTypeStr}`); } catch (e) { (e as Error).message = `Invalid gossip topic ${topicStr}: ${(e as Error).message}`; @@ -212,6 +224,13 @@ export function getCoreTopicsAtFork( {type: GossipType.attester_slashing}, ]; + // After Electra also track data_column_sidecar_{index} + if (ForkSeq[fork] >= ForkSeq.electra) { + for (let index = 0; index < DATA_COLUMN_SIDECAR_SUBNET_COUNT; index++) { + topics.push({type: GossipType.data_column_sidecar, index}); + } + } + // After Deneb also track blob_sidecar_{index} if (ForkSeq[fork] >= ForkSeq.deneb) { for (let index = 0; index < MAX_BLOBS_PER_BLOCK; index++) { @@ -262,6 +281,7 @@ function parseEncodingStr(encodingStr: string): GossipEncoding { export const gossipTopicIgnoreDuplicatePublishError: Record = { [GossipType.beacon_block]: true, [GossipType.blob_sidecar]: true, + [GossipType.data_column_sidecar]: true, [GossipType.beacon_aggregate_and_proof]: true, [GossipType.beacon_attestation]: true, [GossipType.voluntary_exit]: true, diff --git a/packages/beacon-node/src/network/interface.ts b/packages/beacon-node/src/network/interface.ts index aeeb61f1feb2..2c2783ccff9b 100644 --- a/packages/beacon-node/src/network/interface.ts +++ b/packages/beacon-node/src/network/interface.ts @@ -16,13 +16,14 @@ import { import type {AddressManager, ConnectionManager, Registrar, TransportManager} from "@libp2p/interface-internal"; import type {Datastore} from "interface-datastore"; import {Identify} from "@chainsafe/libp2p-identify"; -import {Slot, SlotRootHex, allForks, altair, capella, deneb, phase0} from "@lodestar/types"; +import {Slot, SlotRootHex, allForks, altair, capella, deneb, phase0, electra} from "@lodestar/types"; import {PeerIdStr} from "../util/peerId.js"; import {INetworkEventBus} from "./events.js"; import {INetworkCorePublic} from "./core/types.js"; import {GossipType} from "./gossip/interface.js"; import {PendingGossipsubMessage} from "./processor/types.js"; import {PeerAction} from "./peers/index.js"; +import {NodeId} from "./subnets/interface.js"; export type WithBytes = {data: T; bytes: Uint8Array}; @@ -36,6 +37,8 @@ export type WithBytes = {data: T; bytes: Uint8Array}; */ export interface INetwork extends INetworkCorePublic { + readonly nodeId: NodeId; + readonly peerId: PeerId; readonly closed: boolean; events: INetworkEventBus; @@ -57,10 +60,19 @@ export interface INetwork extends INetworkCorePublic { ): Promise[]>; sendBlobSidecarsByRange(peerId: PeerIdStr, request: deneb.BlobSidecarsByRangeRequest): Promise; sendBlobSidecarsByRoot(peerId: PeerIdStr, request: deneb.BlobSidecarsByRootRequest): Promise; + sendDataColumnSidecarsByRange( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRangeRequest + ): Promise; + sendDataColumnSidecarsByRoot( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRootRequest + ): Promise; // Gossip publishBeaconBlock(signedBlock: allForks.SignedBeaconBlock): Promise; publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise; + publishDataColumnSidecar(dataColumnSideCar: electra.DataColumnSidecar): Promise; 
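  // Illustrative usage sketch (not part of this diff): exercising the data-column methods
  // added above. The request element shape {blockRoot, index} is an assumption mirroring the
  // blob identifier used by sendBlobSidecarsByRoot; the actual
  // electra.DataColumnSidecarsByRootRequest type lives in the types package and is not shown
  // in this patch. The subnet/topic mapping follows publishDataColumnSidecar in network.ts
  // below: subnet = sidecar.index % DATA_COLUMN_SIDECAR_SUBNET_COUNT, published on the gossip
  // topic `data_column_sidecar_${subnet}`.
  //
  //   await network.publishDataColumnSidecar(dataColumnSidecar);
  //   const columns = await network.sendDataColumnSidecarsByRoot(peerIdStr, [
  //     {blockRoot, index: dataColumnSidecar.index}, // hypothetical identifier shape
  //   ]);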
publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise; publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise; publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise; diff --git a/packages/beacon-node/src/network/metadata.ts b/packages/beacon-node/src/network/metadata.ts index fab220c1ebf8..6c3cd92a69d3 100644 --- a/packages/beacon-node/src/network/metadata.ts +++ b/packages/beacon-node/src/network/metadata.ts @@ -11,6 +11,7 @@ export enum ENRKey { eth2 = "eth2", attnets = "attnets", syncnets = "syncnets", + custody_subnet_count = "custody_subnet_count", } export enum SubnetType { attnets = "attnets", diff --git a/packages/beacon-node/src/network/network.ts b/packages/beacon-node/src/network/network.ts index be8bb5114d40..4190e334db12 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -5,10 +5,10 @@ import {BeaconConfig} from "@lodestar/config"; import {sleep} from "@lodestar/utils"; import {LoggerNode} from "@lodestar/logger/node"; import {computeStartSlotAtEpoch, computeTimeAtSlot} from "@lodestar/state-transition"; -import {phase0, allForks, deneb, altair, Root, capella, SlotRootHex} from "@lodestar/types"; +import {phase0, allForks, deneb, altair, Root, capella, SlotRootHex, electra} from "@lodestar/types"; import {routes} from "@lodestar/api"; import {ResponseIncoming} from "@lodestar/reqresp"; -import {ForkSeq, MAX_BLOBS_PER_BLOCK} from "@lodestar/params"; +import {ForkSeq, MAX_BLOBS_PER_BLOCK, NUMBER_OF_COLUMNS, DATA_COLUMN_SIDECAR_SUBNET_COUNT} from "@lodestar/params"; import {Metrics, RegistryMetricCreator} from "../metrics/index.js"; import {IBeaconChain} from "../chain/index.js"; import {IBeaconDb} from "../db/interface.js"; @@ -20,7 +20,7 @@ import {ReqRespMethod} from "./reqresp/index.js"; import {GossipHandlers, GossipTopicMap, GossipType, GossipTypeMap} from "./gossip/index.js"; import {PeerAction, PeerScoreStats} from "./peers/index.js"; import {INetworkEventBus, NetworkEvent, NetworkEventBus, NetworkEventData} from "./events.js"; -import {CommitteeSubscription} from "./subnets/index.js"; +import {CommitteeSubscription, NodeId} from "./subnets/index.js"; import {isPublishToZeroPeersError} from "./util.js"; import {NetworkProcessor, PendingGossipsubMessage} from "./processor/index.js"; import {INetworkCore, NetworkCore, WorkerNetworkCore} from "./core/index.js"; @@ -38,6 +38,7 @@ import {getActiveForks} from "./forks.js"; type NetworkModules = { opts: NetworkOptions; peerId: PeerId; + nodeId: NodeId; config: BeaconConfig; logger: LoggerNode; chain: IBeaconChain; @@ -51,6 +52,7 @@ export type NetworkInitModules = { opts: NetworkOptions; config: BeaconConfig; peerId: PeerId; + nodeId: NodeId; peerStoreDir?: string; logger: LoggerNode; metrics: Metrics | null; @@ -70,6 +72,7 @@ export type NetworkInitModules = { * - libp2p in main thread */ export class Network implements INetwork { + readonly nodeId: NodeId; readonly peerId: PeerId; // TODO: Make private readonly events: INetworkEventBus; @@ -87,10 +90,11 @@ export class Network implements INetwork { private readonly aggregatorTracker: AggregatorTracker; private subscribedToCoreTopics = false; - private connectedPeers = new Set(); + private connectedPeers = new Map(); private regossipBlsChangesPromise: Promise | null = null; constructor(modules: NetworkModules) { + this.nodeId = modules.nodeId; this.peerId = modules.peerId; this.config = modules.config; this.logger = modules.logger; @@ 
-122,6 +126,7 @@ export class Network implements INetwork { db, gossipHandlers, peerId, + nodeId, peerStoreDir, getReqRespHandler, }: NetworkInitModules): Promise { @@ -177,6 +182,7 @@ export class Network implements INetwork { return new Network({ opts, peerId, + nodeId, config, logger, chain, @@ -245,7 +251,7 @@ export class Network implements INetwork { // REST API queries getConnectedPeers(): PeerIdStr[] { - return Array.from(this.connectedPeers.values()); + return Array.from(this.connectedPeers.keys()); } getConnectedPeerCount(): number { return this.connectedPeers.size; @@ -304,6 +310,20 @@ export class Network implements INetwork { }); } + async publishDataColumnSidecar(dataColumnSidecar: electra.DataColumnSidecar): Promise { + const slot = dataColumnSidecar.signedBlockHeader.message.slot; + const fork = this.config.getForkName(slot); + const index = dataColumnSidecar.index % DATA_COLUMN_SIDECAR_SUBNET_COUNT; + + return this.publishGossip( + {type: GossipType.data_column_sidecar, fork, index}, + dataColumnSidecar, + { + ignoreDuplicatePublishError: true, + } + ); + } + async publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise { const fork = this.config.getForkName(aggregateAndProof.message.aggregate.data.slot); return this.publishGossip( @@ -504,6 +524,29 @@ export class Network implements INetwork { ); } + async sendDataColumnSidecarsByRange( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRangeRequest + ): Promise { + return collectMaxResponseTyped( + this.sendReqRespRequest(peerId, ReqRespMethod.DataColumnSidecarsByRange, [Version.V1], request), + // request's count represent the slots, so the actual max count received could be slots * blobs per slot + request.count * NUMBER_OF_COLUMNS, + responseSszTypeByMethod[ReqRespMethod.DataColumnSidecarsByRange] + ); + } + + async sendDataColumnSidecarsByRoot( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRootRequest + ): Promise { + return collectMaxResponseTyped( + this.sendReqRespRequest(peerId, ReqRespMethod.DataColumnSidecarsByRoot, [Version.V1], request), + request.length, + responseSszTypeByMethod[ReqRespMethod.DataColumnSidecarsByRoot] + ); + } + private sendReqRespRequest( peerId: PeerIdStr, method: ReqRespMethod, @@ -618,7 +661,7 @@ export class Network implements INetwork { }; private onPeerConnected = (data: NetworkEventData[NetworkEvent.peerConnected]): void => { - this.connectedPeers.add(data.peer); + this.connectedPeers.set(data.peer, data.nodeId); }; private onPeerDisconnected = (data: NetworkEventData[NetworkEvent.peerDisconnected]): void => { diff --git a/packages/beacon-node/src/network/peers/discover.ts b/packages/beacon-node/src/network/peers/discover.ts index 1cb084846f61..39e76f84f0e4 100644 --- a/packages/beacon-node/src/network/peers/discover.ts +++ b/packages/beacon-node/src/network/peers/discover.ts @@ -1,16 +1,20 @@ import {Multiaddr} from "@multiformats/multiaddr"; import type {PeerId, PeerInfo} from "@libp2p/interface"; import {ENR} from "@chainsafe/enr"; +import {fromHexString} from "@chainsafe/ssz"; import {BeaconConfig} from "@lodestar/config"; import {pruneSetToMax, sleep} from "@lodestar/utils"; import {ATTESTATION_SUBNET_COUNT, SYNC_COMMITTEE_SUBNET_COUNT} from "@lodestar/params"; import {LoggerNode} from "@lodestar/logger/node"; +import {ssz} from "@lodestar/types"; import {NetworkCoreMetrics} from "../core/metrics.js"; import {Libp2p} from "../interface.js"; import {ENRKey, SubnetType} from "../metadata.js"; import {getConnectionsMap, 
prettyPrintPeerId} from "../util.js"; import {Discv5Worker} from "../discv5/index.js"; import {LodestarDiscv5Opts} from "../discv5/types.js"; +import {NodeId} from "../subnets/interface.js"; +import {getCustodyColumnSubnets} from "../../util/dataColumns.js"; import {deserializeEnrSubnets, zeroAttnets, zeroSyncnets} from "./utils/enrSubnetsDeserialize.js"; import {IPeerRpcScoreStore, ScoreState} from "./score/index.js"; @@ -19,6 +23,8 @@ const MAX_CACHED_ENRS = 100; /** Max age a cached ENR will be considered for dial */ const MAX_CACHED_ENR_AGE_MS = 5 * 60 * 1000; +const MAX_CACHED_NODEIDS = 10000; + export type PeerDiscoveryOpts = { maxPeers: number; discv5FirstQueryDelayMs: number; @@ -76,6 +82,7 @@ type CachedENR = { multiaddrTCP: Multiaddr; subnets: Record; addedUnixMs: number; + custodySubnetCount: number; }; /** @@ -85,11 +92,15 @@ type CachedENR = { export class PeerDiscovery { readonly discv5: Discv5Worker; private libp2p: Libp2p; + private nodeId: NodeId; + private custodySubnets: number[]; private peerRpcScores: IPeerRpcScoreStore; private metrics: NetworkCoreMetrics | null; private logger: LoggerNode; private config: BeaconConfig; private cachedENRs = new Map(); + private peerIdToNodeId = new Map(); + private peerIdToCustodySubnetCount = new Map(); private randomNodeQuery: QueryStatus = {code: QueryStatusCode.NotActive}; private peersToConnect = 0; private subnetRequests: Record> = { @@ -112,6 +123,10 @@ export class PeerDiscovery { this.logger = logger; this.config = config; this.discv5 = discv5; + this.nodeId = fromHexString(ENR.decodeTxt(opts.discv5.enr).nodeId); + // we will only connect to peers that can provide us custody + this.custodySubnets = getCustodyColumnSubnets(this.nodeId, config.CUSTODY_REQUIREMENT); + this.maxPeers = opts.maxPeers; this.discv5StartMs = 0; this.discv5StartMs = Date.now(); @@ -304,7 +319,9 @@ export class PeerDiscovery { const attnets = zeroAttnets; const syncnets = zeroSyncnets; - const status = this.handleDiscoveredPeer(id, multiaddrs[0], attnets, syncnets); + const custodySubnetCount = 0; + + const status = this.handleDiscoveredPeer(id, multiaddrs[0], attnets, syncnets, custodySubnetCount); this.metrics?.discovery.discoveredStatus.inc({status}); }; @@ -317,6 +334,11 @@ export class PeerDiscovery { } // async due to some crypto that's no longer necessary const peerId = await enr.peerId(); + + const nodeId = fromHexString(enr.nodeId); + this.peerIdToNodeId.set(peerId.toString(), nodeId); + pruneSetToMax(this.peerIdToNodeId, MAX_CACHED_NODEIDS); + // tcp multiaddr is known to be be present, checked inside the worker const multiaddrTCP = enr.getLocationMultiaddr(ENRKey.tcp); if (!multiaddrTCP) { @@ -327,6 +349,7 @@ export class PeerDiscovery { // Are this fields mandatory? const attnetsBytes = enr.kvs.get(ENRKey.attnets); // 64 bits const syncnetsBytes = enr.kvs.get(ENRKey.syncnets); // 4 bits + const custodySubnetCountBytes = enr.kvs.get(ENRKey.custody_subnet_count); // 64 bits // Use faster version than ssz's implementation that leverages pre-cached. // Some nodes don't serialize the bitfields properly, encoding the syncnets as attnets, @@ -334,8 +357,10 @@ export class PeerDiscovery { // never throw and treat too long or too short bitfields as zero-ed const attnets = attnetsBytes ? deserializeEnrSubnets(attnetsBytes, ATTESTATION_SUBNET_COUNT) : zeroAttnets; const syncnets = syncnetsBytes ? deserializeEnrSubnets(syncnetsBytes, SYNC_COMMITTEE_SUBNET_COUNT) : zeroSyncnets; + const custodySubnetCount = custodySubnetCountBytes ? 
ssz.UintNum64.deserialize(custodySubnetCountBytes) : 1; + this.peerIdToCustodySubnetCount.set(peerId.toString(), custodySubnetCount); - const status = this.handleDiscoveredPeer(peerId, multiaddrTCP, attnets, syncnets); + const status = this.handleDiscoveredPeer(peerId, multiaddrTCP, attnets, syncnets, custodySubnetCount); this.metrics?.discovery.discoveredStatus.inc({status}); }; @@ -346,7 +371,8 @@ export class PeerDiscovery { peerId: PeerId, multiaddrTCP: Multiaddr, attnets: boolean[], - syncnets: boolean[] + syncnets: boolean[], + custodySubnetCount: number ): DiscoveredPeerStatus { try { // Check if peer is not banned or disconnected @@ -374,6 +400,7 @@ export class PeerDiscovery { multiaddrTCP, subnets: {attnets, syncnets}, addedUnixMs: Date.now(), + custodySubnetCount, }; // Only dial peer if necessary @@ -394,6 +421,17 @@ export class PeerDiscovery { } private shouldDialPeer(peer: CachedENR): boolean { + const nodeId = this.peerIdToNodeId.get(peer.peerId.toString()); + if (nodeId === undefined) { + return false; + } + const peerCustodySubnetCount = peer.custodySubnetCount; + const peerCustodySubnets = getCustodyColumnSubnets(nodeId, peerCustodySubnetCount); + const hasAllColumns = this.custodySubnets.reduce((acc, elem) => acc && peerCustodySubnets.includes(elem), true); + if (!hasAllColumns) { + return false; + } + for (const type of [SubnetType.attnets, SubnetType.syncnets]) { for (const [subnet, {toUnixMs, peersToConnect}] of this.subnetRequests[type].entries()) { if (toUnixMs < Date.now() || peersToConnect === 0) { diff --git a/packages/beacon-node/src/network/peers/peerManager.ts b/packages/beacon-node/src/network/peers/peerManager.ts index 5149b129c363..7139b45751a0 100644 --- a/packages/beacon-node/src/network/peers/peerManager.ts +++ b/packages/beacon-node/src/network/peers/peerManager.ts @@ -376,7 +376,11 @@ export class PeerManager { peerData.relevantStatus = RelevantPeerStatus.relevant; } if (getConnection(this.libp2p, peer.toString())) { - this.networkEventBus.emit(NetworkEvent.peerConnected, {peer: peer.toString(), status}); + const nodeId = peerData?.nodeId ?? this.discovery?.["peerIdToNodeId"].get(peer.toString()); + + if (nodeId !== undefined) { + this.networkEventBus.emit(NetworkEvent.peerConnected, {nodeId, peer: peer.toString(), status}); + } } } @@ -586,6 +590,7 @@ export class PeerManager { // NOTE: libp2p may emit two "peer:connect" events: One for inbound, one for outbound // If that happens, it's okay. Only the "outbound" connection triggers immediate action const now = Date.now(); + const nodeId = this.discovery?.["peerIdToNodeId"].get(remotePeer.toString()) ?? null; const peerData: PeerData = { lastReceivedMsgUnixTsMs: direction === "outbound" ? 
0 : now, // If inbound, request after STATUS_INBOUND_GRACE_PERIOD @@ -593,6 +598,7 @@ export class PeerManager { connectedUnixTsMs: now, relevantStatus: RelevantPeerStatus.Unknown, direction, + nodeId, peerId: remotePeer, metadata: null, agentVersion: null, diff --git a/packages/beacon-node/src/network/peers/peersData.ts b/packages/beacon-node/src/network/peers/peersData.ts index 4f96548c73e4..366acd9ca461 100644 --- a/packages/beacon-node/src/network/peers/peersData.ts +++ b/packages/beacon-node/src/network/peers/peersData.ts @@ -1,6 +1,7 @@ import {PeerId} from "@libp2p/interface"; import {altair} from "@lodestar/types"; import {Encoding} from "@lodestar/reqresp"; +import {NodeId} from "../subnets/interface.js"; import {ClientKind} from "./client.js"; type PeerIdStr = string; @@ -18,6 +19,7 @@ export type PeerData = { relevantStatus: RelevantPeerStatus; direction: "inbound" | "outbound"; peerId: PeerId; + nodeId: NodeId | null; metadata: altair.Metadata | null; agentVersion: string | null; agentClient: ClientKind | null; diff --git a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts index d31cb3e2d7f9..2c8d7462b121 100644 --- a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts +++ b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts @@ -6,6 +6,7 @@ import { getSlotFromSignedAggregateAndProofSerialized, getSlotFromBlobSidecarSerialized, getSlotFromSignedBeaconBlockSerialized, + getSlotFromDataColumnSidecarSerialized, } from "../../util/sszBytes.js"; import {GossipType} from "../gossip/index.js"; import {ExtractSlotRootFns} from "./types.js"; @@ -45,6 +46,14 @@ export function createExtractBlockSlotRootFns(): ExtractSlotRootFns { [GossipType.blob_sidecar]: (data: Uint8Array): SlotOptionalRoot | null => { const slot = getSlotFromBlobSidecarSerialized(data); + if (slot === null) { + return null; + } + return {slot}; + }, + [GossipType.data_column_sidecar]: (data: Uint8Array): SlotOptionalRoot | null => { + const slot = getSlotFromDataColumnSidecarSerialized(data); + if (slot === null) { return null; } diff --git a/packages/beacon-node/src/network/processor/gossipHandlers.ts b/packages/beacon-node/src/network/processor/gossipHandlers.ts index 1a71cc7de334..36fa294e6610 100644 --- a/packages/beacon-node/src/network/processor/gossipHandlers.ts +++ b/packages/beacon-node/src/network/processor/gossipHandlers.ts @@ -1,7 +1,7 @@ import {toHexString} from "@chainsafe/ssz"; import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; import {LogLevel, Logger, prettyBytes} from "@lodestar/utils"; -import {Root, Slot, ssz, allForks, deneb, UintNum64} from "@lodestar/types"; +import {Root, Slot, ssz, allForks, deneb, UintNum64, electra} from "@lodestar/types"; import {ForkName, ForkSeq} from "@lodestar/params"; import {routes} from "@lodestar/api"; import {computeTimeAtSlot} from "@lodestar/state-transition"; @@ -15,6 +15,8 @@ import { BlockGossipError, BlobSidecarErrorCode, BlobSidecarGossipError, + DataColumnSidecarGossipError, + DataColumnSidecarErrorCode, GossipAction, GossipActionError, SyncCommitteeError, @@ -46,6 +48,7 @@ import {PeerAction} from "../peers/index.js"; import {validateLightClientFinalityUpdate} from "../../chain/validation/lightClientFinalityUpdate.js"; import {validateLightClientOptimisticUpdate} from "../../chain/validation/lightClientOptimisticUpdate.js"; import {validateGossipBlobSidecar} from "../../chain/validation/blobSidecar.js"; +import 
{validateGossipDataColumnSidecar} from "../../chain/validation/dataColumnSidecar.js"; import { BlockInput, GossipedInputType, @@ -252,6 +255,74 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } } + async function validateBeaconDataColumn( + dataColumnSidecar: electra.DataColumnSidecar, + dataColumnBytes: Uint8Array, + gossipIndex: number, + peerIdStr: string, + seenTimestampSec: number + ): Promise { + const dataColumnBlockHeader = dataColumnSidecar.signedBlockHeader.message; + const slot = dataColumnBlockHeader.slot; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(dataColumnBlockHeader); + const blockHex = prettyBytes(blockRoot); + + const delaySec = chain.clock.secFromSlot(slot, seenTimestampSec); + const recvToValLatency = Date.now() / 1000 - seenTimestampSec; + + const {blockInput, blockInputMeta} = chain.seenGossipBlockInput.getGossipBlockInput( + config, + { + type: GossipedInputType.dataColumn, + dataColumnSidecar, + dataColumnBytes, + }, + metrics + ); + + try { + await validateGossipDataColumnSidecar(chain, dataColumnSidecar, gossipIndex); + const recvToValidation = Date.now() / 1000 - seenTimestampSec; + const validationTime = recvToValidation - recvToValLatency; + + metrics?.gossipBlob.recvToValidation.observe(recvToValidation); + metrics?.gossipBlob.validationTime.observe(validationTime); + + logger.debug("Received gossip dataColumn", { + slot: slot, + root: blockHex, + curentSlot: chain.clock.currentSlot, + peerId: peerIdStr, + delaySec, + gossipIndex, + ...blockInputMeta, + recvToValLatency, + recvToValidation, + validationTime, + }); + + return blockInput; + } catch (e) { + if (e instanceof DataColumnSidecarGossipError) { + // Don't trigger this yet if full block and blobs haven't arrived yet + if (e.type.code === DataColumnSidecarErrorCode.PARENT_UNKNOWN && blockInput.block !== null) { + logger.debug("Gossip dataColumn has error", {slot, root: blockHex, code: e.type.code}); + events.emit(NetworkEvent.unknownBlockParent, {blockInput, peer: peerIdStr}); + } + + if (e.action === GossipAction.REJECT) { + chain.persistInvalidSszValue( + ssz.electra.DataColumnSidecar, + dataColumnSidecar, + `gossip_reject_slot_${slot}_index_${dataColumnSidecar.index}` + ); + } + } + + throw e; + } + } + function handleValidBeaconBlock(blockInput: BlockInput, peerIdStr: string, seenTimestampSec: number): void { const signedBlock = blockInput.block; @@ -408,6 +479,63 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } }, + [GossipType.data_column_sidecar]: async ({ + gossipData, + topic, + peerIdStr, + seenTimestampSec, + }: GossipHandlerParamGeneric) => { + const {serializedData} = gossipData; + const dataColumnSidecar = sszDeserialize(topic, serializedData); + const blobSlot = dataColumnSidecar.signedBlockHeader.message.slot; + const index = dataColumnSidecar.index; + + if (config.getForkSeq(blobSlot) < ForkSeq.deneb) { + throw new GossipActionError(GossipAction.REJECT, {code: "PRE_DENEB_BLOCK"}); + } + const blockInput = await validateBeaconDataColumn( + dataColumnSidecar, + serializedData, + topic.index, + peerIdStr, + seenTimestampSec + ); + if (blockInput.block !== null) { + // we can just queue up the blockInput in the processor, but block gossip handler would have already + // queued it up. 
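Note that the handler passes the gossip topic's index (gossipIndex) into validateGossipDataColumnSidecar together with the sidecar. Conceptually the sidecar must belong on the subnet it arrived on, via the same index % DATA_COLUMN_SIDECAR_SUBNET_COUNT mapping that publishDataColumnSidecar uses; the helper below is only a sketch of that correspondence, not the validator's actual code.

import {DATA_COLUMN_SIDECAR_SUBNET_COUNT} from "@lodestar/params";

// True when a column index maps onto the gossip subnet (topic index) it was received on.
function isOnExpectedSubnet(columnIndex: number, topicIndex: number): boolean {
  return columnIndex % DATA_COLUMN_SIDECAR_SUBNET_COUNT === topicIndex;
}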
+ // + // handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); + } else { + // wait for the block to arrive till some cutoff else emit unknownBlockInput event + chain.logger.debug("Block not yet available, racing with cutoff", {blobSlot, index}); + const normalBlockInput = await raceWithCutoff( + chain, + blobSlot, + blockInput.blockInputPromise, + BLOCK_AVAILABILITY_CUTOFF_MS + ).catch((_e) => { + return null; + }); + + if (normalBlockInput !== null) { + chain.logger.debug("Block corresponding to blob is now available for processing", {blobSlot, index}); + // we can directly send it for processing but block gossip handler will queue it up anyway + // if we see any issues later, we can send it to handleValidBeaconBlock + // + // handleValidBeaconBlock(normalBlockInput, peerIdStr, seenTimestampSec); + // + // however we can emit the event which will atleast add the peer to the list of peers to pull + // data from + if (normalBlockInput.type === BlockInputType.dataPromise) { + events.emit(NetworkEvent.unknownBlockInput, {blockInput: normalBlockInput, peer: peerIdStr}); + } + } else { + chain.logger.debug("Block not available till BLOCK_AVAILABILITY_CUTOFF_MS", {blobSlot, index}); + events.emit(NetworkEvent.unknownBlockInput, {blockInput, peer: peerIdStr}); + } + } + }, + [GossipType.beacon_aggregate_and_proof]: async ({ gossipData, topic, diff --git a/packages/beacon-node/src/network/processor/gossipQueues/index.ts b/packages/beacon-node/src/network/processor/gossipQueues/index.ts index 366b23b30679..8d38e0e1e14a 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/index.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/index.ts @@ -39,6 +39,11 @@ const defaultGossipQueueOpts: { type: QueueType.FIFO, dropOpts: {type: DropType.count, count: 1}, }, + [GossipType.data_column_sidecar]: { + maxLength: 4096, + type: QueueType.FIFO, + dropOpts: {type: DropType.count, count: 1}, + }, // lighthoue has aggregate_queue 4096 and unknown_block_aggregate_queue 1024, we use single queue [GossipType.beacon_aggregate_and_proof]: { maxLength: 5120, diff --git a/packages/beacon-node/src/network/processor/index.ts b/packages/beacon-node/src/network/processor/index.ts index 9a1dcfb32fa0..2a88054b1498 100644 --- a/packages/beacon-node/src/network/processor/index.ts +++ b/packages/beacon-node/src/network/processor/index.ts @@ -66,6 +66,7 @@ type WorkOpts = { const executeGossipWorkOrderObj: Record = { [GossipType.beacon_block]: {bypassQueue: true}, [GossipType.blob_sidecar]: {bypassQueue: true}, + [GossipType.data_column_sidecar]: {bypassQueue: true}, [GossipType.beacon_aggregate_and_proof]: {}, [GossipType.voluntary_exit]: {}, [GossipType.bls_to_execution_change]: {}, @@ -268,7 +269,12 @@ export class NetworkProcessor { }); return; } - if (slot === clockSlot && (topicType === GossipType.beacon_block || topicType === GossipType.blob_sidecar)) { + if ( + slot === clockSlot && + (topicType === GossipType.beacon_block || + topicType === GossipType.blob_sidecar || + topicType === GossipType.data_column_sidecar) + ) { // in the worse case if the current slot block is not valid, this will be reset in the next slot this.isProcessingCurrentSlotBlock = true; } diff --git a/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts b/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts index cfe13b527183..2a0f8bb4e199 100644 --- a/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts +++ b/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts @@ -253,6 
+253,13 @@ export class ReqRespBeaconNode extends ReqResp { ); } + if (ForkSeq[fork] >= ForkSeq.electra) { + protocolsAtFork.push( + [protocols.DataColumnSidecarsByRoot(this.config), this.getHandler(ReqRespMethod.DataColumnSidecarsByRoot)], + [protocols.DataColumnSidecarsByRange(this.config), this.getHandler(ReqRespMethod.DataColumnSidecarsByRange)] + ); + } + return protocolsAtFork; } diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts index ff5689a7b8c3..926788db90de 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts @@ -1,11 +1,21 @@ import {ChainForkConfig} from "@lodestar/config"; -import {deneb, Epoch, phase0, allForks, Slot} from "@lodestar/types"; -import {ForkSeq} from "@lodestar/params"; +import {deneb, Epoch, phase0, allForks, Slot, electra} from "@lodestar/types"; +import {ForkSeq, NUMBER_OF_COLUMNS} from "@lodestar/params"; import {computeEpochAtSlot} from "@lodestar/state-transition"; -import {BlobsSource, BlockInput, BlockSource, getBlockInput, BlockInputDataBlobs} from "../../chain/blocks/types.js"; +import { + BlobsSource, + BlockInput, + BlockSource, + getBlockInput, + BlockInputDataBlobs, + BlockInputDataDataColumns, + DataColumnsSource, +} from "../../chain/blocks/types.js"; import {PeerIdStr} from "../../util/peerId.js"; import {INetwork, WithBytes} from "../interface.js"; +import {getCustodyColumns} from "../../util/dataColumns.js"; +import {NodeId} from "../subnets/interface.js"; export async function beaconBlocksMaybeBlobsByRange( config: ChainForkConfig, @@ -30,20 +40,42 @@ export async function beaconBlocksMaybeBlobsByRange( ); } + const forkSeq = config.getForkSeq(startSlot); + // Note: Assumes all blocks in the same epoch - if (config.getForkSeq(startSlot) < ForkSeq.deneb) { + if (forkSeq < ForkSeq.deneb) { const blocks = await network.sendBeaconBlocksByRange(peerId, request); return blocks.map((block) => getBlockInput.preData(config, block.data, BlockSource.byRange, block.bytes)); } // Only request blobs if they are recent enough else if (computeEpochAtSlot(startSlot) >= currentEpoch - config.MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS) { - const [allBlocks, allBlobSidecars] = await Promise.all([ - network.sendBeaconBlocksByRange(peerId, request), - network.sendBlobSidecarsByRange(peerId, request), - ]); + if (forkSeq < ForkSeq.electra) { + const [allBlocks, allBlobSidecars] = await Promise.all([ + network.sendBeaconBlocksByRange(peerId, request), + network.sendBlobSidecarsByRange(peerId, request), + ]); - return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, endSlot, BlockSource.byRange, BlobsSource.byRange); + return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, endSlot, BlockSource.byRange, BlobsSource.byRange); + } else { + const nodeId = network.nodeId; + // get columns + const {custodyColumnIndexes: columns} = getCustodyColumns(nodeId, config, 1); + const dataColumnRequest = {...request, columns}; + const [allBlocks, allDataColumnSidecars] = await Promise.all([ + network.sendBeaconBlocksByRange(peerId, request), + network.sendDataColumnSidecarsByRange(peerId, dataColumnRequest), + ]); + return matchBlockWithDataColumns( + network.nodeId, + config, + allBlocks, + allDataColumnSidecars, + endSlot, + BlockSource.byRange, + DataColumnsSource.byRange + ); + } } // Post Deneb but old blobs @@ -125,3 +157,97 @@ export function 
matchBlockWithBlobs( } return blockInputs; } + +export function matchBlockWithDataColumns( + nodeId: NodeId, + config: ChainForkConfig, + allBlocks: WithBytes[], + allDataColumnSidecars: electra.DataColumnSidecar[], + endSlot: Slot, + blockSource: BlockSource, + dataColumnsSource: DataColumnsSource +): BlockInput[] { + const blockInputs: BlockInput[] = []; + let dataColumnSideCarIndex = 0; + let lastMatchedSlot = -1; + + // Match dataColumnSideCar with the block as some blocks would have no dataColumns and hence + // would be omitted from the response. If there are any inconsistencies in the + // response, the validations during import will reject the block and hence this + // entire segment. + // + // Assuming that the blocks and data columns come in the same sorted order + for (let i = 0; i < allBlocks.length; i++) { + const block = allBlocks[i]; + + const forkSeq = config.getForkSeq(block.data.message.slot); + if (forkSeq < ForkSeq.electra) { + throw Error(`Invalid block forkSeq=${forkSeq} < ForkSeq.electra for matchBlockWithDataColumns`); + } else { + const dataColumnSidecars: electra.DataColumnSidecar[] = []; + const blobKzgCommitmentsLen = (block.data.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + + let dataColumnSidecar: electra.DataColumnSidecar; + while ( + (dataColumnSidecar = allDataColumnSidecars[dataColumnSideCarIndex])?.signedBlockHeader.message.slot === + block.data.message.slot + ) { + dataColumnSidecars.push(dataColumnSidecar); + lastMatchedSlot = block.data.message.slot; + dataColumnSideCarIndex++; + } + + // Quick check of how many dataColumnSidecars were expected + const { + custodyColumns, + numColumns, + custodyColumnIndexes: columnIndexes, + } = getCustodyColumns(nodeId, config, blobKzgCommitmentsLen); + const dataColumnIndexes = dataColumnSidecars.map((dataColumnSidecar) => dataColumnSidecar.index); + const custodyIndexesPresent = columnIndexes.reduce( + (acc, columnIndex) => acc && dataColumnIndexes.includes(columnIndex), + true + ); + + if ( + dataColumnSidecars.length < numColumns || + dataColumnSidecars.length > NUMBER_OF_COLUMNS || + !custodyIndexesPresent + ) { + throw Error( + `Missing or mismatching dataColumnSidecars for blockSlot=${block.data.message.slot} with numColumns=${columnIndexes.length} dataColumnSidecars=${dataColumnSidecars.length} custodyIndexesPresent=${custodyIndexesPresent}` + ); + } + + const blockData = { + fork: config.getForkName(block.data.message.slot), + numColumns, + custodyColumns, + dataColumns: dataColumnSidecars, + dataColumnsSource, + dataColumnsBytes: Array.from({length: dataColumnSidecars.length}, () => null), + } as BlockInputDataDataColumns; + + // TODO DENEB: instead of null, pass payload in bytes + blockInputs.push(getBlockInput.availableData(config, block.data, blockSource, null, blockData)); + } + } + + // If there are still unconsumed data columns, the response was inconsistent + // and the matching was wrong, so we should throw an error + if ( + allDataColumnSidecars[dataColumnSideCarIndex] !== undefined && + // If there are no data columns, the data columns request can give 1 block outside the requested range + allDataColumnSidecars[dataColumnSideCarIndex].signedBlockHeader.message.slot <= endSlot + ) { + throw Error( + `Unmatched dataColumnSidecars, blocks=${allBlocks.length}, dataColumns=${ + allDataColumnSidecars.length + } lastMatchedSlot=${lastMatchedSlot}, pending dataColumnSidecars slots=${allDataColumnSidecars + .slice(dataColumnSideCarIndex) + .map((blb) => blb.signedBlockHeader.message.slot) + .join(",")}` + );
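The custody-completeness check above collects dataColumnIndexes and then runs an includes() scan per custody index inside a reduce. A Set-based equivalent, shown here only as a sketch, expresses the same invariant without the quadratic scan:

// Given the sidecars matched to one block and the column indexes this node custodies,
// return true only if every custodied column is present in the response.
function hasAllCustodyColumns(sidecars: {index: number}[], custodyColumnIndexes: number[]): boolean {
  const returnedIndexes = new Set(sidecars.map((sidecar) => sidecar.index));
  return custodyColumnIndexes.every((index) => returnedIndexes.has(index));
}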
} + return blockInputs; +} diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts index 2b802ab1edd9..a07f4f5550fa 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts @@ -1,7 +1,7 @@ import {fromHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {phase0, deneb} from "@lodestar/types"; -import {ForkSeq} from "@lodestar/params"; +import {phase0, deneb, electra} from "@lodestar/types"; +import {ForkName, ForkSeq} from "@lodestar/params"; import { BlockInput, BlockInputType, @@ -11,12 +11,16 @@ import { NullBlockInput, BlobsSource, BlockInputDataBlobs, + DataColumnsSource, + getBlockInputDataColumns, + BlockInputDataDataColumns, } from "../../chain/blocks/types.js"; import {PeerIdStr} from "../../util/peerId.js"; import {INetwork} from "../interface.js"; import {BlockInputAvailabilitySource} from "../../chain/seenCache/seenGossipBlockInput.js"; import {Metrics} from "../../metrics/index.js"; -import {matchBlockWithBlobs} from "./beaconBlocksMaybeBlobsByRange.js"; +import {getCustodyColumns} from "../../util/dataColumns.js"; +import {matchBlockWithBlobs, matchBlockWithDataColumns} from "./beaconBlocksMaybeBlobsByRange.js"; export async function beaconBlocksMaybeBlobsByRoot( config: ChainForkConfig, @@ -25,31 +29,85 @@ export async function beaconBlocksMaybeBlobsByRoot( request: phase0.BeaconBlocksByRootRequest ): Promise { const allBlocks = await network.sendBeaconBlocksByRoot(peerId, request); + const preDataBlocks = []; + const blobsDataBlocks = []; + const dataColumnsDataBlocks = []; + const blobIdentifiers: deneb.BlobIdentifier[] = []; + const dataColumnIdentifiers: electra.DataColumnIdentifier[] = []; for (const block of allBlocks) { const slot = block.data.message.slot; const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.data.message); const fork = config.getForkName(slot); - - if (ForkSeq[fork] >= ForkSeq.deneb) { + if (ForkSeq[fork] < ForkSeq.deneb) { + preDataBlocks.push(block); + } else if (fork === ForkName.deneb) { + blobsDataBlocks.push(block); const blobKzgCommitmentsLen = (block.data.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; for (let index = 0; index < blobKzgCommitmentsLen; index++) { blobIdentifiers.push({blockRoot, index}); } + } else if (fork === ForkName.electra) { + dataColumnsDataBlocks.push(block); + const blobKzgCommitmentsLen = (block.data.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + const {custodyColumnIndexes} = getCustodyColumns(network.nodeId, config, blobKzgCommitmentsLen); + for (const columnIndex of custodyColumnIndexes) { + dataColumnIdentifiers.push({blockRoot, index: columnIndex}); + } + } else { + throw Error(`Invalid fork=${fork} in beaconBlocksMaybeBlobsByRoot`); } } - let allBlobSidecars: deneb.BlobSidecar[]; - if (blobIdentifiers.length > 0) { - allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); - } else { - allBlobSidecars = []; + let blockInputs = preDataBlocks.map((block) => + getBlockInput.preData(config, block.data, BlockSource.byRoot, block.bytes) + ); + + if (blobsDataBlocks.length > 0) { + let allBlobSidecars: deneb.BlobSidecar[]; + if (blobIdentifiers.length > 0) { + allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); + } else { + allBlobSidecars = []; + } + + // 
The last arg is to provide slot to which all blobs should be exausted in matching + // and here it should be infinity since all bobs should match + const blockInputWithBlobs = matchBlockWithBlobs( + config, + allBlocks, + allBlobSidecars, + Infinity, + BlockSource.byRoot, + BlobsSource.byRoot + ); + blockInputs = [...blockInputs, ...blockInputWithBlobs]; } - // The last arg is to provide slot to which all blobs should be exausted in matching - // and here it should be infinity since all bobs should match - return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, Infinity, BlockSource.byRoot, BlobsSource.byRoot); + if (dataColumnsDataBlocks.length > 0) { + let allDataColumnsSidecars: electra.DataColumnSidecar[]; + if (dataColumnIdentifiers.length > 0) { + allDataColumnsSidecars = await network.sendDataColumnSidecarsByRoot(peerId, dataColumnIdentifiers); + } else { + allDataColumnsSidecars = []; + } + + // The last arg is to provide slot to which all blobs should be exausted in matching + // and here it should be infinity since all bobs should match + const blockInputWithBlobs = matchBlockWithDataColumns( + network.nodeId, + config, + allBlocks, + allDataColumnsSidecars, + Infinity, + BlockSource.byRoot, + DataColumnsSource.byRoot + ); + blockInputs = [...blockInputs, ...blockInputWithBlobs]; + } + + return blockInputs; } export async function unavailableBeaconBlobsByRoot( @@ -64,51 +122,109 @@ export async function unavailableBeaconBlobsByRoot( } // resolve the block if thats unavailable - let block, blobsCache, blockBytes, resolveAvailability, cachedData; + let block, blockBytes, cachedData; if (unavailableBlockInput.block === null) { const allBlocks = await network.sendBeaconBlocksByRoot(peerId, [fromHexString(unavailableBlockInput.blockRootHex)]); block = allBlocks[0].data; blockBytes = allBlocks[0].bytes; cachedData = unavailableBlockInput.cachedData; - ({blobsCache, resolveAvailability} = cachedData); } else { ({block, cachedData, blockBytes} = unavailableBlockInput); - ({blobsCache, resolveAvailability} = cachedData); } - // resolve missing blobs - const blobIdentifiers: deneb.BlobIdentifier[] = []; - const slot = block.message.slot; - const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); + let availableBlockInput; + if (cachedData.fork === ForkName.deneb) { + const {blobsCache, resolveAvailability} = cachedData; - const blobKzgCommitmentsLen = (block.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; - for (let index = 0; index < blobKzgCommitmentsLen; index++) { - if (blobsCache.has(index) === false) blobIdentifiers.push({blockRoot, index}); - } + // resolve missing blobs + const blobIdentifiers: deneb.BlobIdentifier[] = []; + const slot = block.message.slot; + const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); - let allBlobSidecars: deneb.BlobSidecar[]; - if (blobIdentifiers.length > 0) { - allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); - } else { - allBlobSidecars = []; - } + const blobKzgCommitmentsLen = (block.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + for (let index = 0; index < blobKzgCommitmentsLen; index++) { + if (blobsCache.has(index) === false) blobIdentifiers.push({blockRoot, index}); + } - // add them in cache so that its reflected in all the blockInputs that carry this - // for e.g. 
a blockInput that might be awaiting blobs promise fullfillment in - // verifyBlocksDataAvailability - for (const blobSidecar of allBlobSidecars) { - blobsCache.set(blobSidecar.index, {blobSidecar, blobBytes: null}); - } + let allBlobSidecars: deneb.BlobSidecar[]; + if (blobIdentifiers.length > 0) { + allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); + } else { + allBlobSidecars = []; + } - // check and see if all blobs are now available and in that case resolve availability - // if not this will error and the leftover blobs will be tried from another peer - const allBlobs = getBlockInputBlobs(blobsCache); - const {blobs} = allBlobs; - if (blobs.length !== blobKzgCommitmentsLen) { - throw Error(`Not all blobs fetched missingBlobs=${blobKzgCommitmentsLen - blobs.length}`); + // add them in cache so that its reflected in all the blockInputs that carry this + // for e.g. a blockInput that might be awaiting blobs promise fullfillment in + // verifyBlocksDataAvailability + for (const blobSidecar of allBlobSidecars) { + blobsCache.set(blobSidecar.index, {blobSidecar, blobBytes: null}); + } + + // check and see if all blobs are now available and in that case resolve availability + // if not this will error and the leftover blobs will be tried from another peer + const allBlobs = getBlockInputBlobs(blobsCache); + const {blobs} = allBlobs; + if (blobs.length !== blobKzgCommitmentsLen) { + throw Error(`Not all blobs fetched missingBlobs=${blobKzgCommitmentsLen - blobs.length}`); + } + const blockData = {fork: cachedData.fork, ...allBlobs, blobsSource: BlobsSource.byRoot} as BlockInputDataBlobs; + resolveAvailability(blockData); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); + availableBlockInput = getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + } else if (cachedData.fork === ForkName.electra) { + const {dataColumnsCache, resolveAvailability} = cachedData; + + // resolve missing blobs + const dataColumnIdentifiers: electra.DataColumnIdentifier[] = []; + const slot = block.message.slot; + const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); + + const blobKzgCommitmentsLen = (block.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + const {numColumns, custodyColumns, custodyColumnIndexes} = getCustodyColumns( + network.nodeId, + config, + blobKzgCommitmentsLen + ); + for (const columnIndex of custodyColumnIndexes) { + if (dataColumnsCache.has(columnIndex) === false) { + dataColumnIdentifiers.push({blockRoot, index: columnIndex}); + } + } + + let allDataColumnSidecars: electra.DataColumnSidecar[]; + if (dataColumnIdentifiers.length > 0) { + allDataColumnSidecars = await network.sendDataColumnSidecarsByRoot(peerId, dataColumnIdentifiers); + } else { + allDataColumnSidecars = []; + } + + // add them in cache so that its reflected in all the blockInputs that carry this + // for e.g. 
a blockInput that might be awaiting blobs promise fullfillment in + // verifyBlocksDataAvailability + for (const dataColumnSidecar of allDataColumnSidecars) { + dataColumnsCache.set(dataColumnSidecar.index, {dataColumnSidecar, dataColumnBytes: null}); + } + + // check and see if all blobs are now available and in that case resolve availability + // if not this will error and the leftover blobs will be tried from another peer + const allDataColumns = getBlockInputDataColumns(dataColumnsCache, custodyColumnIndexes); + const {dataColumns} = allDataColumns; + if (dataColumns.length !== numColumns) { + throw Error(`Not all blobs fetched missingColumns=${numColumns - dataColumns.length}`); + } + const blockData = { + fork: cachedData.fork, + ...allDataColumns, + numColumns, + custodyColumns, + dataColumnsSource: DataColumnsSource.byRoot, + } as BlockInputDataDataColumns; + resolveAvailability(blockData); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); + availableBlockInput = getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + } else { + throw Error(`Invalid cachedData fork=${cachedData.fork} for unavailableBeaconBlobsByRoot`); } - const blockData = {fork: cachedData.fork, ...allBlobs, blobsSource: BlobsSource.byRoot} as BlockInputDataBlobs; - resolveAvailability(blockData); - metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); - return getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + + return availableBlockInput; } diff --git a/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts b/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts index d1046db9651d..e892b2f412dd 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts @@ -69,7 +69,7 @@ export async function* onBeaconBlocksByRange( } export function validateBeaconBlocksByRangeRequest( - request: deneb.BlobSidecarsByRangeRequest + request: phase0.BeaconBlocksByRangeRequest ): deneb.BlobSidecarsByRangeRequest { const {startSlot} = request; let {count} = request; diff --git a/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts new file mode 100644 index 000000000000..903c4a64b69f --- /dev/null +++ b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts @@ -0,0 +1,125 @@ +import {GENESIS_SLOT, MAX_REQUEST_BLOCKS_DENEB} from "@lodestar/params"; +import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; +import {electra, Slot, ssz, ColumnIndex} from "@lodestar/types"; +import {fromHex} from "@lodestar/utils"; +import {IBeaconChain} from "../../../chain/index.js"; +import {IBeaconDb} from "../../../db/index.js"; +import { + DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX, + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, +} from "../../../db/repositories/dataColumnSidecars.js"; + +export async function* onDataColumnSidecarsByRange( + request: electra.DataColumnSidecarsByRangeRequest, + chain: IBeaconChain, + db: IBeaconDb +): AsyncIterable { + // Non-finalized range of blobs + const {startSlot, count, columns} = validateDataColumnSidecarsByRangeRequest(request); + const endSlot = startSlot + count; + + const finalized = 
db.dataColumnSidecarsArchive; + const unfinalized = db.dataColumnSidecars; + const finalizedSlot = chain.forkChoice.getFinalizedBlock().slot; + + // Finalized range of blobs + if (startSlot <= finalizedSlot) { + // Chain of blobs won't change + for await (const {key, value: dataColumnSideCarsBytesWrapped} of finalized.binaryEntriesStream({ + gte: startSlot, + lt: endSlot, + })) { + yield* iterateDataColumnBytesFromWrapper( + chain, + dataColumnSideCarsBytesWrapped, + finalized.decodeKey(key), + columns + ); + } + } + + // Non-finalized range of blobs + if (endSlot > finalizedSlot) { + const headRoot = chain.forkChoice.getHeadRoot(); + // TODO DENEB: forkChoice should mantain an array of canonical blocks, and change only on reorg + const headChain = chain.forkChoice.getAllAncestorBlocks(headRoot); + + // Iterate head chain with ascending block numbers + for (let i = headChain.length - 1; i >= 0; i--) { + const block = headChain[i]; + + // Must include only blobs in the range requested + if (block.slot >= startSlot && block.slot < endSlot) { + // Note: Here the forkChoice head may change due to a re-org, so the headChain reflects the canonical chain + // at the time of the start of the request. Spec is clear the chain of blobs must be consistent, but on + // re-org there's no need to abort the request + // Spec: https://github.com/ethereum/consensus-specs/blob/a1e46d1ae47dd9d097725801575b46907c12a1f8/specs/eip4844/p2p-interface.md#blobssidecarsbyrange-v1 + + const blobSideCarsBytesWrapped = await unfinalized.getBinary(fromHex(block.blockRoot)); + if (!blobSideCarsBytesWrapped) { + // Handle the same to onBeaconBlocksByRange + throw new ResponseError(RespStatus.SERVER_ERROR, `No item for root ${block.blockRoot} slot ${block.slot}`); + } + yield* iterateDataColumnBytesFromWrapper(chain, blobSideCarsBytesWrapped, block.slot, columns); + } + + // If block is after endSlot, stop iterating + else if (block.slot >= endSlot) { + break; + } + } + } +} + +export function* iterateDataColumnBytesFromWrapper( + chain: IBeaconChain, + dataColumnSidecarsBytesWrapped: Uint8Array, + blockSlot: Slot, + // use the columns include to see if you want to yield in response + _columns: ColumnIndex[] +): Iterable { + const retrievedColumnsSizeBytes = dataColumnSidecarsBytesWrapped.slice( + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + ); + const columnsSize = ssz.UintNum64.deserialize(retrievedColumnsSizeBytes); + const allDataColumnSidecarsBytes = dataColumnSidecarsBytesWrapped.slice(DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX); + + const columnsLen = allDataColumnSidecarsBytes.length / columnsSize; + + for (let index = 0; index < columnsLen; index++) { + const dataColumnSidecarBytes = allDataColumnSidecarsBytes.slice(index * columnsSize, (index + 1) * columnsSize); + if (dataColumnSidecarBytes.length !== columnsSize) { + throw new ResponseError( + RespStatus.SERVER_ERROR, + `Invalid dataColumnSidecar index=${index} bytes length=${dataColumnSidecarBytes.length} expected=${columnsSize} for slot ${blockSlot} blobsLen=${columnsLen}` + ); + } + yield { + data: dataColumnSidecarBytes, + fork: chain.config.getForkName(blockSlot), + }; + } +} + +export function validateDataColumnSidecarsByRangeRequest( + request: electra.DataColumnSidecarsByRangeRequest +): electra.DataColumnSidecarsByRangeRequest { + const {startSlot, columns} = request; + let {count} = request; + + if (count < 1) { + throw new ResponseError(RespStatus.INVALID_REQUEST, "count < 1"); + } + // TODO: validate against 
MIN_EPOCHS_FOR_BLOCK_REQUESTS + if (startSlot < GENESIS_SLOT) { + throw new ResponseError(RespStatus.INVALID_REQUEST, "startSlot < genesis"); + } + + if (count > MAX_REQUEST_BLOCKS_DENEB) { + count = MAX_REQUEST_BLOCKS_DENEB; + } + + return {startSlot, count, columns}; +} diff --git a/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts new file mode 100644 index 000000000000..dea728cd62ef --- /dev/null +++ b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts @@ -0,0 +1,90 @@ +import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; +import {NUMBER_OF_COLUMNS} from "@lodestar/params"; +import {electra, RootHex, ssz} from "@lodestar/types"; +import {toHex, fromHex} from "@lodestar/utils"; +import {IBeaconChain} from "../../../chain/index.js"; +import {IBeaconDb} from "../../../db/index.js"; +import { + DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, + COLUMN_SIZE_IN_WRAPPER_INDEX, +} from "../../../db/repositories/dataColumnSidecars.js"; + +export async function* onDataColumnSidecarsByRoot( + requestBody: electra.DataColumnSidecarsByRootRequest, + chain: IBeaconChain, + db: IBeaconDb +): AsyncIterable { + const finalizedSlot = chain.forkChoice.getFinalizedBlock().slot; + + // In a sidecars-by-root request, sidecar requests can be expected to come clustered + // by block root, and this helps us save db lookups once we have loaded the sidecars + // for a root + let lastFetchedSideCars: { + blockRoot: RootHex; + bytes: Uint8Array; + custodyColumns: Uint8Array; + columnsSize: number; + } | null = null; + + for (const dataColumnIdentifier of requestBody) { + const {blockRoot, index} = dataColumnIdentifier; + const blockRootHex = toHex(blockRoot); + const block = chain.forkChoice.getBlockHex(blockRootHex); + + // NOTE: Only support non-finalized blocks. + // SPEC: Clients MUST support requesting blocks and sidecars since the latest finalized epoch.
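The by-root handler keeps lastFetchedSideCars so that requests clustered on the same block root reuse a single DB read, and it resolves each requested column through the custodyColumns byte map stored in the wrapper. A distilled sketch of that lookup, under the assumption (implied by the handler code) that entry c of the map is 0 when column c is not stored and otherwise the 1-based position of that column's fixed-size sidecar within the concatenated bytes:

// bytes: concatenated sidecars, each exactly columnsSize bytes long
// custodyColumnsMap: one byte per column index; 0 = not stored, otherwise 1-based position
function readColumnFromWrapper(
  bytes: Uint8Array,
  custodyColumnsMap: Uint8Array,
  columnsSize: number,
  columnIndex: number
): Uint8Array | null {
  const dataIndex = custodyColumnsMap[columnIndex];
  if (dataIndex === undefined || dataIndex === 0) return null;
  return bytes.slice((dataIndex - 1) * columnsSize, dataIndex * columnsSize);
}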
+ // https://github.com/ethereum/consensus-specs/blob/11a037fd9227e29ee809c9397b09f8cc3383a8c0/specs/eip4844/p2p-interface.md#beaconblockandblobssidecarbyroot-v1 + if (!block || block.slot <= finalizedSlot) { + continue; + } + + // Check if we need to load sidecars for a new block root + if (lastFetchedSideCars === null || lastFetchedSideCars.blockRoot !== blockRootHex) { + const dataColumnSidecarsBytesWrapped = await db.dataColumnSidecars.getBinary(fromHex(block.blockRoot)); + if (!dataColumnSidecarsBytesWrapped) { + // Handle the same to onBeaconBlocksByRange + throw new ResponseError(RespStatus.SERVER_ERROR, `No item for root ${block.blockRoot} slot ${block.slot}`); + } + + const retrievedColumnsSizeBytes = dataColumnSidecarsBytesWrapped.slice( + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + ); + const columnsSize = ssz.UintNum64.deserialize(retrievedColumnsSizeBytes); + const dataColumnSidecarsBytes = dataColumnSidecarsBytesWrapped.slice(DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX); + + const custodyColumns = dataColumnSidecarsBytesWrapped.slice( + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + NUMBER_OF_COLUMNS + ); + + lastFetchedSideCars = {blockRoot: blockRootHex, bytes: dataColumnSidecarsBytes, columnsSize, custodyColumns}; + } + + const dataIndex = lastFetchedSideCars.custodyColumns[index]; + const {columnsSize} = lastFetchedSideCars; + + if (dataIndex === undefined || dataIndex === 0) { + throw Error( + `Missing dataColumnSidecar blockRoot=${blockRootHex} index=${index} calculated dataIndex=${dataIndex}` + ); + } + + // dataIndex is 1 based index + const dataColumnSidecarBytes = lastFetchedSideCars.bytes.slice( + (dataIndex - 1) * columnsSize, + dataIndex * columnsSize + ); + if (dataColumnSidecarBytes.length !== columnsSize) { + throw Error( + `Inconsistent state, dataColumnSidecar blockRoot=${blockRootHex} index=${index} dataColumnSidecarBytes=${dataColumnSidecarBytes.length} expected=${columnsSize}` + ); + } + + yield { + data: dataColumnSidecarBytes, + fork: chain.config.getForkName(block.slot), + }; + } +} diff --git a/packages/beacon-node/src/network/reqresp/handlers/index.ts b/packages/beacon-node/src/network/reqresp/handlers/index.ts index 50b8cc870844..0c4732310641 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/index.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/index.ts @@ -7,6 +7,8 @@ import {onBeaconBlocksByRange} from "./beaconBlocksByRange.js"; import {onBeaconBlocksByRoot} from "./beaconBlocksByRoot.js"; import {onBlobSidecarsByRoot} from "./blobSidecarsByRoot.js"; import {onBlobSidecarsByRange} from "./blobSidecarsByRange.js"; +import {onDataColumnSidecarsByRange} from "./dataColumnSidecarsByRange.js"; +import {onDataColumnSidecarsByRoot} from "./dataColumnSidecarsByRoot.js"; import {onLightClientBootstrap} from "./lightClientBootstrap.js"; import {onLightClientFinalityUpdate} from "./lightClientFinalityUpdate.js"; import {onLightClientOptimisticUpdate} from "./lightClientOptimisticUpdate.js"; @@ -44,6 +46,15 @@ export function getReqRespHandlers({db, chain}: {db: IBeaconDb; chain: IBeaconCh const body = ssz.deneb.BlobSidecarsByRangeRequest.deserialize(req.data); return onBlobSidecarsByRange(body, chain, db); }, + [ReqRespMethod.DataColumnSidecarsByRange]: (req) => { + const body = ssz.electra.DataColumnSidecarsByRangeRequest.deserialize(req.data); + return onDataColumnSidecarsByRange(body, chain, db); + }, + [ReqRespMethod.DataColumnSidecarsByRoot]: (req) => { + const body = 
ssz.electra.DataColumnSidecarsByRootRequest.deserialize(req.data); + return onDataColumnSidecarsByRoot(body, chain, db); + }, + [ReqRespMethod.LightClientBootstrap]: (req) => { const body = ssz.Root.deserialize(req.data); return onLightClientBootstrap(body, chain); diff --git a/packages/beacon-node/src/network/reqresp/protocols.ts b/packages/beacon-node/src/network/reqresp/protocols.ts index a0fa9576c93c..4fd96f6be711 100644 --- a/packages/beacon-node/src/network/reqresp/protocols.ts +++ b/packages/beacon-node/src/network/reqresp/protocols.ts @@ -71,6 +71,18 @@ export const BlobSidecarsByRoot = toProtocol({ contextBytesType: ContextBytesType.ForkDigest, }); +export const DataColumnSidecarsByRange = toProtocol({ + method: ReqRespMethod.DataColumnSidecarsByRange, + version: Version.V1, + contextBytesType: ContextBytesType.ForkDigest, +}); + +export const DataColumnSidecarsByRoot = toProtocol({ + method: ReqRespMethod.DataColumnSidecarsByRoot, + version: Version.V1, + contextBytesType: ContextBytesType.ForkDigest, +}); + export const LightClientBootstrap = toProtocol({ method: ReqRespMethod.LightClientBootstrap, version: Version.V1, diff --git a/packages/beacon-node/src/network/reqresp/rateLimit.ts b/packages/beacon-node/src/network/reqresp/rateLimit.ts index 881ab36bc05d..cf48d9d138fe 100644 --- a/packages/beacon-node/src/network/reqresp/rateLimit.ts +++ b/packages/beacon-node/src/network/reqresp/rateLimit.ts @@ -3,6 +3,8 @@ import { MAX_REQUEST_LIGHT_CLIENT_UPDATES, MAX_BLOBS_PER_BLOCK, MAX_REQUEST_BLOB_SIDECARS, + MAX_REQUEST_BLOCKS_DENEB, + NUMBER_OF_COLUMNS, } from "@lodestar/params"; import {InboundRateLimitQuota} from "@lodestar/reqresp"; import {ReqRespMethod, RequestBodyByMethod} from "./types.js"; @@ -46,6 +48,16 @@ export const rateLimitQuotas: Record = { byPeer: {quota: 128 * MAX_BLOBS_PER_BLOCK, quotaTimeMs: 10_000}, getRequestCount: getRequestCountFn(ReqRespMethod.BlobSidecarsByRoot, (req) => req.length), }, + [ReqRespMethod.DataColumnSidecarsByRange]: { + // Rationale: MAX_REQUEST_BLOCKS_DENEB * NUMBER_OF_COLUMNS + byPeer: {quota: MAX_REQUEST_BLOCKS_DENEB * NUMBER_OF_COLUMNS, quotaTimeMs: 10_000}, + getRequestCount: getRequestCountFn(ReqRespMethod.DataColumnSidecarsByRange, (req) => req.count), + }, + [ReqRespMethod.DataColumnSidecarsByRoot]: { + // Rationale: quota of BeaconBlocksByRoot * NUMBER_OF_COLUMNS + byPeer: {quota: 128 * NUMBER_OF_COLUMNS, quotaTimeMs: 10_000}, + getRequestCount: getRequestCountFn(ReqRespMethod.DataColumnSidecarsByRoot, (req) => req.length), + }, [ReqRespMethod.LightClientBootstrap]: { // As similar in the nature of `Status` protocol so we use the same rate limits. 
byPeer: {quota: 5, quotaTimeMs: 15_000}, diff --git a/packages/beacon-node/src/network/reqresp/types.ts b/packages/beacon-node/src/network/reqresp/types.ts index f690d282307f..cf930d8e1bc6 100644 --- a/packages/beacon-node/src/network/reqresp/types.ts +++ b/packages/beacon-node/src/network/reqresp/types.ts @@ -1,7 +1,7 @@ import {Type} from "@chainsafe/ssz"; import {ForkLightClient, ForkName, isForkLightClient} from "@lodestar/params"; import {Protocol, ProtocolHandler, ReqRespRequest} from "@lodestar/reqresp"; -import {Root, allForks, altair, deneb, phase0, ssz} from "@lodestar/types"; +import {Root, allForks, altair, deneb, phase0, ssz, electra} from "@lodestar/types"; export type ProtocolNoHandler = Omit; @@ -16,6 +16,8 @@ export enum ReqRespMethod { BeaconBlocksByRoot = "beacon_blocks_by_root", BlobSidecarsByRange = "blob_sidecars_by_range", BlobSidecarsByRoot = "blob_sidecars_by_root", + DataColumnSidecarsByRange = "data_column_sidecars_by_range", + DataColumnSidecarsByRoot = "data_column_sidecars_by_root", LightClientBootstrap = "light_client_bootstrap", LightClientUpdatesByRange = "light_client_updates_by_range", LightClientFinalityUpdate = "light_client_finality_update", @@ -32,6 +34,8 @@ export type RequestBodyByMethod = { [ReqRespMethod.BeaconBlocksByRoot]: phase0.BeaconBlocksByRootRequest; [ReqRespMethod.BlobSidecarsByRange]: deneb.BlobSidecarsByRangeRequest; [ReqRespMethod.BlobSidecarsByRoot]: deneb.BlobSidecarsByRootRequest; + [ReqRespMethod.DataColumnSidecarsByRange]: electra.DataColumnSidecarsByRangeRequest; + [ReqRespMethod.DataColumnSidecarsByRoot]: electra.DataColumnSidecarsByRootRequest; [ReqRespMethod.LightClientBootstrap]: Root; [ReqRespMethod.LightClientUpdatesByRange]: altair.LightClientUpdatesByRange; [ReqRespMethod.LightClientFinalityUpdate]: null; @@ -48,6 +52,9 @@ type ResponseBodyByMethod = { [ReqRespMethod.BeaconBlocksByRoot]: allForks.SignedBeaconBlock; [ReqRespMethod.BlobSidecarsByRange]: deneb.BlobSidecar; [ReqRespMethod.BlobSidecarsByRoot]: deneb.BlobSidecar; + [ReqRespMethod.DataColumnSidecarsByRange]: electra.DataColumnSidecar; + [ReqRespMethod.DataColumnSidecarsByRoot]: electra.DataColumnSidecar; + [ReqRespMethod.LightClientBootstrap]: altair.LightClientBootstrap; [ReqRespMethod.LightClientUpdatesByRange]: altair.LightClientUpdate; [ReqRespMethod.LightClientFinalityUpdate]: altair.LightClientFinalityUpdate; @@ -62,10 +69,14 @@ export const requestSszTypeByMethod: { [ReqRespMethod.Goodbye]: ssz.phase0.Goodbye, [ReqRespMethod.Ping]: ssz.phase0.Ping, [ReqRespMethod.Metadata]: null, + [ReqRespMethod.BeaconBlocksByRange]: ssz.phase0.BeaconBlocksByRangeRequest, [ReqRespMethod.BeaconBlocksByRoot]: ssz.phase0.BeaconBlocksByRootRequest, [ReqRespMethod.BlobSidecarsByRange]: ssz.deneb.BlobSidecarsByRangeRequest, [ReqRespMethod.BlobSidecarsByRoot]: ssz.deneb.BlobSidecarsByRootRequest, + [ReqRespMethod.DataColumnSidecarsByRange]: ssz.electra.DataColumnSidecarsByRangeRequest, + [ReqRespMethod.DataColumnSidecarsByRoot]: ssz.electra.DataColumnSidecarsByRootRequest, + [ReqRespMethod.LightClientBootstrap]: ssz.Root, [ReqRespMethod.LightClientUpdatesByRange]: ssz.altair.LightClientUpdatesByRange, [ReqRespMethod.LightClientFinalityUpdate]: null, @@ -91,6 +102,9 @@ export const responseSszTypeByMethod: {[K in ReqRespMethod]: ResponseTypeGetter< [ReqRespMethod.BeaconBlocksByRoot]: blocksResponseType, [ReqRespMethod.BlobSidecarsByRange]: () => ssz.deneb.BlobSidecar, [ReqRespMethod.BlobSidecarsByRoot]: () => ssz.deneb.BlobSidecar, + [ReqRespMethod.DataColumnSidecarsByRange]: () 
=> ssz.electra.DataColumnSidecar, + [ReqRespMethod.DataColumnSidecarsByRoot]: () => ssz.electra.DataColumnSidecar, + [ReqRespMethod.LightClientBootstrap]: (fork) => ssz.allForksLightClient[onlyLightclientFork(fork)].LightClientBootstrap, [ReqRespMethod.LightClientUpdatesByRange]: (fork) => diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index 1e9c7794ac68..5f0c948fb1a3 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -21,6 +21,7 @@ import {getApi, BeaconRestApiServer} from "../api/index.js"; import {initializeExecutionEngine, initializeExecutionBuilder} from "../execution/index.js"; import {initializeEth1ForBlockProduction} from "../eth1/index.js"; import {initCKZG, loadEthereumTrustedSetup, TrustedFileMode} from "../util/kzg.js"; +import {NodeId} from "../network/subnets/interface.js"; import {IBeaconNodeOptions} from "./options.js"; import {runNodeNotifier} from "./notifier.js"; @@ -49,6 +50,7 @@ export type BeaconNodeInitModules = { logger: LoggerNode; processShutdownCallback: ProcessShutdownCallback; peerId: PeerId; + nodeId: NodeId; peerStoreDir?: string; anchorState: BeaconStateAllForks; wsCheckpoint?: phase0.Checkpoint; @@ -146,6 +148,7 @@ export class BeaconNode { logger, processShutdownCallback, peerId, + nodeId, peerStoreDir, anchorState, wsCheckpoint, @@ -195,6 +198,7 @@ export class BeaconNode { : null; const chain = new BeaconChain(opts.chain, { + nodeId, config, db, logger: logger.child({module: LoggerModule.chain}), @@ -231,6 +235,7 @@ export class BeaconNode { chain, db, peerId, + nodeId, peerStoreDir, getReqRespHandler: getReqRespHandlers({db, chain}), }); diff --git a/packages/beacon-node/src/sync/range/chain.ts b/packages/beacon-node/src/sync/range/chain.ts index 41bbce3da820..50b3bed833a2 100644 --- a/packages/beacon-node/src/sync/range/chain.ts +++ b/packages/beacon-node/src/sync/range/chain.ts @@ -2,6 +2,8 @@ import {toHexString} from "@chainsafe/ssz"; import {Epoch, Root, Slot, phase0} from "@lodestar/types"; import {ErrorAborted, Logger} from "@lodestar/utils"; import {ChainForkConfig} from "@lodestar/config"; +import {ForkName} from "@lodestar/params"; + import {BlockInput, BlockInputType} from "../../chain/blocks/types.js"; import {PeerAction} from "../../network/index.js"; import {ItTrigger} from "../../util/itTrigger.js"; @@ -404,12 +406,25 @@ export class SyncChain { const blobs = res.result.reduce((acc, blockInput) => { hasPostDenebBlocks ||= blockInput.type === BlockInputType.availableData; return hasPostDenebBlocks - ? acc + (blockInput.type === BlockInputType.availableData ? blockInput.blockData.blobs.length : 0) + ? acc + + (blockInput.type === BlockInputType.availableData && blockInput.blockData.fork === ForkName.deneb + ? blockInput.blockData.blobs.length + : 0) + : 0; + }, 0); + const dataColumns = res.result.reduce((acc, blockInput) => { + hasPostDenebBlocks ||= blockInput.type === BlockInputType.availableData; + return hasPostDenebBlocks + ? acc + + (blockInput.type === BlockInputType.availableData && blockInput.blockData.fork === ForkName.electra + ? 
blockInput.blockData.dataColumns.length + : 0) : 0; }, 0); + const downloadInfo = {blocks: res.result.length}; if (hasPostDenebBlocks) { - Object.assign(downloadInfo, {blobs}); + Object.assign(downloadInfo, {blobs, dataColumns}); } this.logger.debug("Downloaded batch", {id: this.logId, ...batch.getMetadata(), ...downloadInfo}); this.triggerBatchProcessor(); diff --git a/packages/beacon-node/src/sync/range/range.ts b/packages/beacon-node/src/sync/range/range.ts index d20e0c3690cd..047326b12d7c 100644 --- a/packages/beacon-node/src/sync/range/range.ts +++ b/packages/beacon-node/src/sync/range/range.ts @@ -11,6 +11,7 @@ import {RangeSyncType, rangeSyncTypes, getRangeSyncTarget} from "../utils/remote import {PeerIdStr} from "../../util/peerId.js"; import {ImportBlockOpts, AttestationImportOpt} from "../../chain/blocks/index.js"; import {beaconBlocksMaybeBlobsByRange} from "../../network/reqresp/beaconBlocksMaybeBlobsByRange.js"; +import {NodeId} from "../../network/subnets/interface.js"; import {updateChains} from "./utils/index.js"; import {ChainTarget, SyncChainFns, SyncChain, SyncChainDebugState} from "./chain.js"; @@ -111,7 +112,7 @@ export class RangeSync extends (EventEmitter as {new (): RangeSyncEmitter}) { * A peer with a relevant STATUS message has been found, which also is advanced from us. * Add this peer to an existing chain or create a new one. The update the chains status. */ - addPeer(peerId: PeerIdStr, localStatus: phase0.Status, peerStatus: phase0.Status): void { + addPeer(nodeId: NodeId, peerId: PeerIdStr, localStatus: phase0.Status, peerStatus: phase0.Status): void { // Compute if we should do a Finalized or Head sync with this peer const {syncType, startEpoch, target} = getRangeSyncTarget(localStatus, peerStatus, this.chain.forkChoice); this.logger.debug("Sync peer joined", { diff --git a/packages/beacon-node/src/sync/sync.ts b/packages/beacon-node/src/sync/sync.ts index f7492c57da38..6a74976b832d 100644 --- a/packages/beacon-node/src/sync/sync.ts +++ b/packages/beacon-node/src/sync/sync.ts @@ -190,7 +190,7 @@ export class BeaconSync implements IBeaconSync { this.peerSyncType.set(data.peer.toString(), syncType); if (syncType === PeerSyncType.Advanced) { - this.rangeSync.addPeer(data.peer, localStatus, data.status); + this.rangeSync.addPeer(data.nodeId, data.peer, localStatus, data.status); } this.updateSyncState(); diff --git a/packages/beacon-node/src/sync/unknownBlock.ts b/packages/beacon-node/src/sync/unknownBlock.ts index 3c15b32eb8d8..33fbb9fc97ab 100644 --- a/packages/beacon-node/src/sync/unknownBlock.ts +++ b/packages/beacon-node/src/sync/unknownBlock.ts @@ -2,7 +2,7 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {Logger, pruneSetToMax} from "@lodestar/utils"; import {Root, RootHex, deneb} from "@lodestar/types"; -import {INTERVALS_PER_SLOT} from "@lodestar/params"; +import {INTERVALS_PER_SLOT, ForkName} from "@lodestar/params"; import {sleep} from "@lodestar/utils"; import {INetwork, NetworkEvent, NetworkEventData, PeerAction} from "../network/index.js"; import {PeerIdStr} from "../util/peerId.js"; @@ -17,6 +17,7 @@ import { unavailableBeaconBlobsByRoot, } from "../network/reqresp/beaconBlocksMaybeBlobsByRoot.js"; import {wrapError} from "../util/wrapError.js"; +import {getCustodyColumns} from "../util/dataColumns.js"; import {PendingBlock, PendingBlockStatus, PendingBlockType} from "./interface.js"; import {getDescendantBlocks, getAllDescendantBlocks, getUnknownAndAncestorBlocks} from 
"./utils/pendingBlocksTree.js"; import {SyncOptions} from "./options.js"; @@ -515,21 +516,29 @@ export class UnknownBlockSync { const shuffledPeers = shuffle(connectedPeers); let blockRootHex; - let pendingBlobs; let blobKzgCommitmentsLen; let blockRoot; + const dataMeta: Record = {}; if (unavailableBlockInput.block === null) { blockRootHex = unavailableBlockInput.blockRootHex; blockRoot = fromHexString(blockRootHex); } else { - const unavailableBlock = unavailableBlockInput.block; + const {cachedData, block: unavailableBlock} = unavailableBlockInput; blockRoot = this.config .getForkTypes(unavailableBlock.message.slot) .BeaconBlock.hashTreeRoot(unavailableBlock.message); blockRootHex = toHexString(blockRoot); blobKzgCommitmentsLen = (unavailableBlock.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; - pendingBlobs = blobKzgCommitmentsLen - unavailableBlockInput.cachedData.blobsCache.size; + + if (cachedData.fork === ForkName.deneb) { + const pendingBlobs = blobKzgCommitmentsLen - cachedData.blobsCache.size; + Object.assign(dataMeta, {pendingBlobs}); + } else if (cachedData.fork === ForkName.electra) { + const {numColumns} = getCustodyColumns(this.network.nodeId, this.config, blobKzgCommitmentsLen); + const pendingColumns = numColumns - cachedData.dataColumnsCache.size; + Object.assign(dataMeta, {pendingColumns}); + } } let lastError: Error | null = null; @@ -559,7 +568,7 @@ export class UnknownBlockSync { if (unavailableBlockInput.block === null) { this.logger.debug("Fetched NullBlockInput", {attempts: i, blockRootHex}); } else { - this.logger.debug("Fetched UnavailableBlockInput", {attempts: i, pendingBlobs, blobKzgCommitmentsLen}); + this.logger.debug("Fetched UnavailableBlockInput", {attempts: i, ...dataMeta, blobKzgCommitmentsLen}); } return {blockInput, peerIdStr: peer}; diff --git a/packages/beacon-node/src/util/blobs.ts b/packages/beacon-node/src/util/blobs.ts index 13d935ba29da..3fceb7838d04 100644 --- a/packages/beacon-node/src/util/blobs.ts +++ b/packages/beacon-node/src/util/blobs.ts @@ -1,9 +1,16 @@ import {digest as sha256Digest} from "@chainsafe/as-sha256"; import {Tree} from "@chainsafe/persistent-merkle-tree"; -import {VERSIONED_HASH_VERSION_KZG, KZG_COMMITMENT_GINDEX0, ForkName} from "@lodestar/params"; -import {deneb, ssz, allForks} from "@lodestar/types"; +import { + VERSIONED_HASH_VERSION_KZG, + KZG_COMMITMENT_GINDEX0, + KZG_COMMITMENTS_GINDEX, + ForkName, + NUMBER_OF_COLUMNS, +} from "@lodestar/params"; +import {electra, deneb, ssz, allForks} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {signedBlockToSignedHeader} from "@lodestar/state-transition"; +import {ckzg} from "./kzg.js"; type VersionHash = Uint8Array; @@ -24,6 +31,14 @@ export function computeInclusionProof( return new Tree(bodyView.node).getSingleProof(BigInt(commitmentGindex)); } +export function computeKzgCommitmentsInclusionProof( + fork: ForkName, + body: allForks.BeaconBlockBody +): electra.KzgCommitmentsInclusionProof { + const bodyView = (ssz[fork].BeaconBlockBody as allForks.AllForksSSZTypes["BeaconBlockBody"]).toView(body); + return new Tree(bodyView.node).getSingleProof(BigInt(KZG_COMMITMENTS_GINDEX)); +} + export function computeBlobSidecars( config: ChainForkConfig, signedBlock: allForks.SignedBeaconBlock, @@ -46,3 +61,41 @@ export function computeBlobSidecars( return {index, blob, kzgCommitment, kzgProof, signedBlockHeader, kzgCommitmentInclusionProof}; }); } + +export function computeDataColumnSidecars( + config: ChainForkConfig, + signedBlock: 
allForks.SignedBeaconBlock, + contents: deneb.Contents & {kzgCommitmentsInclusionProof?: electra.KzgCommitmentsInclusionProof} +): electra.DataColumnSidecars { + const blobKzgCommitments = (signedBlock as deneb.SignedBeaconBlock).message.body.blobKzgCommitments; + if (blobKzgCommitments === undefined) { + throw Error("Invalid block with missing blobKzgCommitments for computeBlobSidecars"); + } + if (blobKzgCommitments.length === 0) { + return []; + } + + const signedBlockHeader = signedBlockToSignedHeader(config, signedBlock); + const fork = config.getForkName(signedBlockHeader.message.slot); + const kzgCommitmentsInclusionProof = + contents.kzgCommitmentsInclusionProof ?? computeKzgCommitmentsInclusionProof(fork, signedBlock.message.body); + const cellsAndProofs = contents.blobs.map((blob) => ckzg.computeCellsAndKzgProofs(blob)); + const dataColumnSidecars = Array.from({length: NUMBER_OF_COLUMNS}, (_, j) => { + // j'th column + const column = Array.from({length: contents.blobs.length}, (_, i) => cellsAndProofs[i][0][j]); + const kzgProofs = Array.from({length: contents.blobs.length}, (_, i) => cellsAndProofs[i][1][j]); + + const dataColumnSidecar = { + index: j, + column, + kzgCommitments: blobKzgCommitments, + kzgProofs, + signedBlockHeader, + kzgCommitmentsInclusionProof, + } as electra.DataColumnSidecar; + + return dataColumnSidecar; + }); + + return dataColumnSidecars; +} diff --git a/packages/beacon-node/src/util/dataColumns.ts b/packages/beacon-node/src/util/dataColumns.ts new file mode 100644 index 000000000000..30390aebb0fe --- /dev/null +++ b/packages/beacon-node/src/util/dataColumns.ts @@ -0,0 +1,70 @@ +import {digest} from "@chainsafe/as-sha256"; +import {NUMBER_OF_COLUMNS, DATA_COLUMN_SIDECAR_SUBNET_COUNT} from "@lodestar/params"; +import {ColumnIndex} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {ssz} from "@lodestar/types"; +import {NodeId} from "../network/subnets/index.js"; + +export function getCustodyColumns( + nodeId: NodeId, + config: ChainForkConfig, + blobKzgCommitmentsLen: number +): {custodyColumns: Uint8Array; numColumns: number; custodyColumnIndexes: ColumnIndex[]} { + const custodyColumnIndexes = + blobKzgCommitmentsLen > 0 ? 
getCustodyColumnIndexes(nodeId, config.CUSTODY_REQUIREMENT) : []; + const numColumns = custodyColumnIndexes.length; + // custody columns map which column maps to which index in the array of columns custodied + // with zero representing it is not custodied + const custodyColumns = new Uint8Array(NUMBER_OF_COLUMNS); + let custodyAtIndex = 1; + for (const columnIndex of custodyColumnIndexes) { + custodyColumns[columnIndex] = custodyAtIndex; + custodyAtIndex++; + } + return {custodyColumns, numColumns, custodyColumnIndexes}; +} + +// optimize by having a size limited index/map +export function getCustodyColumnIndexes(nodeId: NodeId, custodySubnetCount: number): ColumnIndex[] { + const subnetIds = getCustodyColumnSubnets(nodeId, custodySubnetCount); + const columnsPerSubnet = Number(NUMBER_OF_COLUMNS / DATA_COLUMN_SIDECAR_SUBNET_COUNT); + + const columnIndexes = []; + for (const subnetId of subnetIds) { + for (let i = 0; i < columnsPerSubnet; i++) { + const columnIndex = DATA_COLUMN_SIDECAR_SUBNET_COUNT * i + subnetId; + columnIndexes.push(columnIndex); + } + } + + columnIndexes.sort((a, b) => a - b); + return columnIndexes; +} + +export function getCustodyColumnSubnets(nodeId: NodeId, custodySubnetCount: number): number[] { + const subnetIds: number[] = []; + if (custodySubnetCount > DATA_COLUMN_SIDECAR_SUBNET_COUNT) { + custodySubnetCount = DATA_COLUMN_SIDECAR_SUBNET_COUNT; + } + + let currentId = ssz.UintBn256.deserialize(nodeId); + while (subnetIds.length < custodySubnetCount) { + // could be optimized + const currentIdBytes = ssz.UintBn256.serialize(currentId); + const subnetId = Number( + ssz.UintBn64.deserialize(digest(currentIdBytes).slice(0, 8)) % BigInt(DATA_COLUMN_SIDECAR_SUBNET_COUNT) + ); + if (!subnetIds.includes(subnetId)) { + subnetIds.push(subnetId); + } + + const willOverflow = currentIdBytes.reduce((acc, elem) => acc && elem === 0xff, true); + if (willOverflow) { + currentId = BigInt(0); + } else { + currentId++; + } + } + + return subnetIds; +} diff --git a/packages/beacon-node/src/util/kzg.ts b/packages/beacon-node/src/util/kzg.ts index e20a379d62ff..0fc45eff04fe 100644 --- a/packages/beacon-node/src/util/kzg.ts +++ b/packages/beacon-node/src/util/kzg.ts @@ -19,6 +19,18 @@ export let ckzg: { computeBlobKzgProof(blob: Uint8Array, commitment: Uint8Array): Uint8Array; verifyBlobKzgProof(blob: Uint8Array, commitment: Uint8Array, proof: Uint8Array): boolean; verifyBlobKzgProofBatch(blobs: Uint8Array[], expectedKzgCommitments: Uint8Array[], kzgProofs: Uint8Array[]): boolean; + computeCells(blob: Uint8Array): Uint8Array[]; + computeCellsAndKzgProofs(blob: Uint8Array): [Uint8Array[], Uint8Array[]]; + cellsToBlob(cells: Uint8Array[]): Uint8Array; + recoverAllCells(cellIds: number[], cells: Uint8Array[]): Uint8Array[]; + verifyCellKzgProof(commitmentBytes: Uint8Array, cellId: number, cell: Uint8Array, proofBytes: Uint8Array): boolean; + verifyCellKzgProofBatch( + commitmentsBytes: Uint8Array[], + rowIndices: number[], + columnIndices: number[], + cells: Uint8Array[], + proofsBytes: Uint8Array[] + ): boolean; } = { freeTrustedSetup: ckzgNotLoaded, loadTrustedSetup: ckzgNotLoaded, @@ -26,6 +38,12 @@ export let ckzg: { computeBlobKzgProof: ckzgNotLoaded, verifyBlobKzgProof: ckzgNotLoaded, verifyBlobKzgProofBatch: ckzgNotLoaded, + computeCells: ckzgNotLoaded, + computeCellsAndKzgProofs: ckzgNotLoaded, + cellsToBlob: ckzgNotLoaded, + recoverAllCells: ckzgNotLoaded, + verifyCellKzgProof: ckzgNotLoaded, + verifyCellKzgProofBatch: ckzgNotLoaded, }; // Global variable __dirname no longer 
available in ES6 modules.
diff --git a/packages/beacon-node/src/util/sszBytes.ts b/packages/beacon-node/src/util/sszBytes.ts index 802b9a266ab1..df0a087b482b 100644 --- a/packages/beacon-node/src/util/sszBytes.ts +++ b/packages/beacon-node/src/util/sszBytes.ts @@ -198,6 +198,26 @@ export function getSlotFromBlobSidecarSerialized(data: Uint8Array): Slot | null return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR); } +/** + * { + index: ColumnIndex [ fixed - 8 bytes], + column: DataColumn [ variable - list of Cell, each BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL bytes ], + kzgCommitments: denebSsz.BlobKzgCommitments, + kzgProofs: denebSsz.KZGProofs, + signedBlockHeader: phase0Ssz.SignedBeaconBlockHeader, + kzgCommitmentsInclusionProof: KzgCommitmentsInclusionProof, + } + */ + +const SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR = 20; +export function getSlotFromDataColumnSidecarSerialized(data: Uint8Array): Slot | null { + if (data.length < SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR + SLOT_SIZE) { + return null; + } + + return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR); +} + function getSlotFromOffset(data: Uint8Array, offset: number): Slot { // TODO: Optimize const dv = new DataView(data.buffer, data.byteOffset, data.byteLength);
diff --git a/packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts b/packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts new file mode 100644 index 000000000000..93ac6f4cde1e --- /dev/null +++ b/packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts @@ -0,0 +1,103 @@ +import {rimraf} from "rimraf"; +import {describe, it, expect, beforeEach, afterEach, beforeAll} from "vitest"; +import {ssz} from "@lodestar/types"; +import {createChainForkConfig} from "@lodestar/config"; +import {LevelDbController} from "@lodestar/db"; +import {NUMBER_OF_COLUMNS} from "@lodestar/params"; + +import { + DataColumnSidecarsRepository, + dataColumnSidecarsWrapperSsz, + DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX, + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, +} from "../../../../../src/db/repositories/dataColumnSidecars.js"; +import {testLogger} from "../../../../utils/logger.js"; +import {computeDataColumnSidecars} from "../../../../../src/util/blobs.js"; +import {loadEthereumTrustedSetup, initCKZG} from "../../../../../src/util/kzg.js"; + +/* eslint-disable @typescript-eslint/naming-convention */ +const config = createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, +}); +describe("dataColumnSidecars repository", function () { + const testDir = "./.tmp"; + let dataColumnRepo: DataColumnSidecarsRepository; + let db: LevelDbController; + + beforeEach(async function () { + db = await LevelDbController.create({name: testDir}, {logger: testLogger()}); + dataColumnRepo = new DataColumnSidecarsRepository(config, db); + }); + afterEach(async function () { + await db.close(); + rimraf.sync(testDir); + }); + + beforeAll(async function () { + await initCKZG(); + loadEthereumTrustedSetup(); + }); + + it("should store and retrieve data column sidecars by block root", async function () { + const dataColumn = ssz.electra.DataColumnSidecar.defaultValue(); + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(dataColumn.signedBlockHeader.message); + const slot = dataColumn.signedBlockHeader.message.slot; + const blob = ssz.deneb.Blob.defaultValue(); + const commitment = ssz.deneb.KZGCommitment.defaultValue(); + const signedBlock = ssz.electra.SignedBeaconBlock.defaultValue(); + + signedBlock.message.body.blobKzgCommitments.push(commitment); + signedBlock.message.body.blobKzgCommitments.push(commitment); + signedBlock.message.body.blobKzgCommitments.push(commitment); + const dataColumnSidecars = computeDataColumnSidecars(config, signedBlock, {blobs: [blob, blob, blob]}); + for (let j = 0; j < dataColumnSidecars.length; j++) { + dataColumnSidecars[j].index = j; + } + + const blobKzgCommitmentsLen = 3; + const columnsSize = + ssz.electra.DataColumnSidecar.minSize + + blobKzgCommitmentsLen * + (ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize); + + const numColumns = NUMBER_OF_COLUMNS; + const custodyColumns = new Uint8Array(numColumns); + + const writeData = { + blockRoot, + slot, + numColumns, + columnsSize, + custodyColumns, + dataColumnSidecars, + }; + + await dataColumnRepo.add(writeData); + const retrievedBinary = await dataColumnRepo.getBinary(blockRoot); + if (!retrievedBinary) throw Error("get by root returned null"); + + const retrieved = dataColumnSidecarsWrapperSsz.deserialize(retrievedBinary); + expect(dataColumnSidecarsWrapperSsz.equals(retrieved, writeData)).toBe(true); + + const retrievedColumnsSizeBytes = retrievedBinary.slice( + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + ); + + const retrievedColumnsSize = ssz.UintNum64.deserialize(retrievedColumnsSizeBytes); + expect(retrievedColumnsSize === columnsSize).toBe(true); + const dataColumnSidecarsBytes = retrievedBinary.slice(DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX); + expect(dataColumnSidecarsBytes.length === columnsSize * numColumns).toBe(true); + + for (let j = 0; j < numColumns; j++) { + const dataColumnBytes = dataColumnSidecarsBytes.slice(j * columnsSize, (j + 1) * columnsSize); + const retrievedDataColumnSidecar = ssz.electra.DataColumnSidecar.deserialize(dataColumnBytes); + const index = retrievedDataColumnSidecar.index; + expect(j === index).toBe(true); + } + }); +});
diff --git a/packages/beacon-node/test/unit/network/gossip/topic.test.ts b/packages/beacon-node/test/unit/network/gossip/topic.test.ts index dbaa4002bfcc..430f45ee6807 100644 --- a/packages/beacon-node/test/unit/network/gossip/topic.test.ts +++ b/packages/beacon-node/test/unit/network/gossip/topic.test.ts @@ -21,6 +21,12 @@ describe("network / gossip / topic", function () { topicStr: "/eth2/46acb19a/blob_sidecar_1/ssz_snappy", }, ], + [GossipType.data_column_sidecar]: [ + { + topic: {type: GossipType.data_column_sidecar, index: 1, fork: ForkName.electra, encoding}, + topicStr: "/eth2/46acb19a/data_column_sidecar/ssz_snappy", + }, + ], [GossipType.beacon_aggregate_and_proof]: [ { topic: {type: GossipType.beacon_aggregate_and_proof, fork: ForkName.phase0, encoding},
diff --git a/packages/beacon-node/test/unit/util/dataColumn.test.ts b/packages/beacon-node/test/unit/util/dataColumn.test.ts new file mode 100644 index 000000000000..3ccbc902c75f --- /dev/null +++ b/packages/beacon-node/test/unit/util/dataColumn.test.ts @@ -0,0 +1,14 @@ +import {describe, it, expect} from "vitest"; +import {ssz} from "@lodestar/types"; + +import {getCustodyColumnIndexes} from "../../../src/util/dataColumns.js"; + +describe("custody columns", () => { + it("getCustodyColumnIndexes", async () => { + const nodeId = ssz.UintBn256.serialize( + BigInt("84065159290331321853352677657753050104170032838956724170714636178275273565505") + ); + const columnIndexes = getCustodyColumnIndexes(nodeId, 1); + expect(columnIndexes).toEqual([27, 59, 91, 123]);
}); +}); diff --git a/packages/beacon-node/test/utils/node/beacon.ts b/packages/beacon-node/test/utils/node/beacon.ts index 0163fa148102..f8fee8294907 100644 --- a/packages/beacon-node/test/utils/node/beacon.ts +++ b/packages/beacon-node/test/utils/node/beacon.ts @@ -1,3 +1,4 @@ +import crypto from "node:crypto"; import deepmerge from "deepmerge"; import tmp from "tmp"; import {PeerId} from "@libp2p/interface"; @@ -19,6 +20,7 @@ import {defaultOptions} from "../../../src/node/options.js"; import {BeaconDb} from "../../../src/db/index.js"; import {testLogger} from "../logger.js"; import {InteropStateOpts} from "../../../src/node/utils/interop/state.js"; +import {NodeId} from "../../../src/network/subnets/interface.js"; export async function getDevBeaconNode( opts: { @@ -27,15 +29,17 @@ export async function getDevBeaconNode( validatorCount?: number; logger?: LoggerNode; peerId?: PeerId; + nodeId?: NodeId; peerStoreDir?: string; anchorState?: BeaconStateAllForks; wsCheckpoint?: phase0.Checkpoint; } & InteropStateOpts ): Promise { const {params, validatorCount = 8, peerStoreDir} = opts; - let {options = {}, logger, peerId} = opts; + let {options = {}, logger, peerId, nodeId} = opts; if (!peerId) peerId = await createSecp256k1PeerId(); + if (!nodeId) nodeId = crypto.randomBytes(32); const tmpDir = tmp.dirSync({unsafeCleanup: true}); const config = createChainForkConfig({...minimalConfig, ...params}); logger = logger ?? testLogger(); @@ -94,6 +98,7 @@ export async function getDevBeaconNode( logger, processShutdownCallback: () => {}, peerId, + nodeId, peerStoreDir, anchorState, wsCheckpoint: opts.wsCheckpoint, diff --git a/packages/cli/src/cmds/beacon/handler.ts b/packages/cli/src/cmds/beacon/handler.ts index 444bbba8510e..81852b50ccca 100644 --- a/packages/cli/src/cmds/beacon/handler.ts +++ b/packages/cli/src/cmds/beacon/handler.ts @@ -35,7 +35,8 @@ const EIGHT_GB = 8 * 1024 * 1024 * 1024; * Runs a beacon node. 
*/ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise { - const {config, options, beaconPaths, network, version, commit, peerId, logger} = await beaconHandlerInit(args); + const {config, options, beaconPaths, network, version, commit, nodeId, peerId, logger} = + await beaconHandlerInit(args); const heapSizeLimit = getHeapStatistics().heap_size_limit; if (heapSizeLimit < EIGHT_GB) { @@ -90,6 +91,7 @@ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise { +): Promise<{peerId: PeerId; enr: SignableENR; nodeId: Uint8Array}> { const {persistNetworkIdentity} = args; const newPeerIdAndENR = async (): Promise<{peerId: PeerId; enr: SignableENR}> => { @@ -181,14 +188,16 @@ export async function initPeerIdAndEnr( const enrFile = path.join(beaconDir, "enr"); const peerIdFile = path.join(beaconDir, "peer-id.json"); const {peerId, enr, newEnr} = await readPersistedPeerIdAndENR(peerIdFile, enrFile); - overwriteEnrWithCliArgs(enr, args, logger, {newEnr, bootnode}); + overwriteEnrWithCliArgs(config, enr, args, logger, {newEnr, bootnode}); // Re-persist peer-id and enr writeFile600Perm(peerIdFile, exportToJSON(peerId)); writeFile600Perm(enrFile, enr.encodeTxt()); - return {peerId, enr}; + const nodeId = fromHex(enr.nodeId); + return {peerId, enr, nodeId}; } else { const {peerId, enr} = await newPeerIdAndENR(); - overwriteEnrWithCliArgs(enr, args, logger, {newEnr: true, bootnode}); - return {peerId, enr}; + overwriteEnrWithCliArgs(config, enr, args, logger, {newEnr: true, bootnode}); + const nodeId = fromHex(enr.nodeId); + return {peerId, enr, nodeId}; } } diff --git a/packages/cli/src/cmds/bootnode/handler.ts b/packages/cli/src/cmds/bootnode/handler.ts index 77851b1fcb88..990bce624551 100644 --- a/packages/cli/src/cmds/bootnode/handler.ts +++ b/packages/cli/src/cmds/bootnode/handler.ts @@ -181,7 +181,7 @@ export async function bootnodeHandlerInit(args: BootnodeArgs & GlobalArgs) { ); const logger = initLogger(args, beaconPaths.dataDir, config, "bootnode.log"); - const {peerId, enr} = await initPeerIdAndEnr(args as unknown as BeaconArgs, bootnodeDir, logger, true); + const {peerId, enr} = await initPeerIdAndEnr(config, args as unknown as BeaconArgs, bootnodeDir, logger, true); return {discv5Args, metricsArgs, bootnodeDir, network, version, commit, peerId, enr, logger}; } diff --git a/packages/config/src/chainConfig/configs/mainnet.ts b/packages/config/src/chainConfig/configs/mainnet.ts index 0de1bee666ec..eed180b4a7ea 100644 --- a/packages/config/src/chainConfig/configs/mainnet.ts +++ b/packages/config/src/chainConfig/configs/mainnet.ts @@ -102,4 +102,8 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + SAMPLES_PER_SLOT: 8, + CUSTODY_REQUIREMENT: 1, }; diff --git a/packages/config/src/chainConfig/configs/minimal.ts b/packages/config/src/chainConfig/configs/minimal.ts index c99a76d1ee40..978161ed01d0 100644 --- a/packages/config/src/chainConfig/configs/minimal.ts +++ b/packages/config/src/chainConfig/configs/minimal.ts @@ -100,4 +100,8 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + SAMPLES_PER_SLOT: 8, + CUSTODY_REQUIREMENT: 1, }; diff --git a/packages/config/src/chainConfig/types.ts b/packages/config/src/chainConfig/types.ts index 0d306aa6545a..0fec8e2a7ef8 100644 --- a/packages/config/src/chainConfig/types.ts +++ b/packages/config/src/chainConfig/types.ts 
@@ -72,6 +72,9 @@ export type ChainConfig = { // Networking MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: number; + + SAMPLES_PER_SLOT: number; + CUSTODY_REQUIREMENT: number; }; export const chainConfigTypes: SpecTypes = { @@ -134,6 +137,9 @@ export const chainConfigTypes: SpecTypes = { // Networking MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: "number", + + SAMPLES_PER_SLOT: "number", + CUSTODY_REQUIREMENT: "number", }; /** Allows values in a Spec file */ diff --git a/packages/params/src/forkName.ts b/packages/params/src/forkName.ts index bbb72a7972fa..9aefabce14cf 100644 --- a/packages/params/src/forkName.ts +++ b/packages/params/src/forkName.ts @@ -45,3 +45,9 @@ export type ForkBlobs = Exclude; export function isForkBlobs(fork: ForkName): fork is ForkBlobs { return isForkWithdrawals(fork) && fork !== ForkName.capella; } + +export type ForkPrePeerDAS = ForkPreBlobs | ForkName.deneb; +export type ForkPeerDAS = Exclude; +export function isForkPeerDAS(fork: ForkName): fork is ForkPeerDAS { + return isForkBlobs(fork) && fork !== ForkName.deneb; +} diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index 6a95e3ca632e..d2f53859bbe9 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -93,6 +93,12 @@ export const { MAX_BLOB_COMMITMENTS_PER_BLOCK, MAX_BLOBS_PER_BLOCK, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, + + FIELD_ELEMENTS_PER_CELL, + FIELD_ELEMENTS_PER_EXT_BLOB, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + MAX_REQUEST_DATA_COLUMN_SIDECARS, + DATA_COLUMN_SIDECAR_SUBNET_COUNT, } = activePreset; //////////// @@ -244,3 +250,12 @@ export const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** KZG_C // ssz.deneb.BlobSidecars.elementType.fixedSize export const BLOBSIDECAR_FIXED_SIZE = ACTIVE_PRESET === PresetName.minimal ? 
131672 : 131928; + +// 128 +export const NUMBER_OF_COLUMNS = (FIELD_ELEMENTS_PER_BLOB * 2) / FIELD_ELEMENTS_PER_CELL; +export const BYTES_PER_CELL = FIELD_ELEMENTS_PER_CELL * BYTES_PER_FIELD_ELEMENT; +export const CELLS_PER_BLOB = FIELD_ELEMENTS_PER_EXT_BLOB / FIELD_ELEMENTS_PER_CELL; + +// ssz.electra.BeaconBlockBody.getPathInfo(['blobKzgCommitments']).gindex +export const KZG_COMMITMENTS_GINDEX = 27; +export const KZG_COMMITMENTS_SUBTREE_INDEX = KZG_COMMITMENTS_GINDEX - 2 ** KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH; diff --git a/packages/params/src/presets/mainnet.ts b/packages/params/src/presets/mainnet.ts index 42a705a07f03..4be082d90ecc 100644 --- a/packages/params/src/presets/mainnet.ts +++ b/packages/params/src/presets/mainnet.ts @@ -118,4 +118,12 @@ export const mainnetPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17, + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: 64, + FIELD_ELEMENTS_PER_EXT_BLOB: 8192, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: 4, + MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384, + DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32, }; diff --git a/packages/params/src/presets/minimal.ts b/packages/params/src/presets/minimal.ts index b940841a0429..e3ccc5909ca3 100644 --- a/packages/params/src/presets/minimal.ts +++ b/packages/params/src/presets/minimal.ts @@ -119,4 +119,12 @@ export const minimalPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 16, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 9, + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: 64, + FIELD_ELEMENTS_PER_EXT_BLOB: 8192, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: 4, + MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384, + DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32, }; diff --git a/packages/params/src/types.ts b/packages/params/src/types.ts index 3c5ba6381131..ee4d4c8b85b2 100644 --- a/packages/params/src/types.ts +++ b/packages/params/src/types.ts @@ -82,6 +82,14 @@ export type BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: number; MAX_BLOBS_PER_BLOCK: number; KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: number; + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: number; + FIELD_ELEMENTS_PER_EXT_BLOB: number; + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: number; + MAX_REQUEST_DATA_COLUMN_SIDECARS: number; + DATA_COLUMN_SIDECAR_SUBNET_COUNT: number; }; /** @@ -167,6 +175,14 @@ export const beaconPresetTypes: BeaconPresetTypes = { MAX_BLOB_COMMITMENTS_PER_BLOCK: "number", MAX_BLOBS_PER_BLOCK: "number", KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: "number", + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: "number", + FIELD_ELEMENTS_PER_EXT_BLOB: "number", + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: "number", + MAX_REQUEST_DATA_COLUMN_SIDECARS: "number", + DATA_COLUMN_SIDECAR_SUBNET_COUNT: "number", }; type BeaconPresetTypes = { diff --git a/packages/types/src/electra/sszTypes.ts b/packages/types/src/electra/sszTypes.ts index 30690a499845..ebd86dd482c7 100644 --- a/packages/types/src/electra/sszTypes.ts +++ b/packages/types/src/electra/sszTypes.ts @@ -1,8 +1,62 @@ -import {ContainerType} from "@chainsafe/ssz"; +import {ContainerType, ByteVectorType, ListCompositeType, VectorCompositeType, ListBasicType} from "@chainsafe/ssz"; +import { + BYTES_PER_FIELD_ELEMENT, + FIELD_ELEMENTS_PER_CELL, + MAX_BLOB_COMMITMENTS_PER_BLOCK, + NUMBER_OF_COLUMNS, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + MAX_REQUEST_DATA_COLUMN_SIDECARS, +} from "@lodestar/params"; + import {ssz as primitiveSsz} from "../primitive/index.js"; +import {ssz as phase0Ssz} from 
"../phase0/index.js"; import {ssz as denebSsz} from "../deneb/index.js"; -const {BLSSignature} = primitiveSsz; +const {BLSSignature, Root, ColumnIndex, Bytes32, Slot, UintNum64} = primitiveSsz; + +export const Cell = new ByteVectorType(BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL); +export const DataColumn = new ListCompositeType(Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK); +export const ExtendedMatrix = new ListCompositeType(Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK * NUMBER_OF_COLUMNS); +export const KzgCommitmentsInclusionProof = new VectorCompositeType(Bytes32, KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH); + +export const DataColumnSidecar = new ContainerType( + { + index: ColumnIndex, + column: DataColumn, + kzgCommitments: denebSsz.BlobKzgCommitments, + kzgProofs: denebSsz.KZGProofs, + signedBlockHeader: phase0Ssz.SignedBeaconBlockHeader, + kzgCommitmentsInclusionProof: KzgCommitmentsInclusionProof, + }, + {typeName: "DataColumnSidecar", jsonCase: "eth2"} +); + +export const DataColumnSidecars = new ListCompositeType(DataColumnSidecar, NUMBER_OF_COLUMNS); + +// ReqResp types +// ============= + +export const DataColumnIdentifier = new ContainerType( + { + blockRoot: Root, + index: ColumnIndex, + }, + {typeName: "DataColumnIdentifier", jsonCase: "eth2"} +); + +export const DataColumnSidecarsByRootRequest = new ListCompositeType( + DataColumnIdentifier, + MAX_REQUEST_DATA_COLUMN_SIDECARS +); + +export const DataColumnSidecarsByRangeRequest = new ContainerType( + { + startSlot: Slot, + count: UintNum64, + columns: new ListBasicType(ColumnIndex, NUMBER_OF_COLUMNS), + }, + {typeName: "DataColumnSidecarsByRangeRequest", jsonCase: "eth2"} +); export const ExecutionPayload = new ContainerType( { diff --git a/packages/types/src/electra/types.ts b/packages/types/src/electra/types.ts index 198259eed1dd..a59bb4e673fa 100644 --- a/packages/types/src/electra/types.ts +++ b/packages/types/src/electra/types.ts @@ -1,7 +1,17 @@ import {ValueOf} from "@chainsafe/ssz"; import * as ssz from "./sszTypes.js"; -export type BlobSidecar = ValueOf; +export type Cell = ValueOf; +export type DataColumn = ValueOf; +export type ExtendedMatrix = ValueOf; +export type KzgCommitmentsInclusionProof = ValueOf; +export type DataColumnSidecar = ValueOf; +export type DataColumnSidecars = ValueOf; + +export type DataColumnIdentifier = ValueOf; +export type DataColumnSidecarsByRootRequest = ValueOf; +export type DataColumnSidecarsByRangeRequest = ValueOf; + export type ExecutionPayloadAndBlobsBundle = ValueOf; export type ExecutionPayload = ValueOf; diff --git a/packages/types/src/primitive/sszTypes.ts b/packages/types/src/primitive/sszTypes.ts index 068a32e2cc17..88193d2902fe 100644 --- a/packages/types/src/primitive/sszTypes.ts +++ b/packages/types/src/primitive/sszTypes.ts @@ -63,3 +63,4 @@ export const BLSSignature = Bytes96; export const Domain = Bytes32; export const ParticipationFlags = new UintNumberType(1, {setBitwiseOR: true}); export const ExecutionAddress = new ExecutionAddressType(); +export const ColumnIndex = UintNum64; diff --git a/packages/types/src/primitive/types.ts b/packages/types/src/primitive/types.ts index 53422cc9b995..90e7eadb178e 100644 --- a/packages/types/src/primitive/types.ts +++ b/packages/types/src/primitive/types.ts @@ -47,3 +47,4 @@ export type ExecutionAddress = Bytes20; export type RootHex = string; /** Non-spec type to signal time is represented in seconds */ export type TimeSeconds = number; +export type ColumnIndex = UintNum64; diff --git a/packages/validator/src/util/params.ts 
b/packages/validator/src/util/params.ts index 0afede39b951..2e2066992898 100644 --- a/packages/validator/src/util/params.ts +++ b/packages/validator/src/util/params.ts @@ -222,5 +222,17 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record
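
For context, a minimal usage sketch (not part of the diff) of the custody derivation added in packages/beacon-node/src/util/dataColumns.ts, assuming the mainnet preset values introduced above (NUMBER_OF_COLUMNS = 128, DATA_COLUMN_SIDECAR_SUBNET_COUNT = 32); the relative import path and the logging are illustrative assumptions:

import {ssz} from "@lodestar/types";
import {getCustodyColumnSubnets, getCustodyColumnIndexes} from "./dataColumns.js";

// The 32-byte node id is the value derived from the ENR (see the initPeerIdAndEnr changes).
const nodeId = ssz.UintBn256.serialize(
  BigInt("84065159290331321853352677657753050104170032838956724170714636178275273565505")
);

// With CUSTODY_REQUIREMENT = 1 the node custodies a single subnet, which maps to
// NUMBER_OF_COLUMNS / DATA_COLUMN_SIDECAR_SUBNET_COUNT = 128 / 32 = 4 column indexes,
// spaced DATA_COLUMN_SIDECAR_SUBNET_COUNT apart: columnIndex = 32 * i + subnetId.
const subnets = getCustodyColumnSubnets(nodeId, 1); // [27] for this node id
const columns = getCustodyColumnIndexes(nodeId, 1); // [27, 59, 91, 123], matching dataColumn.test.ts
console.log({subnets, columns});

getCustodyColumns additionally returns a NUMBER_OF_COLUMNS-sized lookup array (1-based position per custodied column, 0 meaning not custodied) so callers can test membership without scanning the index list.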