diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index ee17ff0c0c08..c6202f3178f9 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -133,7 +133,7 @@ "@lodestar/utils": "^1.19.0", "@lodestar/validator": "^1.19.0", "@multiformats/multiaddr": "^12.1.3", - "c-kzg": "^2.1.2", + "c-kzg": "matthewkeil/c-kzg-4844#67bf9367817f0fa5ebd390aeb8c3ae88bdbc170e", "datastore-core": "^9.1.1", "datastore-level": "^10.1.1", "deepmerge": "^4.3.1", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index 613ffeaacfdd..ac81dd403ff2 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,9 +1,9 @@ import {routes} from "@lodestar/api"; import {ApplicationMethods} from "@lodestar/api/server"; import {computeEpochAtSlot, computeTimeAtSlot, reconstructFullBlockOrContents} from "@lodestar/state-transition"; -import {SLOTS_PER_HISTORICAL_ROOT} from "@lodestar/params"; +import {SLOTS_PER_HISTORICAL_ROOT, ForkName} from "@lodestar/params"; import {sleep, fromHex, toHex} from "@lodestar/utils"; -import {allForks, deneb, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; +import {allForks, deneb, electra, isSignedBlockContents, ProducedBlockSource} from "@lodestar/types"; import { BlockSource, getBlockInput, @@ -11,10 +11,13 @@ import { BlockInput, BlobsSource, BlockInputDataBlobs, + BlockInputDataDataColumns, + DataColumnsSource, + BlockInputData, } from "../../../../chain/blocks/types.js"; import {promiseAllMaybeAsync} from "../../../../util/promises.js"; import {isOptimisticBlock} from "../../../../util/forkChoice.js"; -import {computeBlobSidecars} from "../../../../util/blobs.js"; +import {computeBlobSidecars, computeDataColumnSidecars} from "../../../../util/blobs.js"; import {BlockError, BlockErrorCode, BlockGossipError} from "../../../../chain/errors/index.js"; import {OpSource} from "../../../../metrics/validatorMonitor.js"; import {NetworkEvent} from "../../../../network/index.js"; @@ -53,17 +56,40 @@ export function getBeaconBlockApi({ opts: PublishBlockOpts = {} ) => { const seenTimestampSec = Date.now() / 1000; - let blockForImport: BlockInput, signedBlock: allForks.SignedBeaconBlock, blobSidecars: deneb.BlobSidecars; + let blockForImport: BlockInput, + signedBlock: allForks.SignedBeaconBlock, + blobSidecars: deneb.BlobSidecars, + dataColumnSidecars: electra.DataColumnSidecars; if (isSignedBlockContents(signedBlockOrContents)) { ({signedBlock} = signedBlockOrContents); - blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents); - const blockData = { - fork: config.getForkName(signedBlock.message.slot), - blobs: blobSidecars, - blobsSource: BlobsSource.api, - blobsBytes: blobSidecars.map(() => null), - } as BlockInputDataBlobs; + const fork = config.getForkName(signedBlock.message.slot); + let blockData: BlockInputData; + if (fork === ForkName.electra) { + dataColumnSidecars = computeDataColumnSidecars(config, signedBlock, signedBlockOrContents); + blockData = { + fork, + dataColumnsLen: dataColumnSidecars.length, + // custodyColumns is a 1 based index of ith column present in dataColumns[custodyColumns[i-1]] + dataColumnsIndex: new Uint8Array(Array.from({length: dataColumnSidecars.length}, (_, j) => 1 + j)), + dataColumns: dataColumnSidecars, + dataColumnsBytes: dataColumnSidecars.map(() => null), + dataColumnsSource: DataColumnsSource.api, + 
} as BlockInputDataDataColumns; + blobSidecars = []; + } else if (fork === ForkName.deneb) { + blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents); + blockData = { + fork, + blobs: blobSidecars, + blobsSource: BlobsSource.api, + blobsBytes: blobSidecars.map(() => null), + } as BlockInputDataBlobs; + dataColumnSidecars = []; + } else { + throw Error(`Invalid data fork=${fork} for publish`); + } + blockForImport = getBlockInput.availableData( config, signedBlock, @@ -75,6 +101,7 @@ export function getBeaconBlockApi({ } else { signedBlock = signedBlockOrContents; blobSidecars = []; + dataColumnSidecars = []; blockForImport = getBlockInput.preData(config, signedBlock, BlockSource.api, context?.sszBytes ?? null); } @@ -209,6 +236,7 @@ export function getBeaconBlockApi({ // b) they might require more hops to reach recipients in peerDAS kind of setup where // blobs might need to hop between nodes because of partial subnet subscription ...blobSidecars.map((blobSidecar) => () => network.publishBlobSidecar(blobSidecar)), + ...dataColumnSidecars.map((dataColumnSidecar) => () => network.publishDataColumnSidecar(dataColumnSidecar)), () => network.publishBeaconBlock(signedBlock) as Promise, () => // there is no rush to persist block since we published it to gossip anyway diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 9c467c26ca50..f18745561add 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -1,6 +1,6 @@ import {toHexString} from "@chainsafe/ssz"; import {capella, ssz, allForks, altair} from "@lodestar/types"; -import {ForkSeq, INTERVALS_PER_SLOT, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {ForkName, ForkSeq, INTERVALS_PER_SLOT, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; import { CachedBeaconStateAltair, computeEpochAtSlot, @@ -113,18 +113,23 @@ export async function importBlock( // out of data range blocks and import then in forkchoice although one would not be able to // attest and propose with such head similar to optimistic sync if (blockInput.type === BlockInputType.availableData) { - const {blobsSource, blobs} = blockInput.blockData; - - this.metrics?.importBlock.blobsBySource.inc({blobsSource}); - for (const blobSidecar of blobs) { - const {index, kzgCommitment} = blobSidecar; - this.emitter.emit(routes.events.EventType.blobSidecar, { - blockRoot: blockRootHex, - slot: blockSlot, - index, - kzgCommitment: toHexString(kzgCommitment), - versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)), - }); + const {blockData} = blockInput; + if (blockData.fork === ForkName.deneb) { + const {blobsSource, blobs} = blockData; + + this.metrics?.importBlock.blobsBySource.inc({blobsSource}); + for (const blobSidecar of blobs) { + const {index, kzgCommitment} = blobSidecar; + this.emitter.emit(routes.events.EventType.blobSidecar, { + blockRoot: blockRootHex, + slot: blockSlot, + index, + kzgCommitment: toHexString(kzgCommitment), + versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)), + }); + } + } else if (blockData.fork === ForkName.electra) { + // TODO peerDAS build and emit the event for the datacolumns } } }); diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts index 2996bac7887f..d1722bd3b89b 100644 --- a/packages/beacon-node/src/chain/blocks/types.ts +++ 
b/packages/beacon-node/src/chain/blocks/types.ts @@ -1,6 +1,6 @@ import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition"; import {MaybeValidExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice"; -import {allForks, deneb, Slot, RootHex} from "@lodestar/types"; +import {allForks, deneb, Slot, RootHex, electra, ColumnIndex} from "@lodestar/types"; import {ForkSeq, ForkName} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; @@ -29,23 +29,45 @@ export enum BlobsSource { byRoot = "req_resp_by_root", } +export enum DataColumnsSource { + gossip = "gossip", + api = "api", + byRange = "req_resp_by_range", + byRoot = "req_resp_by_root", +} + export enum GossipedInputType { block = "block", blob = "blob", + dataColumn = "dataColumn", } -type BlobsCacheMap = Map; +export type BlobsCacheMap = Map; +export type DataColumnsCacheMap = Map< + number, + {dataColumnSidecar: electra.DataColumnSidecar; dataColumnBytes: Uint8Array | null} +>; type ForkBlobsInfo = {fork: ForkName.deneb}; type BlobsData = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource}; export type BlockInputDataBlobs = ForkBlobsInfo & BlobsData; -export type BlockInputData = BlockInputDataBlobs; -export type BlockInputBlobs = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource}; -type Availability = {availabilityPromise: Promise; resolveAvailability: (data: T) => void}; +type ForkDataColumnsInfo = {fork: ForkName.electra}; +type DataColumnsData = { + // marker of that columns are to be custodied + dataColumnsLen: number; + dataColumnsIndex: Uint8Array; + dataColumns: electra.DataColumnSidecars; + dataColumnsBytes: (Uint8Array | null)[]; + dataColumnsSource: DataColumnsSource; +}; +export type BlockInputDataDataColumns = ForkDataColumnsInfo & DataColumnsData; +export type BlockInputData = BlockInputDataBlobs | BlockInputDataDataColumns; +type Availability = {availabilityPromise: Promise; resolveAvailability: (data: T) => void}; type CachedBlobs = {blobsCache: BlobsCacheMap} & Availability; -export type CachedData = ForkBlobsInfo & CachedBlobs; +type CachedDataColumns = {dataColumnsCache: DataColumnsCacheMap} & Availability; +export type CachedData = (ForkBlobsInfo & CachedBlobs) | (ForkDataColumnsInfo & CachedDataColumns); export type BlockInput = {block: allForks.SignedBeaconBlock; source: BlockSource; blockBytes: Uint8Array | null} & ( | {type: BlockInputType.preData | BlockInputType.outOfRangeData} @@ -161,6 +183,26 @@ export function getBlockInputBlobs(blobsCache: BlobsCacheMap): Omit { + const dataColumns = []; + const dataColumnsBytes = []; + + for (const index of columnIndexes) { + const dataColumnCache = dataColumnsCache.get(index); + if (dataColumnCache === undefined) { + // check if the index is correct as per the custody columns + throw Error(`Missing dataColumnCache at index=${index}`); + } + const {dataColumnSidecar, dataColumnBytes} = dataColumnCache; + dataColumns.push(dataColumnSidecar); + dataColumnsBytes.push(dataColumnBytes); + } + return {dataColumns, dataColumnsBytes}; +} + export enum AttestationImportOpt { Skip, Force, diff --git a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts index 8393c91063de..9147ecbd82d1 100644 --- a/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts +++ b/packages/beacon-node/src/chain/blocks/verifyBlocksDataAvailability.ts @@ -3,10 +3,19 @@ 
import {DataAvailabilityStatus} from "@lodestar/fork-choice"; import {ChainForkConfig} from "@lodestar/config"; import {deneb, UintNum64} from "@lodestar/types"; import {Logger} from "@lodestar/utils"; +import {ForkName} from "@lodestar/params"; import {BlockError, BlockErrorCode} from "../errors/index.js"; import {validateBlobSidecars} from "../validation/blobSidecar.js"; +import {validateDataColumnsSidecars} from "../validation/dataColumnSidecar.js"; import {Metrics} from "../../metrics/metrics.js"; -import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation, getBlockInput} from "./types.js"; +import { + BlockInput, + BlockInputType, + ImportBlockOpts, + BlobSidecarValidation, + getBlockInput, + BlockInputData, +} from "./types.js"; // we can now wait for full 12 seconds because unavailable block sync will try pulling // the blobs from the network anyway after 500ms of seeing the block @@ -88,27 +97,37 @@ async function maybeValidateBlobs( // run full validation const {block} = blockInput; const blockSlot = block.message.slot; - - const blobsData = - blockInput.type === BlockInputType.availableData - ? blockInput.blockData - : await raceWithCutoff(chain, blockInput, blockInput.cachedData.availabilityPromise); - const {blobs} = blobsData; - const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body; const beaconBlockRoot = chain.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message); - - // if the blob siddecars have been individually verified then we can skip kzg proof check - // but other checks to match blobs with block data still need to be performed - const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; - validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); + const blockData = + blockInput.type === BlockInputType.availableData + ? 
blockInput.blockData + : await raceWithCutoff( + chain, + blockInput, + blockInput.cachedData.availabilityPromise as Promise + ); + + if (blockData.fork === ForkName.deneb) { + const {blobs} = blockData; + + // if the blob siddecars have been individually verified then we can skip kzg proof check + // but other checks to match blobs with block data still need to be performed + const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; + validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck}); + } else if (blockData.fork === ForkName.electra) { + const {dataColumns} = blockData; + const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual; + // might require numColumns, custodyColumns from blockData as input to below + validateDataColumnsSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, dataColumns, {skipProofsCheck}); + } const availableBlockInput = getBlockInput.availableData( chain.config, blockInput.block, blockInput.source, blockInput.blockBytes, - blobsData + blockData ); return {dataAvailabilityStatus: DataAvailabilityStatus.Available, availableBlockInput: availableBlockInput}; } diff --git a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts index b0f5ab159591..8c6c4cb4af26 100644 --- a/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts +++ b/packages/beacon-node/src/chain/blocks/writeBlockInputToDb.ts @@ -1,4 +1,6 @@ +import {ForkName} from "@lodestar/params"; import {toHex} from "@lodestar/utils"; +import {electra, ssz} from "@lodestar/types"; import {BeaconChain} from "../chain.js"; import {BlockInput, BlockInputType} from "./types.js"; @@ -30,19 +32,44 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI }); if (blockInput.type === BlockInputType.availableData || blockInput.type === BlockInputType.dataPromise) { - const blobSidecars = - blockInput.type == BlockInputType.availableData - ? blockInput.blockData.blobs - : // At this point of import blobs are available and can be safely awaited - (await blockInput.cachedData.availabilityPromise).blobs; + const blockData = + blockInput.type === BlockInputType.availableData + ? 
blockInput.blockData + : await blockInput.cachedData.availabilityPromise; - // NOTE: Old blobs are pruned on archive - fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars})); - this.logger.debug("Persisted blobSidecars to hot DB", { - blobsLen: blobSidecars.length, - slot: block.message.slot, - root: blockRootHex, - }); + // NOTE: Old data is pruned on archive + if (blockData.fork === ForkName.deneb) { + const blobSidecars = blockData.blobs; + fnPromises.push(this.db.blobSidecars.add({blockRoot, slot: block.message.slot, blobSidecars})); + this.logger.debug("Persisted blobSidecars to hot DB", { + blobsLen: blobSidecars.length, + slot: block.message.slot, + root: blockRootHex, + }); + } else { + const {dataColumnsLen, dataColumnsIndex, dataColumns: dataColumnSidecars} = blockData; + const blobsLen = (block.message as electra.BeaconBlock).body.blobKzgCommitments.length; + + const dataColumnsSize = + ssz.electra.DataColumnSidecar.minSize + + blobsLen * (ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize); + const slot = block.message.slot; + const writeData = { + blockRoot, + slot, + dataColumnsLen, + dataColumnsSize, + dataColumnsIndex, + dataColumnSidecars, + }; + fnPromises.push(this.db.dataColumnSidecars.add(writeData)); + + this.logger.debug("Persisted dataColumnSidecars to hot DB", { + dataColumnsLen: dataColumnSidecars.length, + slot: block.message.slot, + root: blockRootHex, + }); + } } } @@ -55,17 +82,35 @@ export async function writeBlockInputToDb(this: BeaconChain, blocksInput: BlockI export async function removeEagerlyPersistedBlockInputs(this: BeaconChain, blockInputs: BlockInput[]): Promise { const blockToRemove = []; const blobsToRemove = []; + const dataColumnsToRemove = []; for (const blockInput of blockInputs) { const {block, type} = blockInput; - const blockRoot = this.config.getForkTypes(block.message.slot).BeaconBlock.hashTreeRoot(block.message); + const slot = block.message.slot; + const blockRoot = this.config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); const blockRootHex = toHex(blockRoot); if (!this.forkChoice.hasBlockHex(blockRootHex)) { blockToRemove.push(block); if (type === BlockInputType.availableData) { - const blobSidecars = blockInput.blockData.blobs; - blobsToRemove.push({blockRoot, slot: block.message.slot, blobSidecars}); + const {blockData} = blockInput; + if (blockData.fork === ForkName.deneb) { + const blobSidecars = blockData.blobs; + blobsToRemove.push({blockRoot, slot, blobSidecars}); + } else { + const {dataColumnsLen, dataColumnsIndex, dataColumns: dataColumnSidecars} = blockData; + const blobsLen = (block.message as electra.BeaconBlock).body.blobKzgCommitments.length; + const dataColumnsSize = ssz.electra.Cell.fixedSize * blobsLen; + + dataColumnsToRemove.push({ + blockRoot, + slot, + dataColumnsLen, + dataColumnsSize, + dataColumnsIndex, + dataColumnSidecars, + }); + } } } } @@ -74,5 +119,6 @@ export async function removeEagerlyPersistedBlockInputs(this: BeaconChain, block // TODO: Batch DB operations not with Promise.all but with level db ops this.db.block.batchRemove(blockToRemove), this.db.blobSidecars.batchRemove(blobsToRemove), + this.db.dataColumnSidecars.batchRemove(dataColumnsToRemove), ]); } diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 2f58962f3cc5..f91d9dfd3110 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -43,6 +43,8 @@ import 
{Clock, ClockEvent, IClock} from "../util/clock.js"; import {ensureDir, writeIfNotExist} from "../util/file.js"; import {isOptimisticBlock} from "../util/forkChoice.js"; import {BufferPool} from "../util/bufferPool.js"; +import {NodeId} from "../network/subnets/interface.js"; +import {getCustodyConfig} from "../util/dataColumns.js"; import {BlockProcessor, ImportBlockOpts} from "./blocks/index.js"; import {ChainEventEmitter, ChainEvent} from "./emitter.js"; import { @@ -140,7 +142,7 @@ export class BeaconChain implements IBeaconChain { readonly seenSyncCommitteeMessages = new SeenSyncCommitteeMessages(); readonly seenContributionAndProof: SeenContributionAndProof; readonly seenAttestationDatas: SeenAttestationDatas; - readonly seenGossipBlockInput = new SeenGossipBlockInput(); + readonly seenGossipBlockInput: SeenGossipBlockInput; // Seen cache for liveness checks readonly seenBlockAttesters = new SeenBlockAttesters(); @@ -171,6 +173,7 @@ export class BeaconChain implements IBeaconChain { constructor( opts: IChainOptions, { + nodeId, config, db, logger, @@ -182,6 +185,7 @@ export class BeaconChain implements IBeaconChain { executionEngine, executionBuilder, }: { + nodeId: NodeId; config: BeaconConfig; db: IBeaconDb; logger: Logger; @@ -227,6 +231,8 @@ export class BeaconChain implements IBeaconChain { this.seenAggregatedAttestations = new SeenAggregatedAttestations(metrics); this.seenContributionAndProof = new SeenContributionAndProof(metrics); this.seenAttestationDatas = new SeenAttestationDatas(metrics, this.opts?.attDataCacheSlotDistance); + const custodyConfig = getCustodyConfig(nodeId, config); + this.seenGossipBlockInput = new SeenGossipBlockInput(custodyConfig); this.beaconProposerCache = new BeaconProposerCache(opts); this.checkpointBalancesCache = new CheckpointBalancesCache(); diff --git a/packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts b/packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts new file mode 100644 index 000000000000..cc3d27e4652c --- /dev/null +++ b/packages/beacon-node/src/chain/errors/dataColumnSidecarError.ts @@ -0,0 +1,17 @@ +import {Slot, RootHex} from "@lodestar/types"; +import {GossipActionError} from "./gossipValidation.js"; + +export enum DataColumnSidecarErrorCode { + INVALID_INDEX = "DATA_COLUMN_SIDECAR_ERROR_INVALID_INDEX", + + // following errors are adapted from the block errors + FUTURE_SLOT = "DATA_COLUMN_SIDECAR_ERROR_FUTURE_SLOT", + PARENT_UNKNOWN = "DATA_COLUMN_SIDECAR_ERROR_PARENT_UNKNOWN", +} + +export type DataColumnSidecarErrorType = + | {code: DataColumnSidecarErrorCode.INVALID_INDEX; columnIndex: number; gossipIndex: number} + | {code: DataColumnSidecarErrorCode.FUTURE_SLOT; blockSlot: Slot; currentSlot: Slot} + | {code: DataColumnSidecarErrorCode.PARENT_UNKNOWN; parentRoot: RootHex}; + +export class DataColumnSidecarGossipError extends GossipActionError {} diff --git a/packages/beacon-node/src/chain/errors/index.ts b/packages/beacon-node/src/chain/errors/index.ts index 1bd8f8577305..2159b1562e2d 100644 --- a/packages/beacon-node/src/chain/errors/index.ts +++ b/packages/beacon-node/src/chain/errors/index.ts @@ -1,6 +1,7 @@ export * from "./attestationError.js"; export * from "./attesterSlashingError.js"; export * from "./blobSidecarError.js"; +export * from "./dataColumnSidecarError.js"; export * from "./blockError.js"; export * from "./gossipValidation.js"; export * from "./proposerSlashingError.js"; diff --git a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts 
b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts index 7e8d8a7ebcbc..eaf52bc1cf69 100644 --- a/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts +++ b/packages/beacon-node/src/chain/seenCache/seenGossipBlockInput.ts @@ -1,8 +1,8 @@ import {toHexString} from "@chainsafe/ssz"; -import {deneb, RootHex, ssz, allForks} from "@lodestar/types"; +import {deneb, RootHex, ssz, allForks, electra} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {pruneSetToMax} from "@lodestar/utils"; -import {BLOBSIDECAR_FIXED_SIZE, isForkBlobs, ForkName} from "@lodestar/params"; +import {BLOBSIDECAR_FIXED_SIZE, isForkBlobs, ForkName, NUMBER_OF_COLUMNS} from "@lodestar/params"; import { BlockInput, @@ -14,8 +14,12 @@ import { GossipedInputType, getBlockInputBlobs, BlobsSource, + DataColumnsSource, + getBlockInputDataColumns, + BlockInputDataDataColumns, } from "../blocks/types.js"; import {Metrics} from "../../metrics/index.js"; +import {CustodyConfig} from "../../util/dataColumns.js"; export enum BlockInputAvailabilitySource { GOSSIP = "gossip", @@ -24,7 +28,12 @@ export enum BlockInputAvailabilitySource { type GossipedBlockInput = | {type: GossipedInputType.block; signedBlock: allForks.SignedBeaconBlock; blockBytes: Uint8Array | null} - | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null}; + | {type: GossipedInputType.blob; blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null} + | { + type: GossipedInputType.dataColumn; + dataColumnSidecar: electra.DataColumnSidecar; + dataColumnBytes: Uint8Array | null; + }; type BlockInputCacheType = { fork: ForkName; @@ -51,6 +60,7 @@ const MAX_GOSSIPINPUT_CACHE = 5; */ export class SeenGossipBlockInput { private blockInputCache = new Map(); + constructor(private custodyConfig: CustodyConfig) {} prune(): void { pruneSetToMax(this.blockInputCache, MAX_GOSSIPINPUT_CACHE); @@ -67,11 +77,16 @@ export class SeenGossipBlockInput { ): | { blockInput: BlockInput; - blockInputMeta: {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number}; + blockInputMeta: + | {pending: GossipedInputType.blob | null; haveBlobs: number; expectedBlobs: number} + | {pending: GossipedInputType.dataColumn | null; haveColumns: number; expectedColumns: number}; } | { blockInput: NullBlockInput; - blockInputMeta: {pending: GossipedInputType.block; haveBlobs: number; expectedBlobs: null}; + blockInputMeta: {pending: GossipedInputType.block} & ( + | {haveBlobs: number; expectedBlobs: null} + | {haveColumns: number; expectedColumns: null} + ); } { let blockHex; let blockCache; @@ -88,13 +103,16 @@ export class SeenGossipBlockInput { blockCache.block = signedBlock; blockCache.blockBytes = blockBytes; - } else { + } else if (gossipedInput.type === GossipedInputType.blob) { const {blobSidecar, blobBytes} = gossipedInput; const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(blobSidecar.signedBlockHeader.message); fork = config.getForkName(blobSidecar.signedBlockHeader.message.slot); blockHex = toHexString(blockRoot); blockCache = this.blockInputCache.get(blockHex) ?? 
getEmptyBlockInputCacheEntry(fork); + if (blockCache.cachedData?.fork !== ForkName.deneb) { + throw Error(`blob data at non deneb fork=${blockCache.fork}`); + } // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions blockCache.cachedData?.blobsCache.set(blobSidecar.index, { @@ -102,6 +120,26 @@ export class SeenGossipBlockInput { // easily splice out the unsigned message as blob is a fixed length type blobBytes: blobBytes?.slice(0, BLOBSIDECAR_FIXED_SIZE) ?? null, }); + } else if (gossipedInput.type === GossipedInputType.dataColumn) { + const {dataColumnSidecar, dataColumnBytes} = gossipedInput; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(dataColumnSidecar.signedBlockHeader.message); + fork = config.getForkName(dataColumnSidecar.signedBlockHeader.message.slot); + + blockHex = toHexString(blockRoot); + blockCache = this.blockInputCache.get(blockHex) ?? getEmptyBlockInputCacheEntry(fork); + if (blockCache.cachedData?.fork !== ForkName.electra) { + throw Error(`blob data at non electra fork=${blockCache.fork}`); + } + + // TODO: freetheblobs check if its the same blob or a duplicate and throw/take actions + blockCache.cachedData?.dataColumnsCache.set(dataColumnSidecar.index, { + dataColumnSidecar, + // easily splice out the unsigned message as blob is a fixed length type + dataColumnBytes: dataColumnBytes?.slice(0, dataColumnBytes.length) ?? null, + }); + } else { + // somehow helps resolve typescript that all types have been exausted + throw Error("Invalid gossipedInput type"); } if (!this.blockInputCache.has(blockHex)) { @@ -121,72 +159,192 @@ export class SeenGossipBlockInput { if (cachedData === undefined || !isForkBlobs(cachedData.fork)) { throw Error("Missing or Invalid fork cached Data for deneb+ block"); } - const {blobsCache, resolveAvailability} = cachedData; - // block is available, check if all blobs have shown up - const {slot, body} = signedBlock.message; - const {blobKzgCommitments} = body as deneb.BeaconBlockBody; - const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + if (cachedData.fork === ForkName.deneb) { + const {blobsCache} = cachedData; - if (blobKzgCommitments.length < blobsCache.size) { - throw Error( - `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` - ); + // block is available, check if all blobs have shown up + const {slot, body} = signedBlock.message; + const {blobKzgCommitments} = body as deneb.BeaconBlockBody; + const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + + if (blobKzgCommitments.length < blobsCache.size) { + throw Error( + `Received more blobs=${blobsCache.size} than commitments=${blobKzgCommitments.length} for ${blockInfo}` + ); + } + + if (blobKzgCommitments.length === blobsCache.size) { + const allBlobs = getBlockInputBlobs(blobsCache); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.GOSSIP}); + const {blobs} = allBlobs; + const blockData = { + fork: cachedData.fork, + ...allBlobs, + blobsSource: BlobsSource.gossip, + }; + const blockInput = getBlockInput.availableData( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + blockData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: {pending: null, haveBlobs: blobs.length, expectedBlobs: blobKzgCommitments.length}, + }; + } else { + const blockInput = getBlockInput.dataPromise( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? 
null, + cachedData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: { + pending: GossipedInputType.blob, + haveBlobs: blobsCache.size, + expectedBlobs: blobKzgCommitments.length, + }, + }; + } + } else if (cachedData.fork === ForkName.electra) { + const {dataColumnsCache} = cachedData; + + // block is available, check if all blobs have shown up + const {slot} = signedBlock.message; + const blockInfo = `blockHex=${blockHex}, slot=${slot}`; + + if (NUMBER_OF_COLUMNS < dataColumnsCache.size) { + throw Error( + `Received more dataColumns=${dataColumnsCache.size} than columns=${NUMBER_OF_COLUMNS} for ${blockInfo}` + ); + } + + // get the custody columns and see if we have got all the requisite columns + const blobKzgCommitmentsLen = (signedBlock.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + if (blobKzgCommitmentsLen === 0) { + const blockData = { + fork: cachedData.fork, + dataColumns: [], + dataColumnsBytes: [], + dataColumnsLen: 0, + dataColumnsIndex: new Uint8Array(NUMBER_OF_COLUMNS), + dataColumnsSource: DataColumnsSource.gossip, + }; + + const blockInput = getBlockInput.availableData( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + blockData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: {pending: null, haveColumns: 0, expectedColumns: 0}, + }; + } + + const custodyIndexesPresent = + dataColumnsCache.size >= this.custodyConfig.custodyColumnsLen && + this.custodyConfig.custodyColumns.reduce( + (acc, columnIndex) => acc && dataColumnsCache.has(columnIndex), + true + ); + + if (custodyIndexesPresent) { + const allDataColumns = getBlockInputDataColumns(dataColumnsCache, this.custodyConfig.custodyColumns); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.GOSSIP}); + const {dataColumns} = allDataColumns; + const blockData = { + fork: cachedData.fork, + ...allDataColumns, + dataColumnsLen: this.custodyConfig.custodyColumnsLen, + dataColumnsIndex: this.custodyConfig.custodyColumnsIndex, + dataColumnsSource: DataColumnsSource.gossip, + }; + const blockInput = getBlockInput.availableData( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + blockData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: { + pending: null, + haveColumns: dataColumns.length, + expectedColumns: this.custodyConfig.custodyColumnsLen, + }, + }; + } else { + const blockInput = getBlockInput.dataPromise( + config, + signedBlock, + BlockSource.gossip, + blockBytes ?? null, + cachedData + ); + + resolveBlockInput(blockInput); + return { + blockInput, + blockInputMeta: { + pending: GossipedInputType.dataColumn, + haveColumns: dataColumnsCache.size, + expectedColumns: this.custodyConfig.custodyColumnsLen, + }, + }; + } + } else { + throw Error(`Invalid fork=${fork}`); + } + } else { + // will need to wait for the block to showup + if (cachedData === undefined) { + throw Error("Missing cachedData for deneb+ blobs"); } - if (blobKzgCommitments.length === blobsCache.size) { - const allBlobs = getBlockInputBlobs(blobsCache); - const blockData = {...allBlobs, blobsSource: BlobsSource.gossip, fork: cachedData.fork}; - resolveAvailability(blockData); - metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.GOSSIP}); - const blockInput = getBlockInput.availableData( - config, - signedBlock, - BlockSource.gossip, - blockBytes ?? 
null, - blockData - ); + if (cachedData.fork === ForkName.deneb) { + const {blobsCache} = cachedData; - resolveBlockInput(blockInput); return { - blockInput, - blockInputMeta: {pending: null, haveBlobs: allBlobs.blobs.length, expectedBlobs: blobKzgCommitments.length}, + blockInput: { + block: null, + blockRootHex: blockHex, + cachedData, + blockInputPromise, + }, + blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null}, }; - } else { - const blockInput = getBlockInput.dataPromise( - config, - signedBlock, - BlockSource.gossip, - blockBytes ?? null, - cachedData - ); + } else if (fork === ForkName.electra) { + const {dataColumnsCache} = cachedData; - resolveBlockInput(blockInput); return { - blockInput, - blockInputMeta: { - pending: GossipedInputType.blob, - haveBlobs: blobsCache.size, - expectedBlobs: blobKzgCommitments.length, + blockInput: { + block: null, + blockRootHex: blockHex, + cachedData, + blockInputPromise, }, + blockInputMeta: {pending: GossipedInputType.block, haveColumns: dataColumnsCache.size, expectedColumns: null}, }; + } else { + throw Error(`invalid fork=${fork} data not implemented`); } - } else { - // will need to wait for the block to showup - if (cachedData === undefined) { - throw Error("Missing cachedData for deneb+ blobs"); - } - const {blobsCache} = cachedData; - - return { - blockInput: { - block: null, - blockRootHex: blockHex, - cachedData, - blockInputPromise, - }, - blockInputMeta: {pending: GossipedInputType.block, haveBlobs: blobsCache.size, expectedBlobs: null}, - }; } } } @@ -205,16 +363,38 @@ function getEmptyBlockInputCacheEntry(fork: ForkName): BlockInputCacheType { return {fork, blockInputPromise, resolveBlockInput}; } - let resolveAvailability: ((blobs: BlockInputDataBlobs) => void) | null = null; - const availabilityPromise = new Promise((resolveCB) => { - resolveAvailability = resolveCB; - }); + if (fork === ForkName.deneb) { + let resolveAvailability: ((blobs: BlockInputDataBlobs) => void) | null = null; + const availabilityPromise = new Promise((resolveCB) => { + resolveAvailability = resolveCB; + }); - if (resolveAvailability === null) { - throw Error("Promise Constructor was not executed immediately"); - } + if (resolveAvailability === null) { + throw Error("Promise Constructor was not executed immediately"); + } + + const blobsCache = new Map(); + const cachedData: CachedData = {fork, blobsCache, availabilityPromise, resolveAvailability}; + return {fork, blockInputPromise, resolveBlockInput, cachedData}; + } else if (fork === ForkName.electra) { + let resolveAvailability: ((blobs: BlockInputDataDataColumns) => void) | null = null; + const availabilityPromise = new Promise((resolveCB) => { + resolveAvailability = resolveCB; + }); - const blobsCache = new Map(); - const cachedData: CachedData = {fork, blobsCache, availabilityPromise, resolveAvailability}; - return {fork, blockInputPromise, resolveBlockInput, cachedData}; + if (resolveAvailability === null) { + throw Error("Promise Constructor was not executed immediately"); + } + + const dataColumnsCache = new Map(); + const cachedData: CachedData = { + fork, + dataColumnsCache, + availabilityPromise, + resolveAvailability, + }; + return {fork, blockInputPromise, resolveBlockInput, cachedData}; + } else { + throw Error(`Invalid fork=${fork} for getEmptyBlockInputCacheEntry`); + } } diff --git a/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts b/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts new file mode 100644 index 
000000000000..004a4a57c25e --- /dev/null +++ b/packages/beacon-node/src/chain/validation/dataColumnSidecar.ts @@ -0,0 +1,66 @@ +import { + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + KZG_COMMITMENTS_SUBTREE_INDEX, + DATA_COLUMN_SIDECAR_SUBNET_COUNT, + NUMBER_OF_COLUMNS, +} from "@lodestar/params"; +import {ssz, deneb, electra, Slot, Root} from "@lodestar/types"; +import {verifyMerkleBranch} from "@lodestar/utils"; + +import {DataColumnSidecarGossipError, DataColumnSidecarErrorCode} from "../errors/dataColumnSidecarError.js"; +import {GossipAction} from "../errors/gossipValidation.js"; +import {IBeaconChain} from "../interface.js"; + +export async function validateGossipDataColumnSidecar( + chain: IBeaconChain, + dataColumnSideCar: electra.DataColumnSidecar, + gossipIndex: number +): Promise { + const dataColumnSlot = dataColumnSideCar.signedBlockHeader.message.slot; + + if ( + dataColumnSideCar.index > NUMBER_OF_COLUMNS || + dataColumnSideCar.index % DATA_COLUMN_SIDECAR_SUBNET_COUNT !== gossipIndex + ) { + throw new DataColumnSidecarGossipError(GossipAction.REJECT, { + code: DataColumnSidecarErrorCode.INVALID_INDEX, + columnIndex: dataColumnSideCar.index, + gossipIndex, + }); + } + + // [IGNORE] The sidecar is not from a future slot (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance) -- + // i.e. validate that sidecar.slot <= current_slot (a client MAY queue future blocks for processing at + // the appropriate slot). + const currentSlotWithGossipDisparity = chain.clock.currentSlotWithGossipDisparity; + if (currentSlotWithGossipDisparity < dataColumnSlot) { + throw new DataColumnSidecarGossipError(GossipAction.IGNORE, { + code: DataColumnSidecarErrorCode.FUTURE_SLOT, + currentSlot: currentSlotWithGossipDisparity, + blockSlot: dataColumnSlot, + }); + } + + validateInclusionProof(dataColumnSideCar); +} + +export function validateDataColumnsSidecars( + _blockSlot: Slot, + _blockRoot: Root, + _expectedKzgCommitments: deneb.BlobKzgCommitments, + _dataColumnSidecars: electra.DataColumnSidecars, + _opts: {skipProofsCheck: boolean} = {skipProofsCheck: false} +): void { + // stubbed + return; +} + +function validateInclusionProof(dataColumnSidecar: electra.DataColumnSidecar): boolean { + return verifyMerkleBranch( + ssz.deneb.BlobKzgCommitments.hashTreeRoot(dataColumnSidecar.kzgCommitments), + dataColumnSidecar.kzgCommitmentsInclusionProof, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + KZG_COMMITMENTS_SUBTREE_INDEX, + dataColumnSidecar.signedBlockHeader.message.bodyRoot + ); +} diff --git a/packages/beacon-node/src/db/beacon.ts b/packages/beacon-node/src/db/beacon.ts index 07cc47fa54d8..41b6daccd42d 100644 --- a/packages/beacon-node/src/db/beacon.ts +++ b/packages/beacon-node/src/db/beacon.ts @@ -18,6 +18,8 @@ import { BackfilledRanges, BlobSidecarsRepository, BlobSidecarsArchiveRepository, + DataColumnSidecarsRepository, + DataColumnSidecarsArchiveRepository, BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; @@ -34,6 +36,8 @@ export class BeaconDb implements IBeaconDb { blobSidecars: BlobSidecarsRepository; blobSidecarsArchive: BlobSidecarsArchiveRepository; + dataColumnSidecars: DataColumnSidecarsRepository; + dataColumnSidecarsArchive: DataColumnSidecarsArchiveRepository; stateArchive: StateArchiveRepository; checkpointState: CheckpointStateRepository; @@ -67,6 +71,8 @@ export class BeaconDb implements IBeaconDb { this.blobSidecars = new BlobSidecarsRepository(config, db); this.blobSidecarsArchive = new 
BlobSidecarsArchiveRepository(config, db); + this.dataColumnSidecars = new DataColumnSidecarsRepository(config, db); + this.dataColumnSidecarsArchive = new DataColumnSidecarsArchiveRepository(config, db); this.stateArchive = new StateArchiveRepository(config, db); this.checkpointState = new CheckpointStateRepository(config, db); diff --git a/packages/beacon-node/src/db/buckets.ts b/packages/beacon-node/src/db/buckets.ts index 9dffd0608d52..5f6a08df18b7 100644 --- a/packages/beacon-node/src/db/buckets.ts +++ b/packages/beacon-node/src/db/buckets.ts @@ -61,6 +61,9 @@ export enum Bucket { // 54 was for bestPartialLightClientUpdate, allocate a fresh one // lightClient_bestLightClientUpdate = 55, // SyncPeriod -> LightClientUpdate // DEPRECATED on v1.5.0 lightClient_bestLightClientUpdate = 56, // SyncPeriod -> [Slot, LightClientUpdate] + + allForks_dataColumnSidecars = 57, // ELECTRA BeaconBlockRoot -> DataColumnSidecars + allForks_dataColumnSidecarsArchive = 58, // ELECTRA BeaconBlockSlot -> DataColumnSidecars } export function getBucketNameByValue(enumValue: T): keyof typeof Bucket { diff --git a/packages/beacon-node/src/db/interface.ts b/packages/beacon-node/src/db/interface.ts index 6ffb8992f635..cf55d3d95a44 100644 --- a/packages/beacon-node/src/db/interface.ts +++ b/packages/beacon-node/src/db/interface.ts @@ -16,6 +16,8 @@ import { BackfilledRanges, BlobSidecarsRepository, BlobSidecarsArchiveRepository, + DataColumnSidecarsRepository, + DataColumnSidecarsArchiveRepository, BLSToExecutionChangeRepository, } from "./repositories/index.js"; import {PreGenesisState, PreGenesisStateLastProcessedBlock} from "./single/index.js"; @@ -34,6 +36,8 @@ export interface IBeaconDb { blobSidecars: BlobSidecarsRepository; blobSidecarsArchive: BlobSidecarsArchiveRepository; + dataColumnSidecars: DataColumnSidecarsRepository; + dataColumnSidecarsArchive: DataColumnSidecarsArchiveRepository; // finalized states stateArchive: StateArchiveRepository; diff --git a/packages/beacon-node/src/db/repositories/dataColumnSidecars.ts b/packages/beacon-node/src/db/repositories/dataColumnSidecars.ts new file mode 100644 index 000000000000..fe420c7ab81d --- /dev/null +++ b/packages/beacon-node/src/db/repositories/dataColumnSidecars.ts @@ -0,0 +1,50 @@ +import {ValueOf, ContainerType, ByteVectorType} from "@chainsafe/ssz"; +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {ssz} from "@lodestar/types"; +import {NUMBER_OF_COLUMNS} from "@lodestar/params"; + +import {Bucket, getBucketNameByValue} from "../buckets.js"; + +export const dataColumnSidecarsWrapperSsz = new ContainerType( + { + blockRoot: ssz.Root, + slot: ssz.Slot, + dataColumnsLen: ssz.Uint16, + dataColumnsSize: ssz.UintNum64, + // // each byte[i] tells what index (1 based) the column i is stored, 0 means not custodied + // max value to represent will be 128 which can be represented in a byte + dataColumnsIndex: new ByteVectorType(NUMBER_OF_COLUMNS), + dataColumnSidecars: ssz.electra.DataColumnSidecars, + }, + {typeName: "DataColumnSidecarsWrapper", jsonCase: "eth2"} +); + +export type DataColumnSidecarsWrapper = ValueOf; +export const BLOCK_ROOT_IN_WRAPPER_INDEX = 0; +export const BLOCK_SLOT_IN_WRAPPER_INDEX = 32; +export const NUM_COLUMNS_IN_WRAPPER_INDEX = 40; +export const COLUMN_SIZE_IN_WRAPPER_INDEX = 42; +export const CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX = 50; +export const DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX = + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + NUMBER_OF_COLUMNS + 4 + 4 * NUMBER_OF_COLUMNS; + 
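
Editorial note (not part of the diff): the wrapper's dataColumnsIndex field is described above as a byte map where byte i holds the 1-based position of column i inside dataColumnSidecars, with 0 meaning the column is not custodied; the API publish path builds it the same way. A minimal sketch of how such a map could be built and read under those assumptions — buildDataColumnsIndex and getSidecarForColumn are hypothetical helpers, not code from this PR:

// Editor's sketch, assuming the wrapper stores only the custodied columns in order.
import {NUMBER_OF_COLUMNS} from "@lodestar/params";
import {electra, ColumnIndex} from "@lodestar/types";

function buildDataColumnsIndex(sidecars: electra.DataColumnSidecars): Uint8Array {
  // byte[column] = 1-based position of that column in `sidecars`, 0 = not custodied
  const index = new Uint8Array(NUMBER_OF_COLUMNS);
  sidecars.forEach((sidecar, pos) => {
    index[sidecar.index] = pos + 1;
  });
  return index;
}

function getSidecarForColumn(
  wrapper: {dataColumnsIndex: Uint8Array; dataColumnSidecars: electra.DataColumnSidecars},
  column: ColumnIndex
): electra.DataColumnSidecar | null {
  const oneBasedPos = wrapper.dataColumnsIndex[column];
  return oneBasedPos === 0 ? null : wrapper.dataColumnSidecars[oneBasedPos - 1];
}
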
+/** + * dataColumnSidecarsWrapper by block root (= hash_tree_root(SignedBeaconBlock.message)) + * + * Used to store unfinalized DataColumnSidecars + */ +export class DataColumnSidecarsRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + const bucket = Bucket.allForks_dataColumnSidecars; + super(config, db, bucket, dataColumnSidecarsWrapperSsz, getBucketNameByValue(bucket)); + } + + /** + * Id is hashTreeRoot of unsigned BeaconBlock + */ + getId(value: DataColumnSidecarsWrapper): Uint8Array { + const {blockRoot} = value; + return blockRoot; + } +} diff --git a/packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts b/packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts new file mode 100644 index 000000000000..08a71dcbf646 --- /dev/null +++ b/packages/beacon-node/src/db/repositories/dataColumnSidecarsArchive.ts @@ -0,0 +1,28 @@ +import {ChainForkConfig} from "@lodestar/config"; +import {Db, Repository} from "@lodestar/db"; +import {Slot} from "@lodestar/types"; +import {bytesToInt} from "@lodestar/utils"; +import {Bucket, getBucketNameByValue} from "../buckets.js"; +import {dataColumnSidecarsWrapperSsz, DataColumnSidecarsWrapper} from "./dataColumnSidecars.js"; + +/** + * dataColumnSidecarsWrapper by slot + * + * Used to store finalized DataColumnSidecars + */ +export class DataColumnSidecarsArchiveRepository extends Repository { + constructor(config: ChainForkConfig, db: Db) { + const bucket = Bucket.allForks_dataColumnSidecarsArchive; + super(config, db, bucket, dataColumnSidecarsWrapperSsz, getBucketNameByValue(bucket)); + } + + // Handle key as slot + + getId(value: DataColumnSidecarsWrapper): Slot { + return value.slot; + } + + decodeKey(data: Uint8Array): number { + return bytesToInt(super.decodeKey(data) as unknown as Uint8Array, "be"); + } +} diff --git a/packages/beacon-node/src/db/repositories/index.ts b/packages/beacon-node/src/db/repositories/index.ts index 4a66a0ba9876..72e0c7224148 100644 --- a/packages/beacon-node/src/db/repositories/index.ts +++ b/packages/beacon-node/src/db/repositories/index.ts @@ -1,5 +1,7 @@ export {BlobSidecarsRepository} from "./blobSidecars.js"; export {BlobSidecarsArchiveRepository} from "./blobSidecarsArchive.js"; +export {DataColumnSidecarsRepository} from "./dataColumnSidecars.js"; +export {DataColumnSidecarsArchiveRepository} from "./dataColumnSidecarsArchive.js"; export {BlockRepository} from "./block.js"; export {BlockArchiveRepository} from "./blockArchive.js"; diff --git a/packages/beacon-node/src/network/events.ts b/packages/beacon-node/src/network/events.ts index 45759de61073..d285f121c0de 100644 --- a/packages/beacon-node/src/network/events.ts +++ b/packages/beacon-node/src/network/events.ts @@ -1,6 +1,6 @@ import {EventEmitter} from "events"; import {PeerId, TopicValidatorResult} from "@libp2p/interface"; -import {phase0, RootHex} from "@lodestar/types"; +import {phase0, RootHex, ColumnIndex} from "@lodestar/types"; import {BlockInput, NullBlockInput} from "../chain/blocks/types.js"; import {StrictEventEmitterSingleArg} from "../util/strictEvents.js"; import {PeerIdStr} from "../util/peerId.js"; @@ -27,7 +27,7 @@ export enum NetworkEvent { } export type NetworkEventData = { - [NetworkEvent.peerConnected]: {peer: PeerIdStr; status: phase0.Status}; + [NetworkEvent.peerConnected]: {peer: PeerIdStr; status: phase0.Status; dataColumns: ColumnIndex[]}; [NetworkEvent.peerDisconnected]: {peer: PeerIdStr}; [NetworkEvent.reqRespRequest]: {request: RequestTypedContainer; peer: PeerId}; 
[NetworkEvent.unknownBlockParent]: {blockInput: BlockInput; peer: PeerIdStr}; diff --git a/packages/beacon-node/src/network/gossip/interface.ts b/packages/beacon-node/src/network/gossip/interface.ts index df26c2328c70..88d1d8d40776 100644 --- a/packages/beacon-node/src/network/gossip/interface.ts +++ b/packages/beacon-node/src/network/gossip/interface.ts @@ -2,7 +2,7 @@ import {Libp2p} from "libp2p"; import {Message, TopicValidatorResult} from "@libp2p/interface"; import {PeerIdStr} from "@chainsafe/libp2p-gossipsub/types"; import {ForkName} from "@lodestar/params"; -import {allForks, altair, capella, deneb, phase0, Slot} from "@lodestar/types"; +import {allForks, altair, capella, deneb, phase0, Slot, electra} from "@lodestar/types"; import {BeaconConfig} from "@lodestar/config"; import {Logger} from "@lodestar/utils"; import {IBeaconChain} from "../../chain/index.js"; @@ -13,6 +13,7 @@ import {GossipActionError} from "../../chain/errors/gossipValidation.js"; export enum GossipType { beacon_block = "beacon_block", blob_sidecar = "blob_sidecar", + data_column_sidecar = "data_column_sidecar", beacon_aggregate_and_proof = "beacon_aggregate_and_proof", beacon_attestation = "beacon_attestation", voluntary_exit = "voluntary_exit", @@ -41,6 +42,7 @@ export interface IGossipTopic { export type GossipTopicTypeMap = { [GossipType.beacon_block]: {type: GossipType.beacon_block}; [GossipType.blob_sidecar]: {type: GossipType.blob_sidecar; index: number}; + [GossipType.data_column_sidecar]: {type: GossipType.data_column_sidecar; index: number}; [GossipType.beacon_aggregate_and_proof]: {type: GossipType.beacon_aggregate_and_proof}; [GossipType.beacon_attestation]: {type: GossipType.beacon_attestation; subnet: number}; [GossipType.voluntary_exit]: {type: GossipType.voluntary_exit}; @@ -71,6 +73,7 @@ export type SSZTypeOfGossipTopic = T extends {type: infer export type GossipTypeMap = { [GossipType.beacon_block]: allForks.SignedBeaconBlock; [GossipType.blob_sidecar]: deneb.BlobSidecar; + [GossipType.data_column_sidecar]: electra.DataColumnSidecar; [GossipType.beacon_aggregate_and_proof]: phase0.SignedAggregateAndProof; [GossipType.beacon_attestation]: phase0.Attestation; [GossipType.voluntary_exit]: phase0.SignedVoluntaryExit; @@ -86,6 +89,7 @@ export type GossipTypeMap = { export type GossipFnByType = { [GossipType.beacon_block]: (signedBlock: allForks.SignedBeaconBlock) => Promise | void; [GossipType.blob_sidecar]: (blobSidecar: deneb.BlobSidecar) => Promise | void; + [GossipType.data_column_sidecar]: (blobSidecar: electra.DataColumnSidecar) => Promise | void; [GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: phase0.SignedAggregateAndProof) => Promise | void; [GossipType.beacon_attestation]: (attestation: phase0.Attestation) => Promise | void; [GossipType.voluntary_exit]: (voluntaryExit: phase0.SignedVoluntaryExit) => Promise | void; diff --git a/packages/beacon-node/src/network/gossip/topic.ts b/packages/beacon-node/src/network/gossip/topic.ts index c5cd68ffa1de..7896d9abed25 100644 --- a/packages/beacon-node/src/network/gossip/topic.ts +++ b/packages/beacon-node/src/network/gossip/topic.ts @@ -7,6 +7,7 @@ import { SYNC_COMMITTEE_SUBNET_COUNT, isForkLightClient, MAX_BLOBS_PER_BLOCK, + DATA_COLUMN_SIDECAR_SUBNET_COUNT, } from "@lodestar/params"; import {GossipAction, GossipActionError, GossipErrorCode} from "../../chain/errors/gossipValidation.js"; @@ -75,6 +76,8 @@ function stringifyGossipTopicType(topic: GossipTopic): string { return `${topic.type}_${topic.subnet}`; case 
GossipType.blob_sidecar: return `${topic.type}_${topic.index}`; + case GossipType.data_column_sidecar: + return `${topic.type}_${topic.index}`; } } @@ -86,6 +89,8 @@ export function getGossipSSZType(topic: GossipTopic) { return ssz[topic.fork].SignedBeaconBlock; case GossipType.blob_sidecar: return ssz.deneb.BlobSidecar; + case GossipType.data_column_sidecar: + return ssz.electra.DataColumnSidecar; case GossipType.beacon_aggregate_and_proof: return ssz.phase0.SignedAggregateAndProof; case GossipType.beacon_attestation: @@ -189,6 +194,13 @@ export function parseGossipTopic(forkDigestContext: ForkDigestContext, topicStr: return {type: GossipType.blob_sidecar, index, fork, encoding}; } + if (gossipTypeStr.startsWith(GossipType.data_column_sidecar)) { + const indexStr = gossipTypeStr.slice(GossipType.data_column_sidecar.length + 1); // +1 for '_' concatenating the topic name and the index + const index = parseInt(indexStr, 10); + if (Number.isNaN(index)) throw Error(`index ${indexStr} is not a number`); + return {type: GossipType.data_column_sidecar, index, fork, encoding}; + } + throw Error(`Unknown gossip type ${gossipTypeStr}`); } catch (e) { (e as Error).message = `Invalid gossip topic ${topicStr}: ${(e as Error).message}`; @@ -212,6 +224,13 @@ export function getCoreTopicsAtFork( {type: GossipType.attester_slashing}, ]; + // After Electra also track data_column_sidecar_{index} + if (ForkSeq[fork] >= ForkSeq.electra) { + for (let index = 0; index < DATA_COLUMN_SIDECAR_SUBNET_COUNT; index++) { + topics.push({type: GossipType.data_column_sidecar, index}); + } + } + // After Deneb also track blob_sidecar_{index} if (ForkSeq[fork] >= ForkSeq.deneb) { for (let index = 0; index < MAX_BLOBS_PER_BLOCK; index++) { @@ -262,6 +281,7 @@ function parseEncodingStr(encodingStr: string): GossipEncoding { export const gossipTopicIgnoreDuplicatePublishError: Record = { [GossipType.beacon_block]: true, [GossipType.blob_sidecar]: true, + [GossipType.data_column_sidecar]: true, [GossipType.beacon_aggregate_and_proof]: true, [GossipType.beacon_attestation]: true, [GossipType.voluntary_exit]: true, diff --git a/packages/beacon-node/src/network/interface.ts b/packages/beacon-node/src/network/interface.ts index aeeb61f1feb2..65ee7295a306 100644 --- a/packages/beacon-node/src/network/interface.ts +++ b/packages/beacon-node/src/network/interface.ts @@ -16,13 +16,15 @@ import { import type {AddressManager, ConnectionManager, Registrar, TransportManager} from "@libp2p/interface-internal"; import type {Datastore} from "interface-datastore"; import {Identify} from "@chainsafe/libp2p-identify"; -import {Slot, SlotRootHex, allForks, altair, capella, deneb, phase0} from "@lodestar/types"; +import {Slot, SlotRootHex, allForks, altair, capella, deneb, phase0, electra} from "@lodestar/types"; import {PeerIdStr} from "../util/peerId.js"; +import {CustodyConfig} from "../util/dataColumns.js"; import {INetworkEventBus} from "./events.js"; import {INetworkCorePublic} from "./core/types.js"; import {GossipType} from "./gossip/interface.js"; import {PendingGossipsubMessage} from "./processor/types.js"; import {PeerAction} from "./peers/index.js"; +import {NodeId} from "./subnets/interface.js"; export type WithBytes = {data: T; bytes: Uint8Array}; @@ -36,6 +38,9 @@ export type WithBytes = {data: T; bytes: Uint8Array}; */ export interface INetwork extends INetworkCorePublic { + readonly nodeId: NodeId; + readonly peerId: PeerId; + readonly custodyConfig: CustodyConfig; readonly closed: boolean; events: INetworkEventBus; @@ -57,10 
+62,19 @@ export interface INetwork extends INetworkCorePublic { ): Promise[]>; sendBlobSidecarsByRange(peerId: PeerIdStr, request: deneb.BlobSidecarsByRangeRequest): Promise; sendBlobSidecarsByRoot(peerId: PeerIdStr, request: deneb.BlobSidecarsByRootRequest): Promise; + sendDataColumnSidecarsByRange( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRangeRequest + ): Promise; + sendDataColumnSidecarsByRoot( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRootRequest + ): Promise; // Gossip publishBeaconBlock(signedBlock: allForks.SignedBeaconBlock): Promise; publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise; + publishDataColumnSidecar(dataColumnSideCar: electra.DataColumnSidecar): Promise; publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise; publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise; publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise; diff --git a/packages/beacon-node/src/network/metadata.ts b/packages/beacon-node/src/network/metadata.ts index fab220c1ebf8..6c3cd92a69d3 100644 --- a/packages/beacon-node/src/network/metadata.ts +++ b/packages/beacon-node/src/network/metadata.ts @@ -11,6 +11,7 @@ export enum ENRKey { eth2 = "eth2", attnets = "attnets", syncnets = "syncnets", + custody_subnet_count = "custody_subnet_count", } export enum SubnetType { attnets = "attnets", diff --git a/packages/beacon-node/src/network/network.ts b/packages/beacon-node/src/network/network.ts index be8bb5114d40..daedbe50a1d0 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -5,22 +5,23 @@ import {BeaconConfig} from "@lodestar/config"; import {sleep} from "@lodestar/utils"; import {LoggerNode} from "@lodestar/logger/node"; import {computeStartSlotAtEpoch, computeTimeAtSlot} from "@lodestar/state-transition"; -import {phase0, allForks, deneb, altair, Root, capella, SlotRootHex} from "@lodestar/types"; +import {phase0, allForks, deneb, altair, Root, capella, SlotRootHex, electra, ColumnIndex} from "@lodestar/types"; import {routes} from "@lodestar/api"; import {ResponseIncoming} from "@lodestar/reqresp"; -import {ForkSeq, MAX_BLOBS_PER_BLOCK} from "@lodestar/params"; +import {ForkSeq, MAX_BLOBS_PER_BLOCK, NUMBER_OF_COLUMNS, DATA_COLUMN_SIDECAR_SUBNET_COUNT} from "@lodestar/params"; import {Metrics, RegistryMetricCreator} from "../metrics/index.js"; import {IBeaconChain} from "../chain/index.js"; import {IBeaconDb} from "../db/interface.js"; import {PeerIdStr, peerIdToString} from "../util/peerId.js"; import {IClock} from "../util/clock.js"; +import {getCustodyConfig, CustodyConfig} from "../util/dataColumns.js"; import {NetworkOptions} from "./options.js"; import {WithBytes, INetwork} from "./interface.js"; import {ReqRespMethod} from "./reqresp/index.js"; import {GossipHandlers, GossipTopicMap, GossipType, GossipTypeMap} from "./gossip/index.js"; import {PeerAction, PeerScoreStats} from "./peers/index.js"; import {INetworkEventBus, NetworkEvent, NetworkEventBus, NetworkEventData} from "./events.js"; -import {CommitteeSubscription} from "./subnets/index.js"; +import {CommitteeSubscription, NodeId} from "./subnets/index.js"; import {isPublishToZeroPeersError} from "./util.js"; import {NetworkProcessor, PendingGossipsubMessage} from "./processor/index.js"; import {INetworkCore, NetworkCore, WorkerNetworkCore} from "./core/index.js"; @@ -38,6 +39,7 @@ import {getActiveForks} from "./forks.js"; type NetworkModules = { opts: 
NetworkOptions; peerId: PeerId; + nodeId: NodeId; config: BeaconConfig; logger: LoggerNode; chain: IBeaconChain; @@ -51,6 +53,7 @@ export type NetworkInitModules = { opts: NetworkOptions; config: BeaconConfig; peerId: PeerId; + nodeId: NodeId; peerStoreDir?: string; logger: LoggerNode; metrics: Metrics | null; @@ -71,6 +74,8 @@ export type NetworkInitModules = { */ export class Network implements INetwork { readonly peerId: PeerId; + readonly nodeId: NodeId; + readonly custodyConfig: CustodyConfig; // TODO: Make private readonly events: INetworkEventBus; @@ -87,12 +92,14 @@ export class Network implements INetwork { private readonly aggregatorTracker: AggregatorTracker; private subscribedToCoreTopics = false; - private connectedPeers = new Set(); + private connectedPeers = new Map(); private regossipBlsChangesPromise: Promise | null = null; constructor(modules: NetworkModules) { this.peerId = modules.peerId; + this.nodeId = modules.nodeId; this.config = modules.config; + this.custodyConfig = getCustodyConfig(modules.nodeId, modules.config); this.logger = modules.logger; this.chain = modules.chain; this.clock = modules.chain.clock; @@ -122,6 +129,7 @@ export class Network implements INetwork { db, gossipHandlers, peerId, + nodeId, peerStoreDir, getReqRespHandler, }: NetworkInitModules): Promise { @@ -177,6 +185,7 @@ export class Network implements INetwork { return new Network({ opts, peerId, + nodeId, config, logger, chain, @@ -245,7 +254,7 @@ export class Network implements INetwork { // REST API queries getConnectedPeers(): PeerIdStr[] { - return Array.from(this.connectedPeers.values()); + return Array.from(this.connectedPeers.keys()); } getConnectedPeerCount(): number { return this.connectedPeers.size; @@ -304,6 +313,20 @@ export class Network implements INetwork { }); } + async publishDataColumnSidecar(dataColumnSidecar: electra.DataColumnSidecar): Promise { + const slot = dataColumnSidecar.signedBlockHeader.message.slot; + const fork = this.config.getForkName(slot); + const index = dataColumnSidecar.index % DATA_COLUMN_SIDECAR_SUBNET_COUNT; + + return this.publishGossip( + {type: GossipType.data_column_sidecar, fork, index}, + dataColumnSidecar, + { + ignoreDuplicatePublishError: true, + } + ); + } + async publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise { const fork = this.config.getForkName(aggregateAndProof.message.aggregate.data.slot); return this.publishGossip( @@ -504,6 +527,29 @@ export class Network implements INetwork { ); } + async sendDataColumnSidecarsByRange( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRangeRequest + ): Promise { + return collectMaxResponseTyped( + this.sendReqRespRequest(peerId, ReqRespMethod.DataColumnSidecarsByRange, [Version.V1], request), + // request's count represent the slots, so the actual max count received could be slots * blobs per slot + request.count * NUMBER_OF_COLUMNS, + responseSszTypeByMethod[ReqRespMethod.DataColumnSidecarsByRange] + ); + } + + async sendDataColumnSidecarsByRoot( + peerId: PeerIdStr, + request: electra.DataColumnSidecarsByRootRequest + ): Promise { + return collectMaxResponseTyped( + this.sendReqRespRequest(peerId, ReqRespMethod.DataColumnSidecarsByRoot, [Version.V1], request), + request.length, + responseSszTypeByMethod[ReqRespMethod.DataColumnSidecarsByRoot] + ); + } + private sendReqRespRequest( peerId: PeerIdStr, method: ReqRespMethod, @@ -618,7 +664,7 @@ export class Network implements INetwork { }; private onPeerConnected = (data: 
NetworkEventData[NetworkEvent.peerConnected]): void => { - this.connectedPeers.add(data.peer); + this.connectedPeers.set(data.peer, data.dataColumns); }; private onPeerDisconnected = (data: NetworkEventData[NetworkEvent.peerDisconnected]): void => { diff --git a/packages/beacon-node/src/network/peers/discover.ts b/packages/beacon-node/src/network/peers/discover.ts index 1cb084846f61..bd1dcd38a1f4 100644 --- a/packages/beacon-node/src/network/peers/discover.ts +++ b/packages/beacon-node/src/network/peers/discover.ts @@ -1,16 +1,20 @@ import {Multiaddr} from "@multiformats/multiaddr"; import type {PeerId, PeerInfo} from "@libp2p/interface"; import {ENR} from "@chainsafe/enr"; +import {fromHexString, toHexString} from "@chainsafe/ssz"; import {BeaconConfig} from "@lodestar/config"; import {pruneSetToMax, sleep} from "@lodestar/utils"; import {ATTESTATION_SUBNET_COUNT, SYNC_COMMITTEE_SUBNET_COUNT} from "@lodestar/params"; import {LoggerNode} from "@lodestar/logger/node"; +import {ssz} from "@lodestar/types"; import {NetworkCoreMetrics} from "../core/metrics.js"; import {Libp2p} from "../interface.js"; import {ENRKey, SubnetType} from "../metadata.js"; import {getConnectionsMap, prettyPrintPeerId} from "../util.js"; import {Discv5Worker} from "../discv5/index.js"; import {LodestarDiscv5Opts} from "../discv5/types.js"; +import {NodeId} from "../subnets/interface.js"; +import {getCustodyColumnSubnets} from "../../util/dataColumns.js"; import {deserializeEnrSubnets, zeroAttnets, zeroSyncnets} from "./utils/enrSubnetsDeserialize.js"; import {IPeerRpcScoreStore, ScoreState} from "./score/index.js"; @@ -19,6 +23,8 @@ const MAX_CACHED_ENRS = 100; /** Max age a cached ENR will be considered for dial */ const MAX_CACHED_ENR_AGE_MS = 5 * 60 * 1000; +const MAX_CACHED_NODEIDS = 10000; + export type PeerDiscoveryOpts = { maxPeers: number; discv5FirstQueryDelayMs: number; @@ -76,6 +82,7 @@ type CachedENR = { multiaddrTCP: Multiaddr; subnets: Record; addedUnixMs: number; + custodySubnetCount: number; }; /** @@ -85,11 +92,15 @@ type CachedENR = { export class PeerDiscovery { readonly discv5: Discv5Worker; private libp2p: Libp2p; + private nodeId: NodeId; + private custodySubnets: number[]; private peerRpcScores: IPeerRpcScoreStore; private metrics: NetworkCoreMetrics | null; private logger: LoggerNode; private config: BeaconConfig; private cachedENRs = new Map(); + private peerIdToNodeId = new Map(); + private peerIdToCustodySubnetCount = new Map(); private randomNodeQuery: QueryStatus = {code: QueryStatusCode.NotActive}; private peersToConnect = 0; private subnetRequests: Record> = { @@ -112,6 +123,10 @@ export class PeerDiscovery { this.logger = logger; this.config = config; this.discv5 = discv5; + this.nodeId = fromHexString(ENR.decodeTxt(opts.discv5.enr).nodeId); + // we will only connect to peers that can provide us custody + this.custodySubnets = getCustodyColumnSubnets(this.nodeId, config.CUSTODY_REQUIREMENT); + this.maxPeers = opts.maxPeers; this.discv5StartMs = 0; this.discv5StartMs = Date.now(); @@ -304,7 +319,9 @@ export class PeerDiscovery { const attnets = zeroAttnets; const syncnets = zeroSyncnets; - const status = this.handleDiscoveredPeer(id, multiaddrs[0], attnets, syncnets); + const custodySubnetCount = 0; + + const status = this.handleDiscoveredPeer(id, multiaddrs[0], attnets, syncnets, custodySubnetCount); this.metrics?.discovery.discoveredStatus.inc({status}); }; @@ -317,6 +334,11 @@ export class PeerDiscovery { } // async due to some crypto that's no longer necessary const peerId = await 
enr.peerId(); + + const nodeId = fromHexString(enr.nodeId); + this.peerIdToNodeId.set(peerId.toString(), nodeId); + pruneSetToMax(this.peerIdToNodeId, MAX_CACHED_NODEIDS); + // tcp multiaddr is known to be be present, checked inside the worker const multiaddrTCP = enr.getLocationMultiaddr(ENRKey.tcp); if (!multiaddrTCP) { @@ -327,6 +349,7 @@ export class PeerDiscovery { // Are this fields mandatory? const attnetsBytes = enr.kvs.get(ENRKey.attnets); // 64 bits const syncnetsBytes = enr.kvs.get(ENRKey.syncnets); // 4 bits + const custodySubnetCountBytes = enr.kvs.get(ENRKey.custody_subnet_count); // 64 bits // Use faster version than ssz's implementation that leverages pre-cached. // Some nodes don't serialize the bitfields properly, encoding the syncnets as attnets, @@ -334,8 +357,10 @@ export class PeerDiscovery { // never throw and treat too long or too short bitfields as zero-ed const attnets = attnetsBytes ? deserializeEnrSubnets(attnetsBytes, ATTESTATION_SUBNET_COUNT) : zeroAttnets; const syncnets = syncnetsBytes ? deserializeEnrSubnets(syncnetsBytes, SYNC_COMMITTEE_SUBNET_COUNT) : zeroSyncnets; + const custodySubnetCount = custodySubnetCountBytes ? ssz.UintNum64.deserialize(custodySubnetCountBytes) : 1; + this.peerIdToCustodySubnetCount.set(peerId.toString(), custodySubnetCount); - const status = this.handleDiscoveredPeer(peerId, multiaddrTCP, attnets, syncnets); + const status = this.handleDiscoveredPeer(peerId, multiaddrTCP, attnets, syncnets, custodySubnetCount); this.metrics?.discovery.discoveredStatus.inc({status}); }; @@ -346,8 +371,11 @@ export class PeerDiscovery { peerId: PeerId, multiaddrTCP: Multiaddr, attnets: boolean[], - syncnets: boolean[] + syncnets: boolean[], + custodySubnetCount: number ): DiscoveredPeerStatus { + const nodeId = this.peerIdToNodeId.get(peerId.toString()); + this.logger.warn("handleDiscoveredPeer", {nodeId: nodeId ? 
toHexString(nodeId) : null, peerId: peerId.toString()}); try { // Check if peer is not banned or disconnected if (this.peerRpcScores.getScoreState(peerId) !== ScoreState.Healthy) { @@ -374,6 +402,7 @@ export class PeerDiscovery { multiaddrTCP, subnets: {attnets, syncnets}, addedUnixMs: Date.now(), + custodySubnetCount, }; // Only dial peer if necessary @@ -394,6 +423,27 @@ export class PeerDiscovery { } private shouldDialPeer(peer: CachedENR): boolean { + const nodeId = this.peerIdToNodeId.get(peer.peerId.toString()); + if (nodeId === undefined) { + return false; + } + const peerCustodySubnetCount = peer.custodySubnetCount; + const peerCustodySubnets = getCustodyColumnSubnets(nodeId, peerCustodySubnetCount); + const hasAllColumns = this.custodySubnets.reduce((acc, elem) => acc && peerCustodySubnets.includes(elem), true); + + this.logger.debug("peerCustodySubnets", { + peerId: peer.peerId.toString(), + peerNodeId: toHexString(nodeId), + hasAllColumns, + peerCustodySubnetCount, + peerCustodySubnets: peerCustodySubnets.join(","), + custodySubnets: this.custodySubnets.join(","), + nodeId: `${toHexString(this.nodeId)}`, + }); + if (!hasAllColumns) { + return false; + } + for (const type of [SubnetType.attnets, SubnetType.syncnets]) { for (const [subnet, {toUnixMs, peersToConnect}] of this.subnetRequests[type].entries()) { if (toUnixMs < Date.now() || peersToConnect === 0) { diff --git a/packages/beacon-node/src/network/peers/peerManager.ts b/packages/beacon-node/src/network/peers/peerManager.ts index 5149b129c363..fd107af97d67 100644 --- a/packages/beacon-node/src/network/peers/peerManager.ts +++ b/packages/beacon-node/src/network/peers/peerManager.ts @@ -1,5 +1,5 @@ import {Connection, PeerId} from "@libp2p/interface"; -import {BitArray} from "@chainsafe/ssz"; +import {BitArray, toHexString} from "@chainsafe/ssz"; import {SYNC_COMMITTEE_SUBNET_COUNT} from "@lodestar/params"; import {BeaconConfig} from "@lodestar/config"; import {allForks, altair, phase0} from "@lodestar/types"; @@ -17,6 +17,7 @@ import {Eth2Gossipsub} from "../gossip/gossipsub.js"; import {StatusCache} from "../statusCache.js"; import {NetworkCoreMetrics} from "../core/metrics.js"; import {LodestarDiscv5Opts} from "../discv5/types.js"; +import {getCustodyColumns} from "../../util/dataColumns.js"; import {PeerDiscovery, SubnetDiscvQueryMs} from "./discover.js"; import {PeersData, PeerData} from "./peersData.js"; import {getKnownClientFromAgentVersion, ClientKind} from "./client.js"; @@ -376,7 +377,15 @@ export class PeerManager { peerData.relevantStatus = RelevantPeerStatus.relevant; } if (getConnection(this.libp2p, peer.toString())) { - this.networkEventBus.emit(NetworkEvent.peerConnected, {peer: peer.toString(), status}); + const nodeId = peerData?.nodeId ?? this.discovery?.["peerIdToNodeId"].get(peer.toString()); + const custodySubnetCount = + peerData?.custodySubnetCount ?? this.discovery?.["peerIdToCustodySubnetCount"].get(peer.toString()); + this.logger.warn("onStatus", {nodeId: nodeId ? toHexString(nodeId) : undefined, peerId: peer.toString()}); + + if (nodeId !== undefined && custodySubnetCount !== undefined) { + const dataColumns = getCustodyColumns(nodeId, custodySubnetCount); + this.networkEventBus.emit(NetworkEvent.peerConnected, {peer: peer.toString(), status, dataColumns}); + } } } @@ -586,6 +595,8 @@ export class PeerManager { // NOTE: libp2p may emit two "peer:connect" events: One for inbound, one for outbound // If that happens, it's okay. 
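// Sketch of the dial rule in shouldDialPeer above (illustrative only): a discovered peer is
// dialed only when its custody subnets cover every subnet this node samples from, i.e. a
// subset check equivalent to the reduce over this.custodySubnets:
function peerCoversOurCustody(ourSubnets: number[], peerSubnets: number[]): boolean {
  const peerSet = new Set(peerSubnets);
  return ourSubnets.every((subnet) => peerSet.has(subnet));
}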
Only the "outbound" connection triggers immediate action const now = Date.now(); + const nodeId = this.discovery?.["peerIdToNodeId"].get(remotePeer.toString()) ?? null; + const custodySubnetCount = this.discovery?.["peerIdToCustodySubnetCount"].get(remotePeer.toString()) ?? null; const peerData: PeerData = { lastReceivedMsgUnixTsMs: direction === "outbound" ? 0 : now, // If inbound, request after STATUS_INBOUND_GRACE_PERIOD @@ -593,11 +604,13 @@ export class PeerManager { connectedUnixTsMs: now, relevantStatus: RelevantPeerStatus.Unknown, direction, + nodeId, peerId: remotePeer, metadata: null, agentVersion: null, agentClient: null, encodingPreference: null, + custodySubnetCount, }; this.connectedPeers.set(remotePeer.toString(), peerData); diff --git a/packages/beacon-node/src/network/peers/peersData.ts b/packages/beacon-node/src/network/peers/peersData.ts index 4f96548c73e4..3ea776917f73 100644 --- a/packages/beacon-node/src/network/peers/peersData.ts +++ b/packages/beacon-node/src/network/peers/peersData.ts @@ -1,6 +1,7 @@ import {PeerId} from "@libp2p/interface"; import {altair} from "@lodestar/types"; import {Encoding} from "@lodestar/reqresp"; +import {NodeId} from "../subnets/interface.js"; import {ClientKind} from "./client.js"; type PeerIdStr = string; @@ -18,10 +19,12 @@ export type PeerData = { relevantStatus: RelevantPeerStatus; direction: "inbound" | "outbound"; peerId: PeerId; + nodeId: NodeId | null; metadata: altair.Metadata | null; agentVersion: string | null; agentClient: ClientKind | null; encodingPreference: Encoding | null; + custodySubnetCount: number | null; }; /** diff --git a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts index d31cb3e2d7f9..2c8d7462b121 100644 --- a/packages/beacon-node/src/network/processor/extractSlotRootFns.ts +++ b/packages/beacon-node/src/network/processor/extractSlotRootFns.ts @@ -6,6 +6,7 @@ import { getSlotFromSignedAggregateAndProofSerialized, getSlotFromBlobSidecarSerialized, getSlotFromSignedBeaconBlockSerialized, + getSlotFromDataColumnSidecarSerialized, } from "../../util/sszBytes.js"; import {GossipType} from "../gossip/index.js"; import {ExtractSlotRootFns} from "./types.js"; @@ -45,6 +46,14 @@ export function createExtractBlockSlotRootFns(): ExtractSlotRootFns { [GossipType.blob_sidecar]: (data: Uint8Array): SlotOptionalRoot | null => { const slot = getSlotFromBlobSidecarSerialized(data); + if (slot === null) { + return null; + } + return {slot}; + }, + [GossipType.data_column_sidecar]: (data: Uint8Array): SlotOptionalRoot | null => { + const slot = getSlotFromDataColumnSidecarSerialized(data); + if (slot === null) { return null; } diff --git a/packages/beacon-node/src/network/processor/gossipHandlers.ts b/packages/beacon-node/src/network/processor/gossipHandlers.ts index 1a71cc7de334..36fa294e6610 100644 --- a/packages/beacon-node/src/network/processor/gossipHandlers.ts +++ b/packages/beacon-node/src/network/processor/gossipHandlers.ts @@ -1,7 +1,7 @@ import {toHexString} from "@chainsafe/ssz"; import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; import {LogLevel, Logger, prettyBytes} from "@lodestar/utils"; -import {Root, Slot, ssz, allForks, deneb, UintNum64} from "@lodestar/types"; +import {Root, Slot, ssz, allForks, deneb, UintNum64, electra} from "@lodestar/types"; import {ForkName, ForkSeq} from "@lodestar/params"; import {routes} from "@lodestar/api"; import {computeTimeAtSlot} from "@lodestar/state-transition"; @@ -15,6 +15,8 @@ 
import { BlockGossipError, BlobSidecarErrorCode, BlobSidecarGossipError, + DataColumnSidecarGossipError, + DataColumnSidecarErrorCode, GossipAction, GossipActionError, SyncCommitteeError, @@ -46,6 +48,7 @@ import {PeerAction} from "../peers/index.js"; import {validateLightClientFinalityUpdate} from "../../chain/validation/lightClientFinalityUpdate.js"; import {validateLightClientOptimisticUpdate} from "../../chain/validation/lightClientOptimisticUpdate.js"; import {validateGossipBlobSidecar} from "../../chain/validation/blobSidecar.js"; +import {validateGossipDataColumnSidecar} from "../../chain/validation/dataColumnSidecar.js"; import { BlockInput, GossipedInputType, @@ -252,6 +255,74 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } } + async function validateBeaconDataColumn( + dataColumnSidecar: electra.DataColumnSidecar, + dataColumnBytes: Uint8Array, + gossipIndex: number, + peerIdStr: string, + seenTimestampSec: number + ): Promise { + const dataColumnBlockHeader = dataColumnSidecar.signedBlockHeader.message; + const slot = dataColumnBlockHeader.slot; + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(dataColumnBlockHeader); + const blockHex = prettyBytes(blockRoot); + + const delaySec = chain.clock.secFromSlot(slot, seenTimestampSec); + const recvToValLatency = Date.now() / 1000 - seenTimestampSec; + + const {blockInput, blockInputMeta} = chain.seenGossipBlockInput.getGossipBlockInput( + config, + { + type: GossipedInputType.dataColumn, + dataColumnSidecar, + dataColumnBytes, + }, + metrics + ); + + try { + await validateGossipDataColumnSidecar(chain, dataColumnSidecar, gossipIndex); + const recvToValidation = Date.now() / 1000 - seenTimestampSec; + const validationTime = recvToValidation - recvToValLatency; + + metrics?.gossipBlob.recvToValidation.observe(recvToValidation); + metrics?.gossipBlob.validationTime.observe(validationTime); + + logger.debug("Received gossip dataColumn", { + slot: slot, + root: blockHex, + curentSlot: chain.clock.currentSlot, + peerId: peerIdStr, + delaySec, + gossipIndex, + ...blockInputMeta, + recvToValLatency, + recvToValidation, + validationTime, + }); + + return blockInput; + } catch (e) { + if (e instanceof DataColumnSidecarGossipError) { + // Don't trigger this yet if full block and blobs haven't arrived yet + if (e.type.code === DataColumnSidecarErrorCode.PARENT_UNKNOWN && blockInput.block !== null) { + logger.debug("Gossip dataColumn has error", {slot, root: blockHex, code: e.type.code}); + events.emit(NetworkEvent.unknownBlockParent, {blockInput, peer: peerIdStr}); + } + + if (e.action === GossipAction.REJECT) { + chain.persistInvalidSszValue( + ssz.electra.DataColumnSidecar, + dataColumnSidecar, + `gossip_reject_slot_${slot}_index_${dataColumnSidecar.index}` + ); + } + } + + throw e; + } + } + function handleValidBeaconBlock(blockInput: BlockInput, peerIdStr: string, seenTimestampSec: number): void { const signedBlock = blockInput.block; @@ -408,6 +479,63 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } }, + [GossipType.data_column_sidecar]: async ({ + gossipData, + topic, + peerIdStr, + seenTimestampSec, + }: GossipHandlerParamGeneric) => { + const {serializedData} = gossipData; + const dataColumnSidecar = sszDeserialize(topic, serializedData); + const blobSlot = dataColumnSidecar.signedBlockHeader.message.slot; + const index = dataColumnSidecar.index; + + if (config.getForkSeq(blobSlot) < ForkSeq.deneb) { + throw new GossipActionError(GossipAction.REJECT, 
{code: "PRE_DENEB_BLOCK"}); + } + const blockInput = await validateBeaconDataColumn( + dataColumnSidecar, + serializedData, + topic.index, + peerIdStr, + seenTimestampSec + ); + if (blockInput.block !== null) { + // we can just queue up the blockInput in the processor, but block gossip handler would have already + // queued it up. + // + // handleValidBeaconBlock(blockInput, peerIdStr, seenTimestampSec); + } else { + // wait for the block to arrive till some cutoff else emit unknownBlockInput event + chain.logger.debug("Block not yet available, racing with cutoff", {blobSlot, index}); + const normalBlockInput = await raceWithCutoff( + chain, + blobSlot, + blockInput.blockInputPromise, + BLOCK_AVAILABILITY_CUTOFF_MS + ).catch((_e) => { + return null; + }); + + if (normalBlockInput !== null) { + chain.logger.debug("Block corresponding to blob is now available for processing", {blobSlot, index}); + // we can directly send it for processing but block gossip handler will queue it up anyway + // if we see any issues later, we can send it to handleValidBeaconBlock + // + // handleValidBeaconBlock(normalBlockInput, peerIdStr, seenTimestampSec); + // + // however we can emit the event which will atleast add the peer to the list of peers to pull + // data from + if (normalBlockInput.type === BlockInputType.dataPromise) { + events.emit(NetworkEvent.unknownBlockInput, {blockInput: normalBlockInput, peer: peerIdStr}); + } + } else { + chain.logger.debug("Block not available till BLOCK_AVAILABILITY_CUTOFF_MS", {blobSlot, index}); + events.emit(NetworkEvent.unknownBlockInput, {blockInput, peer: peerIdStr}); + } + } + }, + [GossipType.beacon_aggregate_and_proof]: async ({ gossipData, topic, diff --git a/packages/beacon-node/src/network/processor/gossipQueues/index.ts b/packages/beacon-node/src/network/processor/gossipQueues/index.ts index 366b23b30679..8d38e0e1e14a 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/index.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/index.ts @@ -39,6 +39,11 @@ const defaultGossipQueueOpts: { type: QueueType.FIFO, dropOpts: {type: DropType.count, count: 1}, }, + [GossipType.data_column_sidecar]: { + maxLength: 4096, + type: QueueType.FIFO, + dropOpts: {type: DropType.count, count: 1}, + }, // lighthoue has aggregate_queue 4096 and unknown_block_aggregate_queue 1024, we use single queue [GossipType.beacon_aggregate_and_proof]: { maxLength: 5120, diff --git a/packages/beacon-node/src/network/processor/index.ts b/packages/beacon-node/src/network/processor/index.ts index 9a1dcfb32fa0..2a88054b1498 100644 --- a/packages/beacon-node/src/network/processor/index.ts +++ b/packages/beacon-node/src/network/processor/index.ts @@ -66,6 +66,7 @@ type WorkOpts = { const executeGossipWorkOrderObj: Record = { [GossipType.beacon_block]: {bypassQueue: true}, [GossipType.blob_sidecar]: {bypassQueue: true}, + [GossipType.data_column_sidecar]: {bypassQueue: true}, [GossipType.beacon_aggregate_and_proof]: {}, [GossipType.voluntary_exit]: {}, [GossipType.bls_to_execution_change]: {}, @@ -268,7 +269,12 @@ export class NetworkProcessor { }); return; } - if (slot === clockSlot && (topicType === GossipType.beacon_block || topicType === GossipType.blob_sidecar)) { + if ( + slot === clockSlot && + (topicType === GossipType.beacon_block || + topicType === GossipType.blob_sidecar || + topicType === GossipType.data_column_sidecar) + ) { // in the worse case if the current slot block is not valid, this will be reset in the next slot this.isProcessingCurrentSlotBlock = 
true; } diff --git a/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts b/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts index cfe13b527183..2a0f8bb4e199 100644 --- a/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts +++ b/packages/beacon-node/src/network/reqresp/ReqRespBeaconNode.ts @@ -253,6 +253,13 @@ export class ReqRespBeaconNode extends ReqResp { ); } + if (ForkSeq[fork] >= ForkSeq.electra) { + protocolsAtFork.push( + [protocols.DataColumnSidecarsByRoot(this.config), this.getHandler(ReqRespMethod.DataColumnSidecarsByRoot)], + [protocols.DataColumnSidecarsByRange(this.config), this.getHandler(ReqRespMethod.DataColumnSidecarsByRange)] + ); + } + return protocolsAtFork; } diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts index ff5689a7b8c3..94ce085b062c 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRange.ts @@ -1,11 +1,20 @@ import {ChainForkConfig} from "@lodestar/config"; -import {deneb, Epoch, phase0, allForks, Slot} from "@lodestar/types"; -import {ForkSeq} from "@lodestar/params"; +import {deneb, Epoch, phase0, allForks, Slot, electra} from "@lodestar/types"; +import {ForkSeq, NUMBER_OF_COLUMNS} from "@lodestar/params"; import {computeEpochAtSlot} from "@lodestar/state-transition"; -import {BlobsSource, BlockInput, BlockSource, getBlockInput, BlockInputDataBlobs} from "../../chain/blocks/types.js"; +import { + BlobsSource, + BlockInput, + BlockSource, + getBlockInput, + BlockInputDataBlobs, + BlockInputDataDataColumns, + DataColumnsSource, +} from "../../chain/blocks/types.js"; import {PeerIdStr} from "../../util/peerId.js"; import {INetwork, WithBytes} from "../interface.js"; +import {CustodyConfig} from "../../util/dataColumns.js"; export async function beaconBlocksMaybeBlobsByRange( config: ChainForkConfig, @@ -30,20 +39,44 @@ export async function beaconBlocksMaybeBlobsByRange( ); } + const forkSeq = config.getForkSeq(startSlot); + // Note: Assumes all blocks in the same epoch - if (config.getForkSeq(startSlot) < ForkSeq.deneb) { + if (forkSeq < ForkSeq.deneb) { const blocks = await network.sendBeaconBlocksByRange(peerId, request); return blocks.map((block) => getBlockInput.preData(config, block.data, BlockSource.byRange, block.bytes)); } // Only request blobs if they are recent enough else if (computeEpochAtSlot(startSlot) >= currentEpoch - config.MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS) { - const [allBlocks, allBlobSidecars] = await Promise.all([ - network.sendBeaconBlocksByRange(peerId, request), - network.sendBlobSidecarsByRange(peerId, request), - ]); + if (forkSeq < ForkSeq.electra) { + const [allBlocks, allBlobSidecars] = await Promise.all([ + network.sendBeaconBlocksByRange(peerId, request), + network.sendBlobSidecarsByRange(peerId, request), + ]); + + return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, endSlot, BlockSource.byRange, BlobsSource.byRange); + } else { + const {custodyConfig} = network; + // get columns + const {custodyColumns: columns} = custodyConfig; + const dataColumnRequest = {...request, columns}; + const [allBlocks, allDataColumnSidecars] = await Promise.all([ + network.sendBeaconBlocksByRange(peerId, request), + network.sendDataColumnSidecarsByRange(peerId, dataColumnRequest), + ]); - return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, endSlot, BlockSource.byRange, 
BlobsSource.byRange); + return matchBlockWithDataColumns( + peerId, + config, + custodyConfig, + allBlocks, + allDataColumnSidecars, + endSlot, + BlockSource.byRange, + DataColumnsSource.byRange + ); + } } // Post Deneb but old blobs @@ -125,3 +158,111 @@ export function matchBlockWithBlobs( } return blockInputs; } + +export function matchBlockWithDataColumns( + peerId: PeerIdStr, + config: ChainForkConfig, + custodyConfig: CustodyConfig, + allBlocks: WithBytes[], + allDataColumnSidecars: electra.DataColumnSidecar[], + endSlot: Slot, + blockSource: BlockSource, + dataColumnsSource: DataColumnsSource +): BlockInput[] { + const blockInputs: BlockInput[] = []; + let dataColumnSideCarIndex = 0; + let lastMatchedSlot = -1; + + // Match dataColumnSideCar with the block as some blocks would have no dataColumns and hence + // would be omitted from the response. If there are any inconsistencies in the + // response, the validations during import will reject the block and hence this + // entire segment. + // + // Assuming that the blocks and data columns will come in the same sorted order + for (let i = 0; i < allBlocks.length; i++) { + const block = allBlocks[i]; + + const forkSeq = config.getForkSeq(block.data.message.slot); + if (forkSeq < ForkSeq.electra) { + throw Error(`Invalid block forkSeq=${forkSeq} < ForkSeq.electra for matchBlockWithDataColumns`); + } else { + const dataColumnSidecars: electra.DataColumnSidecar[] = []; + let dataColumnSidecar: electra.DataColumnSidecar; + while ( + (dataColumnSidecar = allDataColumnSidecars[dataColumnSideCarIndex])?.signedBlockHeader.message.slot === + block.data.message.slot + ) { + dataColumnSidecars.push(dataColumnSidecar); + lastMatchedSlot = block.data.message.slot; + dataColumnSideCarIndex++; + } + + const blobKzgCommitmentsLen = (block.data.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + if (blobKzgCommitmentsLen === 0) { + if (dataColumnSidecars.length > 0) { + throw Error( + `Missing or mismatching dataColumnSidecars from peerId=${peerId} for blockSlot=${block.data.message.slot} with blobKzgCommitmentsLen=0 dataColumnSidecars=${dataColumnSidecars.length}>0` + ); + } + + const blockData = { + fork: config.getForkName(block.data.message.slot), + dataColumnsLen: 0, + dataColumnsIndex: new Uint8Array(NUMBER_OF_COLUMNS), + dataColumns: [], + dataColumnsBytes: [], + dataColumnsSource, + } as BlockInputDataDataColumns; + blockInputs.push(getBlockInput.availableData(config, block.data, blockSource, null, blockData)); + } else { + // Quick check of how many dataColumnSidecars were expected + const {custodyColumns: columnIndexes, custodyColumnsLen, custodyColumnsIndex} = custodyConfig; + const dataColumnIndexes = dataColumnSidecars.map((dataColumnSidecar) => dataColumnSidecar.index); + const custodyIndexesPresent = columnIndexes.reduce( + (acc, columnIndex) => acc && dataColumnIndexes.includes(columnIndex), + true + ); + + if ( + dataColumnSidecars.length < custodyColumnsLen || + dataColumnSidecars.length > NUMBER_OF_COLUMNS || + !custodyIndexesPresent + ) { + throw Error( + `Missing or mismatching dataColumnSidecars from peerId=${peerId} for blockSlot=${block.data.message.slot} with numColumns=${columnIndexes.length} dataColumnSidecars=${dataColumnSidecars.length} custodyIndexesPresent=${custodyIndexesPresent} received dataColumnIndexes=${dataColumnIndexes.join(",")} expected=${columnIndexes.join(",")}` + ); + } + + const blockData = { + fork: config.getForkName(block.data.message.slot), + dataColumnsLen: custodyColumnsLen, + dataColumnsIndex:
custodyColumnsIndex, + dataColumns: dataColumnSidecars, + dataColumnsSource, + dataColumnsBytes: Array.from({length: dataColumnSidecars.length}, () => null), + } as BlockInputDataDataColumns; + + // TODO DENEB: instead of null, pass payload in bytes + blockInputs.push(getBlockInput.availableData(config, block.data, blockSource, null, blockData)); + } + } + } + + // If there are still unconsumed data columns this means that the response was inconsistent + // and matching was wrong and hence we should throw error + if ( + allDataColumnSidecars[dataColumnSideCarIndex] !== undefined && + // If there are no data columns, the data columns request can give 1 block outside the requested range + allDataColumnSidecars[dataColumnSideCarIndex].signedBlockHeader.message.slot <= endSlot + ) { + throw Error( + `Unmatched dataColumnSidecars, blocks=${allBlocks.length}, dataColumnSidecars=${ + allDataColumnSidecars.length + } lastMatchedSlot=${lastMatchedSlot}, pending dataColumnSidecars slots=${allDataColumnSidecars + .slice(dataColumnSideCarIndex) + .map((blb) => blb.signedBlockHeader.message.slot) + .join(",")}` + ); + } + return blockInputs; +} diff --git a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts index 2b802ab1edd9..ec5ac60fd599 100644 --- a/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts +++ b/packages/beacon-node/src/network/reqresp/beaconBlocksMaybeBlobsByRoot.ts @@ -1,7 +1,7 @@ import {fromHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {phase0, deneb} from "@lodestar/types"; -import {ForkSeq} from "@lodestar/params"; +import {phase0, deneb, electra} from "@lodestar/types"; +import {ForkName, ForkSeq, NUMBER_OF_COLUMNS} from "@lodestar/params"; import { BlockInput, BlockInputType, @@ -11,12 +11,15 @@ import { NullBlockInput, BlobsSource, BlockInputDataBlobs, + DataColumnsSource, + getBlockInputDataColumns, + BlockInputDataDataColumns, } from "../../chain/blocks/types.js"; import {PeerIdStr} from "../../util/peerId.js"; import {INetwork} from "../interface.js"; import {BlockInputAvailabilitySource} from "../../chain/seenCache/seenGossipBlockInput.js"; import {Metrics} from "../../metrics/index.js"; -import {matchBlockWithBlobs} from "./beaconBlocksMaybeBlobsByRange.js"; +import {matchBlockWithBlobs, matchBlockWithDataColumns} from "./beaconBlocksMaybeBlobsByRange.js"; export async function beaconBlocksMaybeBlobsByRoot( config: ChainForkConfig, @@ -25,31 +28,86 @@ export async function beaconBlocksMaybeBlobsByRoot( request: phase0.BeaconBlocksByRootRequest ): Promise { const allBlocks = await network.sendBeaconBlocksByRoot(peerId, request); + const preDataBlocks = []; + const blobsDataBlocks = []; + const dataColumnsDataBlocks = []; + const blobIdentifiers: deneb.BlobIdentifier[] = []; + const dataColumnIdentifiers: electra.DataColumnIdentifier[] = []; for (const block of allBlocks) { const slot = block.data.message.slot; const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.data.message); const fork = config.getForkName(slot); - - if (ForkSeq[fork] >= ForkSeq.deneb) { + if (ForkSeq[fork] < ForkSeq.deneb) { + preDataBlocks.push(block); + } else if (fork === ForkName.deneb) { + blobsDataBlocks.push(block); const blobKzgCommitmentsLen = (block.data.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; for (let index = 0; index < blobKzgCommitmentsLen; index++) { blobIdentifiers.push({blockRoot, index}); } + } else if (fork ===
ForkName.electra) { + dataColumnsDataBlocks.push(block); + const blobKzgCommitmentsLen = (block.data.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + const custodyColumnIndexes = blobKzgCommitmentsLen > 0 ? network.custodyConfig.custodyColumns : []; + for (const columnIndex of custodyColumnIndexes) { + dataColumnIdentifiers.push({blockRoot, index: columnIndex}); + } + } else { + throw Error(`Invalid fork=${fork} in beaconBlocksMaybeBlobsByRoot`); } } - let allBlobSidecars: deneb.BlobSidecar[]; - if (blobIdentifiers.length > 0) { - allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); - } else { - allBlobSidecars = []; + let blockInputs = preDataBlocks.map((block) => + getBlockInput.preData(config, block.data, BlockSource.byRoot, block.bytes) + ); + + if (blobsDataBlocks.length > 0) { + let allBlobSidecars: deneb.BlobSidecar[]; + if (blobIdentifiers.length > 0) { + allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); + } else { + allBlobSidecars = []; + } + + // The last arg is to provide the slot up to which all blobs should be exhausted in matching + // and here it should be infinity since all blobs should match + const blockInputWithBlobs = matchBlockWithBlobs( + config, + allBlocks, + allBlobSidecars, + Infinity, + BlockSource.byRoot, + BlobsSource.byRoot + ); + blockInputs = [...blockInputs, ...blockInputWithBlobs]; + } + + if (dataColumnsDataBlocks.length > 0) { + let allDataColumnsSidecars: electra.DataColumnSidecar[]; + if (dataColumnIdentifiers.length > 0) { + allDataColumnsSidecars = await network.sendDataColumnSidecarsByRoot(peerId, dataColumnIdentifiers); + } else { + allDataColumnsSidecars = []; + } + + // The last arg is to provide the slot up to which all data columns should be exhausted in matching + // and here it should be infinity since all data columns should match + const blockInputWithDataColumns = matchBlockWithDataColumns( + peerId, + config, + network.custodyConfig, + allBlocks, + allDataColumnsSidecars, + Infinity, + BlockSource.byRoot, + DataColumnsSource.byRoot + ); + blockInputs = [...blockInputs, ...blockInputWithDataColumns]; } - // The last arg is to provide slot to which all blobs should be exausted in matching - // and here it should be infinity since all bobs should match - return matchBlockWithBlobs(config, allBlocks, allBlobSidecars, Infinity, BlockSource.byRoot, BlobsSource.byRoot); + return blockInputs; } export async function unavailableBeaconBlobsByRoot( @@ -64,51 +122,122 @@ export async function unavailableBeaconBlobsByRoot( } // resolve the block if thats unavailable - let block, blobsCache, blockBytes, resolveAvailability, cachedData; + let block, blockBytes, cachedData; if (unavailableBlockInput.block === null) { const allBlocks = await network.sendBeaconBlocksByRoot(peerId, [fromHexString(unavailableBlockInput.blockRootHex)]); block = allBlocks[0].data; blockBytes = allBlocks[0].bytes; cachedData = unavailableBlockInput.cachedData; - ({blobsCache, resolveAvailability} = cachedData); } else { ({block, cachedData, blockBytes} = unavailableBlockInput); - ({blobsCache, resolveAvailability} = cachedData); } - // resolve missing blobs - const blobIdentifiers: deneb.BlobIdentifier[] = []; - const slot = block.message.slot; - const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); + let availableBlockInput; + if (cachedData.fork === ForkName.deneb) { + const {blobsCache, resolveAvailability} = cachedData; - const blobKzgCommitmentsLen = (block.message.body as
deneb.BeaconBlockBody).blobKzgCommitments.length; - for (let index = 0; index < blobKzgCommitmentsLen; index++) { - if (blobsCache.has(index) === false) blobIdentifiers.push({blockRoot, index}); - } + // resolve missing blobs + const blobIdentifiers: deneb.BlobIdentifier[] = []; + const slot = block.message.slot; + const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); - let allBlobSidecars: deneb.BlobSidecar[]; - if (blobIdentifiers.length > 0) { - allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); - } else { - allBlobSidecars = []; - } + const blobKzgCommitmentsLen = (block.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + for (let index = 0; index < blobKzgCommitmentsLen; index++) { + if (blobsCache.has(index) === false) blobIdentifiers.push({blockRoot, index}); + } - // add them in cache so that its reflected in all the blockInputs that carry this - // for e.g. a blockInput that might be awaiting blobs promise fullfillment in - // verifyBlocksDataAvailability - for (const blobSidecar of allBlobSidecars) { - blobsCache.set(blobSidecar.index, {blobSidecar, blobBytes: null}); - } + let allBlobSidecars: deneb.BlobSidecar[]; + if (blobIdentifiers.length > 0) { + allBlobSidecars = await network.sendBlobSidecarsByRoot(peerId, blobIdentifiers); + } else { + allBlobSidecars = []; + } + + // add them in cache so that its reflected in all the blockInputs that carry this + // for e.g. a blockInput that might be awaiting blobs promise fullfillment in + // verifyBlocksDataAvailability + for (const blobSidecar of allBlobSidecars) { + blobsCache.set(blobSidecar.index, {blobSidecar, blobBytes: null}); + } + + // check and see if all blobs are now available and in that case resolve availability + // if not this will error and the leftover blobs will be tried from another peer + const allBlobs = getBlockInputBlobs(blobsCache); + const {blobs} = allBlobs; + if (blobs.length !== blobKzgCommitmentsLen) { + throw Error(`Not all blobs fetched missingBlobs=${blobKzgCommitmentsLen - blobs.length}`); + } + const blockData = {fork: cachedData.fork, ...allBlobs, blobsSource: BlobsSource.byRoot} as BlockInputDataBlobs; + resolveAvailability(blockData); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); + availableBlockInput = getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + } else if (cachedData.fork === ForkName.electra) { + const {dataColumnsCache, resolveAvailability} = cachedData; + + // resolve missing blobs + const dataColumnIdentifiers: electra.DataColumnIdentifier[] = []; + const slot = block.message.slot; + const blockRoot = config.getForkTypes(slot).BeaconBlock.hashTreeRoot(block.message); + + const blobKzgCommitmentsLen = (block.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; + if (blobKzgCommitmentsLen === 0) { + const blockData = { + fork: cachedData.fork, + dataColumns: [], + dataColumnsBytes: [], + dataColumnsLen: 0, + dataColumnsIndex: new Uint8Array(NUMBER_OF_COLUMNS), + dataColumnsSource: DataColumnsSource.gossip, + } as BlockInputDataDataColumns; + + resolveAvailability(blockData); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); + availableBlockInput = getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + } else { + const custodyColumnIndexes = network.custodyConfig.custodyColumns; + for (const columnIndex 
of custodyColumnIndexes) { + if (dataColumnsCache.has(columnIndex) === false) { + dataColumnIdentifiers.push({blockRoot, index: columnIndex}); + } + } - // check and see if all blobs are now available and in that case resolve availability - // if not this will error and the leftover blobs will be tried from another peer - const allBlobs = getBlockInputBlobs(blobsCache); - const {blobs} = allBlobs; - if (blobs.length !== blobKzgCommitmentsLen) { - throw Error(`Not all blobs fetched missingBlobs=${blobKzgCommitmentsLen - blobs.length}`); + let allDataColumnSidecars: electra.DataColumnSidecar[]; + if (dataColumnIdentifiers.length > 0) { + allDataColumnSidecars = await network.sendDataColumnSidecarsByRoot(peerId, dataColumnIdentifiers); + } else { + allDataColumnSidecars = []; + } + + // add them in cache so that its reflected in all the blockInputs that carry this + // for e.g. a blockInput that might be awaiting blobs promise fullfillment in + // verifyBlocksDataAvailability + for (const dataColumnSidecar of allDataColumnSidecars) { + dataColumnsCache.set(dataColumnSidecar.index, {dataColumnSidecar, dataColumnBytes: null}); + } + + // check and see if all blobs are now available and in that case resolve availability + // if not this will error and the leftover blobs will be tried from another peer + const allDataColumns = getBlockInputDataColumns(dataColumnsCache, custodyColumnIndexes); + const {dataColumns} = allDataColumns; + if (dataColumns.length !== network.custodyConfig.custodyColumnsLen) { + throw Error( + `Not all dataColumns fetched missingColumns=${network.custodyConfig.custodyColumnsLen - dataColumns.length}` + ); + } + const blockData = { + fork: cachedData.fork, + ...allDataColumns, + dataColumnsLen: network.custodyConfig.custodyColumnsLen, + dataColumnsIndex: network.custodyConfig.custodyColumnsIndex, + dataColumnsSource: DataColumnsSource.byRoot, + } as BlockInputDataDataColumns; + resolveAvailability(blockData); + metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); + availableBlockInput = getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + } + } else { + throw Error(`Invalid cachedData fork=${cachedData.fork} for unavailableBeaconBlobsByRoot`); } - const blockData = {fork: cachedData.fork, ...allBlobs, blobsSource: BlobsSource.byRoot} as BlockInputDataBlobs; - resolveAvailability(blockData); - metrics?.syncUnknownBlock.resolveAvailabilitySource.inc({source: BlockInputAvailabilitySource.UNKNOWN_SYNC}); - return getBlockInput.availableData(config, block, BlockSource.byRoot, blockBytes, blockData); + + return availableBlockInput; } diff --git a/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts b/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts index d1046db9651d..e892b2f412dd 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/beaconBlocksByRange.ts @@ -69,7 +69,7 @@ export async function* onBeaconBlocksByRange( } export function validateBeaconBlocksByRangeRequest( - request: deneb.BlobSidecarsByRangeRequest + request: phase0.BeaconBlocksByRangeRequest ): deneb.BlobSidecarsByRangeRequest { const {startSlot} = request; let {count} = request; diff --git a/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts new file mode 100644 index 
000000000000..903c4a64b69f --- /dev/null +++ b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRange.ts @@ -0,0 +1,125 @@ +import {GENESIS_SLOT, MAX_REQUEST_BLOCKS_DENEB} from "@lodestar/params"; +import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; +import {electra, Slot, ssz, ColumnIndex} from "@lodestar/types"; +import {fromHex} from "@lodestar/utils"; +import {IBeaconChain} from "../../../chain/index.js"; +import {IBeaconDb} from "../../../db/index.js"; +import { + DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX, + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, +} from "../../../db/repositories/dataColumnSidecars.js"; + +export async function* onDataColumnSidecarsByRange( + request: electra.DataColumnSidecarsByRangeRequest, + chain: IBeaconChain, + db: IBeaconDb +): AsyncIterable { + // Validate the request and compute the requested slot range + const {startSlot, count, columns} = validateDataColumnSidecarsByRangeRequest(request); + const endSlot = startSlot + count; + + const finalized = db.dataColumnSidecarsArchive; + const unfinalized = db.dataColumnSidecars; + const finalizedSlot = chain.forkChoice.getFinalizedBlock().slot; + + // Finalized range of data columns + if (startSlot <= finalizedSlot) { + // Chain of data columns won't change + for await (const {key, value: dataColumnSideCarsBytesWrapped} of finalized.binaryEntriesStream({ + gte: startSlot, + lt: endSlot, + })) { + yield* iterateDataColumnBytesFromWrapper( + chain, + dataColumnSideCarsBytesWrapped, + finalized.decodeKey(key), + columns + ); + } + } + + // Non-finalized range of data columns + if (endSlot > finalizedSlot) { + const headRoot = chain.forkChoice.getHeadRoot(); + // TODO DENEB: forkChoice should maintain an array of canonical blocks, and change only on reorg + const headChain = chain.forkChoice.getAllAncestorBlocks(headRoot); + + // Iterate head chain with ascending block numbers + for (let i = headChain.length - 1; i >= 0; i--) { + const block = headChain[i]; + + // Must include only data columns in the range requested + if (block.slot >= startSlot && block.slot < endSlot) { + // Note: Here the forkChoice head may change due to a re-org, so the headChain reflects the canonical chain + // at the time of the start of the request.
Spec is clear the chain of sidecars must be consistent, but on + // re-org there's no need to abort the request + // Spec: https://github.com/ethereum/consensus-specs/blob/a1e46d1ae47dd9d097725801575b46907c12a1f8/specs/eip4844/p2p-interface.md#blobssidecarsbyrange-v1 + + const dataColumnSideCarsBytesWrapped = await unfinalized.getBinary(fromHex(block.blockRoot)); + if (!dataColumnSideCarsBytesWrapped) { + // Handled the same as onBeaconBlocksByRange + throw new ResponseError(RespStatus.SERVER_ERROR, `No item for root ${block.blockRoot} slot ${block.slot}`); + } + yield* iterateDataColumnBytesFromWrapper(chain, dataColumnSideCarsBytesWrapped, block.slot, columns); + } + + // If block is after endSlot, stop iterating + else if (block.slot >= endSlot) { + break; + } + } + } +} + +export function* iterateDataColumnBytesFromWrapper( + chain: IBeaconChain, + dataColumnSidecarsBytesWrapped: Uint8Array, + blockSlot: Slot, + // use the requested columns to decide which sidecars to yield in the response + _columns: ColumnIndex[] +): Iterable { + const retrievedColumnsSizeBytes = dataColumnSidecarsBytesWrapped.slice( + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + ); + const columnsSize = ssz.UintNum64.deserialize(retrievedColumnsSizeBytes); + const allDataColumnSidecarsBytes = dataColumnSidecarsBytesWrapped.slice(DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX); + + const columnsLen = allDataColumnSidecarsBytes.length / columnsSize; + + for (let index = 0; index < columnsLen; index++) { + const dataColumnSidecarBytes = allDataColumnSidecarsBytes.slice(index * columnsSize, (index + 1) * columnsSize); + if (dataColumnSidecarBytes.length !== columnsSize) { + throw new ResponseError( + RespStatus.SERVER_ERROR, + `Invalid dataColumnSidecar index=${index} bytes length=${dataColumnSidecarBytes.length} expected=${columnsSize} for slot ${blockSlot} columnsLen=${columnsLen}` + ); + } + yield { + data: dataColumnSidecarBytes, + fork: chain.config.getForkName(blockSlot), + }; + } +} + +export function validateDataColumnSidecarsByRangeRequest( + request: electra.DataColumnSidecarsByRangeRequest +): electra.DataColumnSidecarsByRangeRequest { + const {startSlot, columns} = request; + let {count} = request; + + if (count < 1) { + throw new ResponseError(RespStatus.INVALID_REQUEST, "count < 1"); + } + // TODO: validate against MIN_EPOCHS_FOR_BLOCK_REQUESTS + if (startSlot < GENESIS_SLOT) { + throw new ResponseError(RespStatus.INVALID_REQUEST, "startSlot < genesis"); + } + + if (count > MAX_REQUEST_BLOCKS_DENEB) { + count = MAX_REQUEST_BLOCKS_DENEB; + } + + return {startSlot, count, columns}; +} diff --git a/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts new file mode 100644 index 000000000000..dea728cd62ef --- /dev/null +++ b/packages/beacon-node/src/network/reqresp/handlers/dataColumnSidecarsByRoot.ts @@ -0,0 +1,90 @@ +import {ResponseError, ResponseOutgoing, RespStatus} from "@lodestar/reqresp"; +import {NUMBER_OF_COLUMNS} from "@lodestar/params"; +import {electra, RootHex, ssz} from "@lodestar/types"; +import {toHex, fromHex} from "@lodestar/utils"; +import {IBeaconChain} from "../../../chain/index.js"; +import {IBeaconDb} from "../../../db/index.js"; +import { + DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, + COLUMN_SIZE_IN_WRAPPER_INDEX, +} from "../../../db/repositories/dataColumnSidecars.js"; + +export async function* onDataColumnSidecarsByRoot( + requestBody:
electra.DataColumnSidecarsByRootRequest, + chain: IBeaconChain, + db: IBeaconDb +): AsyncIterable { + const finalizedSlot = chain.forkChoice.getFinalizedBlock().slot; + + // In a sidecars by root request, it can be expected that sidecar requests will come + // clustered by block roots, and this helps us save db lookups once we load sidecars + // for a root + let lastFetchedSideCars: { + blockRoot: RootHex; + bytes: Uint8Array; + custodyColumns: Uint8Array; + columnsSize: number; + } | null = null; + + for (const dataColumnIdentifier of requestBody) { + const {blockRoot, index} = dataColumnIdentifier; + const blockRootHex = toHex(blockRoot); + const block = chain.forkChoice.getBlockHex(blockRootHex); + + // NOTE: Only support non-finalized blocks. + // SPEC: Clients MUST support requesting blocks and sidecars since the latest finalized epoch. + // https://github.com/ethereum/consensus-specs/blob/11a037fd9227e29ee809c9397b09f8cc3383a8c0/specs/eip4844/p2p-interface.md#beaconblockandblobssidecarbyroot-v1 + if (!block || block.slot <= finalizedSlot) { + continue; + } + + // Check if we need to load sidecars for a new block root + if (lastFetchedSideCars === null || lastFetchedSideCars.blockRoot !== blockRootHex) { + const dataColumnSidecarsBytesWrapped = await db.dataColumnSidecars.getBinary(fromHex(block.blockRoot)); + if (!dataColumnSidecarsBytesWrapped) { + // Handled the same as onBeaconBlocksByRange + throw new ResponseError(RespStatus.SERVER_ERROR, `No item for root ${block.blockRoot} slot ${block.slot}`); + } + + const retrievedColumnsSizeBytes = dataColumnSidecarsBytesWrapped.slice( + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + ); + const columnsSize = ssz.UintNum64.deserialize(retrievedColumnsSizeBytes); + const dataColumnSidecarsBytes = dataColumnSidecarsBytesWrapped.slice(DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX); + + const custodyColumns = dataColumnSidecarsBytesWrapped.slice( + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + NUMBER_OF_COLUMNS + ); + + lastFetchedSideCars = {blockRoot: blockRootHex, bytes: dataColumnSidecarsBytes, columnsSize, custodyColumns}; + } + + const dataIndex = lastFetchedSideCars.custodyColumns[index]; + const {columnsSize} = lastFetchedSideCars; + + if (dataIndex === undefined || dataIndex === 0) { + throw Error( + `Missing dataColumnSidecar blockRoot=${blockRootHex} index=${index} calculated dataIndex=${dataIndex}` + ); + } + + // dataIndex is a 1-based index + const dataColumnSidecarBytes = lastFetchedSideCars.bytes.slice( + (dataIndex - 1) * columnsSize, + dataIndex * columnsSize + ); + if (dataColumnSidecarBytes.length !== columnsSize) { + throw Error( + `Inconsistent state, dataColumnSidecar blockRoot=${blockRootHex} index=${index} dataColumnSidecarBytes=${dataColumnSidecarBytes.length} expected=${columnsSize}` + ); + } + + yield { + data: dataColumnSidecarBytes, + fork: chain.config.getForkName(block.slot), + }; + } +} diff --git a/packages/beacon-node/src/network/reqresp/handlers/index.ts b/packages/beacon-node/src/network/reqresp/handlers/index.ts index 50b8cc870844..0c4732310641 100644 --- a/packages/beacon-node/src/network/reqresp/handlers/index.ts +++ b/packages/beacon-node/src/network/reqresp/handlers/index.ts @@ -7,6 +7,8 @@ import {onBeaconBlocksByRange} from "./beaconBlocksByRange.js"; import {onBeaconBlocksByRoot} from "./beaconBlocksByRoot.js"; import {onBlobSidecarsByRoot} from "./blobSidecarsByRoot.js"; import {onBlobSidecarsByRange} from "./blobSidecarsByRange.js"; +import
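// Sketch of the on-disk wrapper lookup used by the two handlers above (illustrative; the
// layout constants live in db/repositories/dataColumnSidecars.ts and are not shown in this
// diff): custodyColumns[columnIndex] holds a 1-based position into the concatenated sidecar
// bytes, with 0 meaning the column is not stored locally.
function readColumnFromWrapper(
  sidecarsBytes: Uint8Array,
  custodyColumns: Uint8Array,
  columnsSize: number,
  columnIndex: number
): Uint8Array | null {
  const dataIndex = custodyColumns[columnIndex];
  if (dataIndex === undefined || dataIndex === 0) return null; // column not custodied
  return sidecarsBytes.slice((dataIndex - 1) * columnsSize, dataIndex * columnsSize);
}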
{onDataColumnSidecarsByRange} from "./dataColumnSidecarsByRange.js"; +import {onDataColumnSidecarsByRoot} from "./dataColumnSidecarsByRoot.js"; import {onLightClientBootstrap} from "./lightClientBootstrap.js"; import {onLightClientFinalityUpdate} from "./lightClientFinalityUpdate.js"; import {onLightClientOptimisticUpdate} from "./lightClientOptimisticUpdate.js"; @@ -44,6 +46,15 @@ export function getReqRespHandlers({db, chain}: {db: IBeaconDb; chain: IBeaconCh const body = ssz.deneb.BlobSidecarsByRangeRequest.deserialize(req.data); return onBlobSidecarsByRange(body, chain, db); }, + [ReqRespMethod.DataColumnSidecarsByRange]: (req) => { + const body = ssz.electra.DataColumnSidecarsByRangeRequest.deserialize(req.data); + return onDataColumnSidecarsByRange(body, chain, db); + }, + [ReqRespMethod.DataColumnSidecarsByRoot]: (req) => { + const body = ssz.electra.DataColumnSidecarsByRootRequest.deserialize(req.data); + return onDataColumnSidecarsByRoot(body, chain, db); + }, + [ReqRespMethod.LightClientBootstrap]: (req) => { const body = ssz.Root.deserialize(req.data); return onLightClientBootstrap(body, chain); diff --git a/packages/beacon-node/src/network/reqresp/protocols.ts b/packages/beacon-node/src/network/reqresp/protocols.ts index a0fa9576c93c..4fd96f6be711 100644 --- a/packages/beacon-node/src/network/reqresp/protocols.ts +++ b/packages/beacon-node/src/network/reqresp/protocols.ts @@ -71,6 +71,18 @@ export const BlobSidecarsByRoot = toProtocol({ contextBytesType: ContextBytesType.ForkDigest, }); +export const DataColumnSidecarsByRange = toProtocol({ + method: ReqRespMethod.DataColumnSidecarsByRange, + version: Version.V1, + contextBytesType: ContextBytesType.ForkDigest, +}); + +export const DataColumnSidecarsByRoot = toProtocol({ + method: ReqRespMethod.DataColumnSidecarsByRoot, + version: Version.V1, + contextBytesType: ContextBytesType.ForkDigest, +}); + export const LightClientBootstrap = toProtocol({ method: ReqRespMethod.LightClientBootstrap, version: Version.V1, diff --git a/packages/beacon-node/src/network/reqresp/rateLimit.ts b/packages/beacon-node/src/network/reqresp/rateLimit.ts index 881ab36bc05d..cf48d9d138fe 100644 --- a/packages/beacon-node/src/network/reqresp/rateLimit.ts +++ b/packages/beacon-node/src/network/reqresp/rateLimit.ts @@ -3,6 +3,8 @@ import { MAX_REQUEST_LIGHT_CLIENT_UPDATES, MAX_BLOBS_PER_BLOCK, MAX_REQUEST_BLOB_SIDECARS, + MAX_REQUEST_BLOCKS_DENEB, + NUMBER_OF_COLUMNS, } from "@lodestar/params"; import {InboundRateLimitQuota} from "@lodestar/reqresp"; import {ReqRespMethod, RequestBodyByMethod} from "./types.js"; @@ -46,6 +48,16 @@ export const rateLimitQuotas: Record = { byPeer: {quota: 128 * MAX_BLOBS_PER_BLOCK, quotaTimeMs: 10_000}, getRequestCount: getRequestCountFn(ReqRespMethod.BlobSidecarsByRoot, (req) => req.length), }, + [ReqRespMethod.DataColumnSidecarsByRange]: { + // Rationale: MAX_REQUEST_BLOCKS_DENEB * NUMBER_OF_COLUMNS + byPeer: {quota: MAX_REQUEST_BLOCKS_DENEB * NUMBER_OF_COLUMNS, quotaTimeMs: 10_000}, + getRequestCount: getRequestCountFn(ReqRespMethod.DataColumnSidecarsByRange, (req) => req.count), + }, + [ReqRespMethod.DataColumnSidecarsByRoot]: { + // Rationale: quota of BeaconBlocksByRoot * NUMBER_OF_COLUMNS + byPeer: {quota: 128 * NUMBER_OF_COLUMNS, quotaTimeMs: 10_000}, + getRequestCount: getRequestCountFn(ReqRespMethod.DataColumnSidecarsByRoot, (req) => req.length), + }, [ReqRespMethod.LightClientBootstrap]: { // As similar in the nature of `Status` protocol so we use the same rate limits. 
byPeer: {quota: 5, quotaTimeMs: 15_000}, diff --git a/packages/beacon-node/src/network/reqresp/types.ts b/packages/beacon-node/src/network/reqresp/types.ts index f690d282307f..cf930d8e1bc6 100644 --- a/packages/beacon-node/src/network/reqresp/types.ts +++ b/packages/beacon-node/src/network/reqresp/types.ts @@ -1,7 +1,7 @@ import {Type} from "@chainsafe/ssz"; import {ForkLightClient, ForkName, isForkLightClient} from "@lodestar/params"; import {Protocol, ProtocolHandler, ReqRespRequest} from "@lodestar/reqresp"; -import {Root, allForks, altair, deneb, phase0, ssz} from "@lodestar/types"; +import {Root, allForks, altair, deneb, phase0, ssz, electra} from "@lodestar/types"; export type ProtocolNoHandler = Omit; @@ -16,6 +16,8 @@ export enum ReqRespMethod { BeaconBlocksByRoot = "beacon_blocks_by_root", BlobSidecarsByRange = "blob_sidecars_by_range", BlobSidecarsByRoot = "blob_sidecars_by_root", + DataColumnSidecarsByRange = "data_column_sidecars_by_range", + DataColumnSidecarsByRoot = "data_column_sidecars_by_root", LightClientBootstrap = "light_client_bootstrap", LightClientUpdatesByRange = "light_client_updates_by_range", LightClientFinalityUpdate = "light_client_finality_update", @@ -32,6 +34,8 @@ export type RequestBodyByMethod = { [ReqRespMethod.BeaconBlocksByRoot]: phase0.BeaconBlocksByRootRequest; [ReqRespMethod.BlobSidecarsByRange]: deneb.BlobSidecarsByRangeRequest; [ReqRespMethod.BlobSidecarsByRoot]: deneb.BlobSidecarsByRootRequest; + [ReqRespMethod.DataColumnSidecarsByRange]: electra.DataColumnSidecarsByRangeRequest; + [ReqRespMethod.DataColumnSidecarsByRoot]: electra.DataColumnSidecarsByRootRequest; [ReqRespMethod.LightClientBootstrap]: Root; [ReqRespMethod.LightClientUpdatesByRange]: altair.LightClientUpdatesByRange; [ReqRespMethod.LightClientFinalityUpdate]: null; @@ -48,6 +52,9 @@ type ResponseBodyByMethod = { [ReqRespMethod.BeaconBlocksByRoot]: allForks.SignedBeaconBlock; [ReqRespMethod.BlobSidecarsByRange]: deneb.BlobSidecar; [ReqRespMethod.BlobSidecarsByRoot]: deneb.BlobSidecar; + [ReqRespMethod.DataColumnSidecarsByRange]: electra.DataColumnSidecar; + [ReqRespMethod.DataColumnSidecarsByRoot]: electra.DataColumnSidecar; + [ReqRespMethod.LightClientBootstrap]: altair.LightClientBootstrap; [ReqRespMethod.LightClientUpdatesByRange]: altair.LightClientUpdate; [ReqRespMethod.LightClientFinalityUpdate]: altair.LightClientFinalityUpdate; @@ -62,10 +69,14 @@ export const requestSszTypeByMethod: { [ReqRespMethod.Goodbye]: ssz.phase0.Goodbye, [ReqRespMethod.Ping]: ssz.phase0.Ping, [ReqRespMethod.Metadata]: null, + [ReqRespMethod.BeaconBlocksByRange]: ssz.phase0.BeaconBlocksByRangeRequest, [ReqRespMethod.BeaconBlocksByRoot]: ssz.phase0.BeaconBlocksByRootRequest, [ReqRespMethod.BlobSidecarsByRange]: ssz.deneb.BlobSidecarsByRangeRequest, [ReqRespMethod.BlobSidecarsByRoot]: ssz.deneb.BlobSidecarsByRootRequest, + [ReqRespMethod.DataColumnSidecarsByRange]: ssz.electra.DataColumnSidecarsByRangeRequest, + [ReqRespMethod.DataColumnSidecarsByRoot]: ssz.electra.DataColumnSidecarsByRootRequest, + [ReqRespMethod.LightClientBootstrap]: ssz.Root, [ReqRespMethod.LightClientUpdatesByRange]: ssz.altair.LightClientUpdatesByRange, [ReqRespMethod.LightClientFinalityUpdate]: null, @@ -91,6 +102,9 @@ export const responseSszTypeByMethod: {[K in ReqRespMethod]: ResponseTypeGetter< [ReqRespMethod.BeaconBlocksByRoot]: blocksResponseType, [ReqRespMethod.BlobSidecarsByRange]: () => ssz.deneb.BlobSidecar, [ReqRespMethod.BlobSidecarsByRoot]: () => ssz.deneb.BlobSidecar, + [ReqRespMethod.DataColumnSidecarsByRange]: () 
=> ssz.electra.DataColumnSidecar, + [ReqRespMethod.DataColumnSidecarsByRoot]: () => ssz.electra.DataColumnSidecar, + [ReqRespMethod.LightClientBootstrap]: (fork) => ssz.allForksLightClient[onlyLightclientFork(fork)].LightClientBootstrap, [ReqRespMethod.LightClientUpdatesByRange]: (fork) => diff --git a/packages/beacon-node/src/node/nodejs.ts b/packages/beacon-node/src/node/nodejs.ts index a1147b60bea2..2e7ee756c336 100644 --- a/packages/beacon-node/src/node/nodejs.ts +++ b/packages/beacon-node/src/node/nodejs.ts @@ -21,6 +21,7 @@ import {getApi, BeaconRestApiServer} from "../api/index.js"; import {initializeExecutionEngine, initializeExecutionBuilder} from "../execution/index.js"; import {initializeEth1ForBlockProduction} from "../eth1/index.js"; import {initCKZG, loadEthereumTrustedSetup, TrustedFileMode} from "../util/kzg.js"; +import {NodeId} from "../network/subnets/interface.js"; import {IBeaconNodeOptions} from "./options.js"; import {runNodeNotifier} from "./notifier.js"; @@ -49,6 +50,7 @@ export type BeaconNodeInitModules = { logger: LoggerNode; processShutdownCallback: ProcessShutdownCallback; peerId: PeerId; + nodeId: NodeId; peerStoreDir?: string; anchorState: BeaconStateAllForks; wsCheckpoint?: phase0.Checkpoint; @@ -146,6 +148,7 @@ export class BeaconNode { logger, processShutdownCallback, peerId, + nodeId, peerStoreDir, anchorState, wsCheckpoint, @@ -195,6 +198,7 @@ export class BeaconNode { : null; const chain = new BeaconChain(opts.chain, { + nodeId, config, db, logger: logger.child({module: LoggerModule.chain}), @@ -231,6 +235,7 @@ export class BeaconNode { chain, db, peerId, + nodeId, peerStoreDir, getReqRespHandler: getReqRespHandlers({db, chain}), }); diff --git a/packages/beacon-node/src/sync/range/chain.ts b/packages/beacon-node/src/sync/range/chain.ts index 41bbce3da820..f100fe2153a2 100644 --- a/packages/beacon-node/src/sync/range/chain.ts +++ b/packages/beacon-node/src/sync/range/chain.ts @@ -2,6 +2,8 @@ import {toHexString} from "@chainsafe/ssz"; import {Epoch, Root, Slot, phase0} from "@lodestar/types"; import {ErrorAborted, Logger} from "@lodestar/utils"; import {ChainForkConfig} from "@lodestar/config"; +import {ForkName} from "@lodestar/params"; + import {BlockInput, BlockInputType} from "../../chain/blocks/types.js"; import {PeerAction} from "../../network/index.js"; import {ItTrigger} from "../../util/itTrigger.js"; @@ -404,14 +406,27 @@ export class SyncChain { const blobs = res.result.reduce((acc, blockInput) => { hasPostDenebBlocks ||= blockInput.type === BlockInputType.availableData; return hasPostDenebBlocks - ? acc + (blockInput.type === BlockInputType.availableData ? blockInput.blockData.blobs.length : 0) + ? acc + + (blockInput.type === BlockInputType.availableData && blockInput.blockData.fork === ForkName.deneb + ? blockInput.blockData.blobs.length + : 0) + : 0; + }, 0); + const dataColumns = res.result.reduce((acc, blockInput) => { + hasPostDenebBlocks ||= blockInput.type === BlockInputType.availableData; + return hasPostDenebBlocks + ? acc + + (blockInput.type === BlockInputType.availableData && blockInput.blockData.fork === ForkName.electra + ? 
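// Illustrative sketch (not part of the diff): how the data-column ReqResp types registered above
// might be exercised. The request shapes come from ssz.electra added by this PR; the peer
// selection and send plumbing are assumed and not shown here.
import {ssz} from "@lodestar/types";

// DataColumnSidecarsByRangeRequest = {startSlot, count, columns}
const byRangeRequest = ssz.electra.DataColumnSidecarsByRangeRequest.defaultValue();
byRangeRequest.startSlot = 1000;
byRangeRequest.count = 32;
byRangeRequest.columns = [27, 59, 91, 123]; // e.g. this node's custody columns

// Serialized with the type registered in requestSszTypeByMethod above
const requestBytes = ssz.electra.DataColumnSidecarsByRangeRequest.serialize(byRangeRequest);

// DataColumnSidecarsByRoot carries a list of (blockRoot, columnIndex) identifiers
const byRootRequest = [{blockRoot: new Uint8Array(32), index: 27}];
const byRootBytes = ssz.electra.DataColumnSidecarsByRootRequest.serialize(byRootRequest);
console.log(requestBytes.length, byRootBytes.length);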
blockInput.blockData.dataColumns.length + : 0) : 0; }, 0); + const downloadInfo = {blocks: res.result.length}; if (hasPostDenebBlocks) { - Object.assign(downloadInfo, {blobs}); + Object.assign(downloadInfo, {blobs, dataColumns}); } - this.logger.debug("Downloaded batch", {id: this.logId, ...batch.getMetadata(), ...downloadInfo}); + this.logger.debug("Downloaded batch", {id: this.logId, ...batch.getMetadata(), ...downloadInfo, peer}); this.triggerBatchProcessor(); } else { this.logger.verbose("Batch download error", {id: this.logId, ...batch.getMetadata()}, res.err); diff --git a/packages/beacon-node/src/sync/unknownBlock.ts b/packages/beacon-node/src/sync/unknownBlock.ts index 3c15b32eb8d8..20c56e1f1a17 100644 --- a/packages/beacon-node/src/sync/unknownBlock.ts +++ b/packages/beacon-node/src/sync/unknownBlock.ts @@ -2,7 +2,7 @@ import {fromHexString, toHexString} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; import {Logger, pruneSetToMax} from "@lodestar/utils"; import {Root, RootHex, deneb} from "@lodestar/types"; -import {INTERVALS_PER_SLOT} from "@lodestar/params"; +import {INTERVALS_PER_SLOT, ForkName} from "@lodestar/params"; import {sleep} from "@lodestar/utils"; import {INetwork, NetworkEvent, NetworkEventData, PeerAction} from "../network/index.js"; import {PeerIdStr} from "../util/peerId.js"; @@ -515,21 +515,28 @@ export class UnknownBlockSync { const shuffledPeers = shuffle(connectedPeers); let blockRootHex; - let pendingBlobs; let blobKzgCommitmentsLen; let blockRoot; + const dataMeta: Record = {}; if (unavailableBlockInput.block === null) { blockRootHex = unavailableBlockInput.blockRootHex; blockRoot = fromHexString(blockRootHex); } else { - const unavailableBlock = unavailableBlockInput.block; + const {cachedData, block: unavailableBlock} = unavailableBlockInput; blockRoot = this.config .getForkTypes(unavailableBlock.message.slot) .BeaconBlock.hashTreeRoot(unavailableBlock.message); blockRootHex = toHexString(blockRoot); blobKzgCommitmentsLen = (unavailableBlock.message.body as deneb.BeaconBlockBody).blobKzgCommitments.length; - pendingBlobs = blobKzgCommitmentsLen - unavailableBlockInput.cachedData.blobsCache.size; + + if (cachedData.fork === ForkName.deneb) { + const pendingBlobs = blobKzgCommitmentsLen - cachedData.blobsCache.size; + Object.assign(dataMeta, {pendingBlobs}); + } else if (cachedData.fork === ForkName.electra) { + const pendingColumns = this.network.custodyConfig.custodyColumnsLen - cachedData.dataColumnsCache.size; + Object.assign(dataMeta, {pendingColumns}); + } } let lastError: Error | null = null; @@ -559,7 +566,7 @@ export class UnknownBlockSync { if (unavailableBlockInput.block === null) { this.logger.debug("Fetched NullBlockInput", {attempts: i, blockRootHex}); } else { - this.logger.debug("Fetched UnavailableBlockInput", {attempts: i, pendingBlobs, blobKzgCommitmentsLen}); + this.logger.debug("Fetched UnavailableBlockInput", {attempts: i, ...dataMeta, blobKzgCommitmentsLen}); } return {blockInput, peerIdStr: peer}; diff --git a/packages/beacon-node/src/util/blobs.ts b/packages/beacon-node/src/util/blobs.ts index 13d935ba29da..3fceb7838d04 100644 --- a/packages/beacon-node/src/util/blobs.ts +++ b/packages/beacon-node/src/util/blobs.ts @@ -1,9 +1,16 @@ import {digest as sha256Digest} from "@chainsafe/as-sha256"; import {Tree} from "@chainsafe/persistent-merkle-tree"; -import {VERSIONED_HASH_VERSION_KZG, KZG_COMMITMENT_GINDEX0, ForkName} from "@lodestar/params"; -import {deneb, ssz, allForks} from "@lodestar/types"; +import { + 
VERSIONED_HASH_VERSION_KZG, + KZG_COMMITMENT_GINDEX0, + KZG_COMMITMENTS_GINDEX, + ForkName, + NUMBER_OF_COLUMNS, +} from "@lodestar/params"; +import {electra, deneb, ssz, allForks} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; import {signedBlockToSignedHeader} from "@lodestar/state-transition"; +import {ckzg} from "./kzg.js"; type VersionHash = Uint8Array; @@ -24,6 +31,14 @@ export function computeInclusionProof( return new Tree(bodyView.node).getSingleProof(BigInt(commitmentGindex)); } +export function computeKzgCommitmentsInclusionProof( + fork: ForkName, + body: allForks.BeaconBlockBody +): electra.KzgCommitmentsInclusionProof { + const bodyView = (ssz[fork].BeaconBlockBody as allForks.AllForksSSZTypes["BeaconBlockBody"]).toView(body); + return new Tree(bodyView.node).getSingleProof(BigInt(KZG_COMMITMENTS_GINDEX)); +} + export function computeBlobSidecars( config: ChainForkConfig, signedBlock: allForks.SignedBeaconBlock, @@ -46,3 +61,41 @@ return {index, blob, kzgCommitment, kzgProof, signedBlockHeader, kzgCommitmentInclusionProof}; }); } + +export function computeDataColumnSidecars( + config: ChainForkConfig, + signedBlock: allForks.SignedBeaconBlock, + contents: deneb.Contents & {kzgCommitmentsInclusionProof?: electra.KzgCommitmentsInclusionProof} +): electra.DataColumnSidecars { + const blobKzgCommitments = (signedBlock as deneb.SignedBeaconBlock).message.body.blobKzgCommitments; + if (blobKzgCommitments === undefined) { + throw Error("Invalid block with missing blobKzgCommitments for computeDataColumnSidecars"); + } + if (blobKzgCommitments.length === 0) { + return []; + } + + const signedBlockHeader = signedBlockToSignedHeader(config, signedBlock); + const fork = config.getForkName(signedBlockHeader.message.slot); + const kzgCommitmentsInclusionProof = + contents.kzgCommitmentsInclusionProof ??
computeKzgCommitmentsInclusionProof(fork, signedBlock.message.body); + const cellsAndProofs = contents.blobs.map((blob) => ckzg.computeCellsAndKzgProofs(blob)); + const dataColumnSidecars = Array.from({length: NUMBER_OF_COLUMNS}, (_, j) => { + // j'th column + const column = Array.from({length: contents.blobs.length}, (_, i) => cellsAndProofs[i][0][j]); + const kzgProofs = Array.from({length: contents.blobs.length}, (_, i) => cellsAndProofs[i][1][j]); + + const dataColumnSidecar = { + index: j, + column, + kzgCommitments: blobKzgCommitments, + kzgProofs, + signedBlockHeader, + kzgCommitmentsInclusionProof, + } as electra.DataColumnSidecar; + + return dataColumnSidecar; + }); + + return dataColumnSidecars; +} diff --git a/packages/beacon-node/src/util/dataColumns.ts b/packages/beacon-node/src/util/dataColumns.ts new file mode 100644 index 000000000000..9efec2a02ba6 --- /dev/null +++ b/packages/beacon-node/src/util/dataColumns.ts @@ -0,0 +1,74 @@ +import {digest} from "@chainsafe/as-sha256"; +import {NUMBER_OF_COLUMNS, DATA_COLUMN_SIDECAR_SUBNET_COUNT} from "@lodestar/params"; +import {ColumnIndex} from "@lodestar/types"; +import {ChainForkConfig} from "@lodestar/config"; +import {ssz} from "@lodestar/types"; +import {NodeId} from "../network/subnets/index.js"; + +export type CustodyConfig = {custodyColumnsIndex: Uint8Array; custodyColumnsLen: number; custodyColumns: ColumnIndex[]}; + +export function getCustodyConfig(nodeId: NodeId, config: ChainForkConfig): CustodyConfig { + const custodyColumns = getCustodyColumns(nodeId, config.CUSTODY_REQUIREMENT); + const custodyMeta = getCustodyColumnsMeta(custodyColumns); + return {...custodyMeta, custodyColumns}; +} + +export function getCustodyColumnsMeta(custodyColumns: ColumnIndex[]): { + custodyColumnsIndex: Uint8Array; + custodyColumnsLen: number; +} { + // custodyColumnsIndex maps each column to its 1-based position in the custodied columns array, + // with zero representing a column that is not custodied + const custodyColumnsIndex = new Uint8Array(NUMBER_OF_COLUMNS); + let custodyAtIndex = 1; + for (const columnIndex of custodyColumns) { + custodyColumnsIndex[columnIndex] = custodyAtIndex; + custodyAtIndex++; + } + return {custodyColumnsIndex, custodyColumnsLen: custodyColumns.length}; +} + +// optimize by having a size limited index/map +export function getCustodyColumns(nodeId: NodeId, custodySubnetCount: number): ColumnIndex[] { + const subnetIds = getCustodyColumnSubnets(nodeId, custodySubnetCount); + const columnsPerSubnet = Number(NUMBER_OF_COLUMNS / DATA_COLUMN_SIDECAR_SUBNET_COUNT); + + const columnIndexes = []; + for (const subnetId of subnetIds) { + for (let i = 0; i < columnsPerSubnet; i++) { + const columnIndex = DATA_COLUMN_SIDECAR_SUBNET_COUNT * i + subnetId; + columnIndexes.push(columnIndex); + } + } + + columnIndexes.sort((a, b) => a - b); + return columnIndexes; +} + +export function getCustodyColumnSubnets(nodeId: NodeId, custodySubnetCount: number): number[] { + const subnetIds: number[] = []; + if (custodySubnetCount > DATA_COLUMN_SIDECAR_SUBNET_COUNT) { + custodySubnetCount = DATA_COLUMN_SIDECAR_SUBNET_COUNT; + } + + let currentId = ssz.UintBn256.deserialize(nodeId); + while (subnetIds.length < custodySubnetCount) { + // could be optimized + const currentIdBytes = ssz.UintBn256.serialize(currentId); + const subnetId = Number( + ssz.UintBn64.deserialize(digest(currentIdBytes).slice(0, 8)) % BigInt(DATA_COLUMN_SIDECAR_SUBNET_COUNT) + ); + if (!subnetIds.includes(subnetId)) { + subnetIds.push(subnetId); + } + + const willOverflow =
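// A minimal sketch (not in the diff) of the cell transpose performed by computeDataColumnSidecars
// above: computeCellsAndKzgProofs returns, per blob, one cell and one proof for every column, and
// sidecar j is assembled by taking cell j and proof j from every blob. Plain string arrays stand in
// for the ckzg output here; NUM_COLUMNS mirrors NUMBER_OF_COLUMNS = 128 from this PR's presets.
const NUM_COLUMNS = 128;
const blobCount = 3;

// cellsAndProofs[i] mirrors ckzg.computeCellsAndKzgProofs(blob_i)
const cellsAndProofs = Array.from({length: blobCount}, (_, i) => ({
  cells: Array.from({length: NUM_COLUMNS}, (_, j) => `cell(blob=${i},col=${j})`),
  proofs: Array.from({length: NUM_COLUMNS}, (_, j) => `proof(blob=${i},col=${j})`),
}));

// Sidecar j holds one cell and one proof per blob, all belonging to column j
const columns = Array.from({length: NUM_COLUMNS}, (_, j) => ({
  index: j,
  column: cellsAndProofs.map(({cells}) => cells[j]),
  kzgProofs: cellsAndProofs.map(({proofs}) => proofs[j]),
}));

console.log(columns[5].column); // ["cell(blob=0,col=5)", "cell(blob=1,col=5)", "cell(blob=2,col=5)"]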
currentIdBytes.reduce((acc, elem) => acc && elem === 0xff, true); + if (willOverflow) { + currentId = BigInt(0); + } else { + currentId++; + } + } + + return subnetIds; +} diff --git a/packages/beacon-node/src/util/kzg.ts b/packages/beacon-node/src/util/kzg.ts index e20a379d62ff..0fc45eff04fe 100644 --- a/packages/beacon-node/src/util/kzg.ts +++ b/packages/beacon-node/src/util/kzg.ts @@ -19,6 +19,18 @@ export let ckzg: { computeBlobKzgProof(blob: Uint8Array, commitment: Uint8Array): Uint8Array; verifyBlobKzgProof(blob: Uint8Array, commitment: Uint8Array, proof: Uint8Array): boolean; verifyBlobKzgProofBatch(blobs: Uint8Array[], expectedKzgCommitments: Uint8Array[], kzgProofs: Uint8Array[]): boolean; + computeCells(blob: Uint8Array): Uint8Array[]; + computeCellsAndKzgProofs(blob: Uint8Array): [Uint8Array[], Uint8Array[]]; + cellsToBlob(cells: Uint8Array[]): Uint8Array; + recoverAllCells(cellIds: number[], cells: Uint8Array[]): Uint8Array[]; + verifyCellKzgProof(commitmentBytes: Uint8Array, cellId: number, cell: Uint8Array, proofBytes: Uint8Array): boolean; + verifyCellKzgProofBatch( + commitmentsBytes: Uint8Array[], + rowIndices: number[], + columnIndices: number[], + cells: Uint8Array[], + proofsBytes: Uint8Array[] + ): boolean; } = { freeTrustedSetup: ckzgNotLoaded, loadTrustedSetup: ckzgNotLoaded, @@ -26,6 +38,12 @@ export let ckzg: { computeBlobKzgProof: ckzgNotLoaded, verifyBlobKzgProof: ckzgNotLoaded, verifyBlobKzgProofBatch: ckzgNotLoaded, + computeCells: ckzgNotLoaded, + computeCellsAndKzgProofs: ckzgNotLoaded, + cellsToBlob: ckzgNotLoaded, + recoverAllCells: ckzgNotLoaded, + verifyCellKzgProof: ckzgNotLoaded, + verifyCellKzgProofBatch: ckzgNotLoaded, }; // Global variable __dirname no longer available in ES6 modules. diff --git a/packages/beacon-node/src/util/sszBytes.ts b/packages/beacon-node/src/util/sszBytes.ts index 802b9a266ab1..df0a087b482b 100644 --- a/packages/beacon-node/src/util/sszBytes.ts +++ b/packages/beacon-node/src/util/sszBytes.ts @@ -198,6 +198,26 @@ export function getSlotFromBlobSidecarSerialized(data: Uint8Array): Slot | null return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_BLOB_SIDECAR); } +/** + * { + index: ColumnIndex [ fixed - 8 bytes], + column: DataColumn BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL * , + kzgCommitments: denebSsz.BlobKzgCommitments, + kzgProofs: denebSsz.KZGProofs, + signedBlockHeader: phase0Ssz.SignedBeaconBlockHeader, + kzgCommitmentsInclusionProof: KzgCommitmentsInclusionProof, + } + */ + +const SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR = 20; +export function getSlotFromDataColumnSidecarSerialized(data: Uint8Array): Slot | null { + if (data.length < SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR + SLOT_SIZE) { + return null; + } + + return getSlotFromOffset(data, SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR); +} + function getSlotFromOffset(data: Uint8Array, offset: number): Slot { // TODO: Optimize const dv = new DataView(data.buffer, data.byteOffset, data.byteLength); diff --git a/packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts b/packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts new file mode 100644 index 000000000000..93ac6f4cde1e --- /dev/null +++ b/packages/beacon-node/test/unit/db/api/repositories/dataColumn.test.ts @@ -0,0 +1,103 @@ +import {rimraf} from "rimraf"; +import {describe, it, expect, beforeEach, afterEach, beforeAll} from "vitest"; +import {ssz} from "@lodestar/types"; +import {createChainForkConfig} from "@lodestar/config"; +import 
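// Worked example (not part of the diff) of the subnet-to-column expansion in getCustodyColumns
// above, using the preset values this PR adds: NUMBER_OF_COLUMNS = 128 and
// DATA_COLUMN_SIDECAR_SUBNET_COUNT = 32, so each subnet maps to 128 / 32 = 4 columns.
const SUBNET_COUNT = 32;
const COLUMNS_PER_SUBNET = 128 / SUBNET_COUNT; // 4

function columnsForSubnet(subnetId: number): number[] {
  // Same formula as getCustodyColumns: columnIndex = DATA_COLUMN_SIDECAR_SUBNET_COUNT * i + subnetId
  return Array.from({length: COLUMNS_PER_SUBNET}, (_, i) => SUBNET_COUNT * i + subnetId);
}

// With CUSTODY_REQUIREMENT = 1 a node custodies a single subnet; if the hash walk over the nodeId
// lands on subnet 27, the custodied columns are exactly the ones asserted in the unit test below.
console.log(columnsForSubnet(27)); // [27, 59, 91, 123]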
{LevelDbController} from "@lodestar/db"; +import {NUMBER_OF_COLUMNS} from "@lodestar/params"; + +import { + DataColumnSidecarsRepository, + dataColumnSidecarsWrapperSsz, + DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX, + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX, +} from "../../../../../src/db/repositories/dataColumnSidecars.js"; +import {testLogger} from "../../../../utils/logger.js"; +import {computeDataColumnSidecars} from "../../../../../src/util/blobs.js"; +import {loadEthereumTrustedSetup, initCKZG} from "../../../../../src/util/kzg.js"; + +/* eslint-disable @typescript-eslint/naming-convention */ +const config = createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, +}); +describe("block archive repository", function () { + const testDir = "./.tmp"; + let dataColumnRepo: DataColumnSidecarsRepository; + let db: LevelDbController; + + beforeEach(async function () { + db = await LevelDbController.create({name: testDir}, {logger: testLogger()}); + dataColumnRepo = new DataColumnSidecarsRepository(config, db); + }); + afterEach(async function () { + await db.close(); + rimraf.sync(testDir); + }); + + beforeAll(async function () { + await initCKZG(); + loadEthereumTrustedSetup(); + }); + + it("should get block by parent root", async function () { + const dataColumn = ssz.electra.DataColumnSidecar.defaultValue(); + const blockRoot = ssz.phase0.BeaconBlockHeader.hashTreeRoot(dataColumn.signedBlockHeader.message); + const slot = dataColumn.signedBlockHeader.message.slot; + const blob = ssz.deneb.Blob.defaultValue(); + const commitment = ssz.deneb.KZGCommitment.defaultValue(); + const singedBlock = ssz.electra.SignedBeaconBlock.defaultValue(); + + singedBlock.message.body.blobKzgCommitments.push(commitment); + singedBlock.message.body.blobKzgCommitments.push(commitment); + singedBlock.message.body.blobKzgCommitments.push(commitment); + const dataColumnSidecars = computeDataColumnSidecars(config, singedBlock, {blobs: [blob, blob, blob]}); + for (let j = 0; j < dataColumnSidecars.length; j++) { + dataColumnSidecars[j].index = j; + } + + const blobKzgCommitmentsLen = 3; + const columnsSize = + ssz.electra.DataColumnSidecar.minSize + + blobKzgCommitmentsLen * + (ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize); + + const numColumns = NUMBER_OF_COLUMNS; + const custodyColumns = new Uint8Array(numColumns); + + const writeData = { + blockRoot, + slot, + numColumns, + columnsSize, + custodyColumns, + dataColumnSidecars, + }; + + await dataColumnRepo.add(writeData); + const retrievedBinary = await dataColumnRepo.getBinary(blockRoot); + if (!retrievedBinary) throw Error("get by root returned null"); + + const retrieved = dataColumnSidecarsWrapperSsz.deserialize(retrievedBinary); + expect(dataColumnSidecarsWrapperSsz.equals(retrieved, writeData)).toBe(true); + + const retrievedColumnsSizeBytes = retrievedBinary.slice( + COLUMN_SIZE_IN_WRAPPER_INDEX, + CUSTODY_COLUMNS_IN_IN_WRAPPER_INDEX + ); + + const retrievedColumnsSize = ssz.UintNum64.deserialize(retrievedColumnsSizeBytes); + expect(retrievedColumnsSize === columnsSize).toBe(true); + const dataColumnSidecarsBytes = retrievedBinary.slice(DATA_COLUMN_SIDECARS_IN_WRAPPER_INDEX); + expect(dataColumnSidecarsBytes.length === columnsSize * numColumns).toBe(true); + + for (let j = 0; j < numColumns; j++) { + const dataColumnBytes = dataColumnSidecarsBytes.slice(j * columnsSize, (j + 1) * columnsSize); + const 
retrivedDataColumnSidecar = ssz.electra.DataColumnSidecar.deserialize(dataColumnBytes); + const index = retrivedDataColumnSidecar.index; + expect(j === index).toBe(true); + } + }); +}); diff --git a/packages/beacon-node/test/unit/network/gossip/topic.test.ts b/packages/beacon-node/test/unit/network/gossip/topic.test.ts index dbaa4002bfcc..430f45ee6807 100644 --- a/packages/beacon-node/test/unit/network/gossip/topic.test.ts +++ b/packages/beacon-node/test/unit/network/gossip/topic.test.ts @@ -21,6 +21,12 @@ describe("network / gossip / topic", function () { topicStr: "/eth2/46acb19a/blob_sidecar_1/ssz_snappy", }, ], + [GossipType.data_column_sidecar]: [ + { + topic: {type: GossipType.data_column_sidecar, index: 1, fork: ForkName.electra, encoding}, + topicStr: "/eth2/46acb19a/data_column_sidecar/ssz_snappy", + }, + ], [GossipType.beacon_aggregate_and_proof]: [ { topic: {type: GossipType.beacon_aggregate_and_proof, fork: ForkName.phase0, encoding}, diff --git a/packages/beacon-node/test/unit/util/dataColumn.test.ts b/packages/beacon-node/test/unit/util/dataColumn.test.ts new file mode 100644 index 000000000000..62a4ef6b8d5d --- /dev/null +++ b/packages/beacon-node/test/unit/util/dataColumn.test.ts @@ -0,0 +1,14 @@ +import {describe, it, expect} from "vitest"; +import {ssz} from "@lodestar/types"; + +import {getCustodyColumns} from "../../../src/util/dataColumns.js"; + +describe("custody columns", () => { + it("getCustodyColumnIndexes", async () => { + const nodeId = ssz.UintBn256.serialize( + BigInt("84065159290331321853352677657753050104170032838956724170714636178275273565505") + ); + const columnIndexs = getCustodyColumns(nodeId, 1); + expect(columnIndexs).toEqual([27, 59, 91, 123]); + }); +}); diff --git a/packages/beacon-node/test/utils/node/beacon.ts b/packages/beacon-node/test/utils/node/beacon.ts index 0163fa148102..f8fee8294907 100644 --- a/packages/beacon-node/test/utils/node/beacon.ts +++ b/packages/beacon-node/test/utils/node/beacon.ts @@ -1,3 +1,4 @@ +import crypto from "node:crypto"; import deepmerge from "deepmerge"; import tmp from "tmp"; import {PeerId} from "@libp2p/interface"; @@ -19,6 +20,7 @@ import {defaultOptions} from "../../../src/node/options.js"; import {BeaconDb} from "../../../src/db/index.js"; import {testLogger} from "../logger.js"; import {InteropStateOpts} from "../../../src/node/utils/interop/state.js"; +import {NodeId} from "../../../src/network/subnets/interface.js"; export async function getDevBeaconNode( opts: { @@ -27,15 +29,17 @@ export async function getDevBeaconNode( validatorCount?: number; logger?: LoggerNode; peerId?: PeerId; + nodeId?: NodeId; peerStoreDir?: string; anchorState?: BeaconStateAllForks; wsCheckpoint?: phase0.Checkpoint; } & InteropStateOpts ): Promise { const {params, validatorCount = 8, peerStoreDir} = opts; - let {options = {}, logger, peerId} = opts; + let {options = {}, logger, peerId, nodeId} = opts; if (!peerId) peerId = await createSecp256k1PeerId(); + if (!nodeId) nodeId = crypto.randomBytes(32); const tmpDir = tmp.dirSync({unsafeCleanup: true}); const config = createChainForkConfig({...minimalConfig, ...params}); logger = logger ?? 
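// A small sketch (assumptions flagged, not part of the diff) of why the repository test above
// stores a single columnsSize per block: every DataColumnSidecar of the same block serializes to
// the same length, so one size plus NUMBER_OF_COLUMNS equal slots is enough to slice individual
// columns back out of the flat wrapper bytes. The size expression mirrors the one in the test.
import {ssz} from "@lodestar/types";

const blobCount = 3;
const sidecarSize =
  ssz.electra.DataColumnSidecar.minSize +
  blobCount * (ssz.electra.Cell.fixedSize + ssz.deneb.KZGCommitment.fixedSize + ssz.deneb.KZGProof.fixedSize);

// Column j then lives at [j * sidecarSize, (j + 1) * sidecarSize) within the wrapper's sidecar
// bytes, which is exactly how the test slices and deserializes each column.
console.log(sidecarSize); // = DataColumnSidecar.minSize + 3 * (2048 + 48 + 48) under this PR's presets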
testLogger(); @@ -94,6 +98,7 @@ export async function getDevBeaconNode( logger, processShutdownCallback: () => {}, peerId, + nodeId, peerStoreDir, anchorState, wsCheckpoint: opts.wsCheckpoint, diff --git a/packages/cli/src/cmds/beacon/handler.ts b/packages/cli/src/cmds/beacon/handler.ts index ec96081d3c75..c12e35f91dd5 100644 --- a/packages/cli/src/cmds/beacon/handler.ts +++ b/packages/cli/src/cmds/beacon/handler.ts @@ -35,7 +35,8 @@ const EIGHT_GB = 8 * 1024 * 1024 * 1024; * Runs a beacon node. */ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise { - const {config, options, beaconPaths, network, version, commit, peerId, logger} = await beaconHandlerInit(args); + const {config, options, beaconPaths, network, version, commit, nodeId, peerId, logger} = + await beaconHandlerInit(args); const heapSizeLimit = getHeapStatistics().heap_size_limit; if (heapSizeLimit < EIGHT_GB) { @@ -89,6 +90,7 @@ export async function beaconHandler(args: BeaconArgs & GlobalArgs): Promise { +): Promise<{peerId: PeerId; enr: SignableENR; nodeId: Uint8Array}> { const {persistNetworkIdentity} = args; const newPeerIdAndENR = async (): Promise<{peerId: PeerId; enr: SignableENR}> => { @@ -181,14 +188,16 @@ export async function initPeerIdAndEnr( const enrFile = path.join(beaconDir, "enr"); const peerIdFile = path.join(beaconDir, "peer-id.json"); const {peerId, enr, newEnr} = await readPersistedPeerIdAndENR(peerIdFile, enrFile); - overwriteEnrWithCliArgs(enr, args, logger, {newEnr, bootnode}); + overwriteEnrWithCliArgs(config, enr, args, logger, {newEnr, bootnode}); // Re-persist peer-id and enr writeFile600Perm(peerIdFile, exportToJSON(peerId)); writeFile600Perm(enrFile, enr.encodeTxt()); - return {peerId, enr}; + const nodeId = fromHex(enr.nodeId); + return {peerId, enr, nodeId}; } else { const {peerId, enr} = await newPeerIdAndENR(); - overwriteEnrWithCliArgs(enr, args, logger, {newEnr: true, bootnode}); - return {peerId, enr}; + overwriteEnrWithCliArgs(config, enr, args, logger, {newEnr: true, bootnode}); + const nodeId = fromHex(enr.nodeId); + return {peerId, enr, nodeId}; } } diff --git a/packages/cli/src/cmds/bootnode/handler.ts b/packages/cli/src/cmds/bootnode/handler.ts index 77851b1fcb88..990bce624551 100644 --- a/packages/cli/src/cmds/bootnode/handler.ts +++ b/packages/cli/src/cmds/bootnode/handler.ts @@ -181,7 +181,7 @@ export async function bootnodeHandlerInit(args: BootnodeArgs & GlobalArgs) { ); const logger = initLogger(args, beaconPaths.dataDir, config, "bootnode.log"); - const {peerId, enr} = await initPeerIdAndEnr(args as unknown as BeaconArgs, bootnodeDir, logger, true); + const {peerId, enr} = await initPeerIdAndEnr(config, args as unknown as BeaconArgs, bootnodeDir, logger, true); return {discv5Args, metricsArgs, bootnodeDir, network, version, commit, peerId, enr, logger}; } diff --git a/packages/config/src/chainConfig/configs/mainnet.ts b/packages/config/src/chainConfig/configs/mainnet.ts index 0de1bee666ec..eed180b4a7ea 100644 --- a/packages/config/src/chainConfig/configs/mainnet.ts +++ b/packages/config/src/chainConfig/configs/mainnet.ts @@ -102,4 +102,8 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + SAMPLES_PER_SLOT: 8, + CUSTODY_REQUIREMENT: 1, }; diff --git a/packages/config/src/chainConfig/configs/minimal.ts b/packages/config/src/chainConfig/configs/minimal.ts index c99a76d1ee40..978161ed01d0 100644 --- a/packages/config/src/chainConfig/configs/minimal.ts +++ 
b/packages/config/src/chainConfig/configs/minimal.ts @@ -100,4 +100,8 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + SAMPLES_PER_SLOT: 8, + CUSTODY_REQUIREMENT: 1, }; diff --git a/packages/config/src/chainConfig/types.ts b/packages/config/src/chainConfig/types.ts index 234a08558be5..bce53e357095 100644 --- a/packages/config/src/chainConfig/types.ts +++ b/packages/config/src/chainConfig/types.ts @@ -72,6 +72,9 @@ export type ChainConfig = { // Networking MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: number; + + SAMPLES_PER_SLOT: number; + CUSTODY_REQUIREMENT: number; }; export const chainConfigTypes: SpecTypes<ChainConfig> = { @@ -134,6 +137,9 @@ export const chainConfigTypes: SpecTypes<ChainConfig> = { // Networking MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: "number", + + SAMPLES_PER_SLOT: "number", + CUSTODY_REQUIREMENT: "number", }; /** Allows values in a Spec file */ diff --git a/packages/params/src/forkName.ts b/packages/params/src/forkName.ts index bbb72a7972fa..9aefabce14cf 100644 --- a/packages/params/src/forkName.ts +++ b/packages/params/src/forkName.ts @@ -45,3 +45,9 @@ export type ForkBlobs = Exclude<ForkName, ForkPreBlobs>; export function isForkBlobs(fork: ForkName): fork is ForkBlobs { return isForkWithdrawals(fork) && fork !== ForkName.capella; } + +export type ForkPrePeerDAS = ForkPreBlobs | ForkName.deneb; +export type ForkPeerDAS = Exclude<ForkName, ForkPrePeerDAS>; +export function isForkPeerDAS(fork: ForkName): fork is ForkPeerDAS { + return isForkBlobs(fork) && fork !== ForkName.deneb; +} diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index 6a95e3ca632e..d2f53859bbe9 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -93,6 +93,12 @@ export const { MAX_BLOB_COMMITMENTS_PER_BLOCK, MAX_BLOBS_PER_BLOCK, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, + + FIELD_ELEMENTS_PER_CELL, + FIELD_ELEMENTS_PER_EXT_BLOB, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + MAX_REQUEST_DATA_COLUMN_SIDECARS, + DATA_COLUMN_SIDECAR_SUBNET_COUNT, } = activePreset; //////////// @@ -244,3 +250,12 @@ export const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** KZG_C // ssz.deneb.BlobSidecars.elementType.fixedSize export const BLOBSIDECAR_FIXED_SIZE = ACTIVE_PRESET === PresetName.minimal ?
131672 : 131928; + +// 128 +export const NUMBER_OF_COLUMNS = (FIELD_ELEMENTS_PER_BLOB * 2) / FIELD_ELEMENTS_PER_CELL; +export const BYTES_PER_CELL = FIELD_ELEMENTS_PER_CELL * BYTES_PER_FIELD_ELEMENT; +export const CELLS_PER_BLOB = FIELD_ELEMENTS_PER_EXT_BLOB / FIELD_ELEMENTS_PER_CELL; + +// ssz.electra.BeaconBlockBody.getPathInfo(['blobKzgCommitments']).gindex +export const KZG_COMMITMENTS_GINDEX = 27; +export const KZG_COMMITMENTS_SUBTREE_INDEX = KZG_COMMITMENTS_GINDEX - 2 ** KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH; diff --git a/packages/params/src/presets/mainnet.ts b/packages/params/src/presets/mainnet.ts index 42a705a07f03..4be082d90ecc 100644 --- a/packages/params/src/presets/mainnet.ts +++ b/packages/params/src/presets/mainnet.ts @@ -118,4 +118,12 @@ export const mainnetPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17, + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: 64, + FIELD_ELEMENTS_PER_EXT_BLOB: 8192, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: 4, + MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384, + DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32, }; diff --git a/packages/params/src/presets/minimal.ts b/packages/params/src/presets/minimal.ts index b940841a0429..e3ccc5909ca3 100644 --- a/packages/params/src/presets/minimal.ts +++ b/packages/params/src/presets/minimal.ts @@ -119,4 +119,12 @@ export const minimalPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 16, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 9, + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: 64, + FIELD_ELEMENTS_PER_EXT_BLOB: 8192, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: 4, + MAX_REQUEST_DATA_COLUMN_SIDECARS: 16384, + DATA_COLUMN_SIDECAR_SUBNET_COUNT: 32, }; diff --git a/packages/params/src/types.ts b/packages/params/src/types.ts index 3c5ba6381131..ee4d4c8b85b2 100644 --- a/packages/params/src/types.ts +++ b/packages/params/src/types.ts @@ -82,6 +82,14 @@ export type BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: number; MAX_BLOBS_PER_BLOCK: number; KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: number; + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: number; + FIELD_ELEMENTS_PER_EXT_BLOB: number; + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: number; + MAX_REQUEST_DATA_COLUMN_SIDECARS: number; + DATA_COLUMN_SIDECAR_SUBNET_COUNT: number; }; /** @@ -167,6 +175,14 @@ export const beaconPresetTypes: BeaconPresetTypes = { MAX_BLOB_COMMITMENTS_PER_BLOCK: "number", MAX_BLOBS_PER_BLOCK: "number", KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: "number", + + // ELECTRA + /////////// + FIELD_ELEMENTS_PER_CELL: "number", + FIELD_ELEMENTS_PER_EXT_BLOB: "number", + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH: "number", + MAX_REQUEST_DATA_COLUMN_SIDECARS: "number", + DATA_COLUMN_SIDECAR_SUBNET_COUNT: "number", }; type BeaconPresetTypes = { diff --git a/packages/types/src/electra/sszTypes.ts b/packages/types/src/electra/sszTypes.ts index 30690a499845..ebd86dd482c7 100644 --- a/packages/types/src/electra/sszTypes.ts +++ b/packages/types/src/electra/sszTypes.ts @@ -1,8 +1,62 @@ -import {ContainerType} from "@chainsafe/ssz"; +import {ContainerType, ByteVectorType, ListCompositeType, VectorCompositeType, ListBasicType} from "@chainsafe/ssz"; +import { + BYTES_PER_FIELD_ELEMENT, + FIELD_ELEMENTS_PER_CELL, + MAX_BLOB_COMMITMENTS_PER_BLOCK, + NUMBER_OF_COLUMNS, + KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH, + MAX_REQUEST_DATA_COLUMN_SIDECARS, +} from "@lodestar/params"; + import {ssz as primitiveSsz} from "../primitive/index.js"; +import {ssz as phase0Ssz} from 
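// Quick sanity arithmetic (not in the diff) for the derived constants above, assuming the preset
// values used by this PR: FIELD_ELEMENTS_PER_BLOB = 4096, FIELD_ELEMENTS_PER_CELL = 64,
// FIELD_ELEMENTS_PER_EXT_BLOB = 8192 and BYTES_PER_FIELD_ELEMENT = 32.
const NUMBER_OF_COLUMNS_CHECK = (4096 * 2) / 64; // 128: the 2x-extended blob split into 64-element cells
const BYTES_PER_CELL_CHECK = 64 * 32; // 2048 bytes per cell
const CELLS_PER_BLOB_CHECK = 8192 / 64; // 128: one cell per column, so every blob contributes to every column

console.log(NUMBER_OF_COLUMNS_CHECK, BYTES_PER_CELL_CHECK, CELLS_PER_BLOB_CHECK); // 128 2048 128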
"../phase0/index.js"; import {ssz as denebSsz} from "../deneb/index.js"; -const {BLSSignature} = primitiveSsz; +const {BLSSignature, Root, ColumnIndex, Bytes32, Slot, UintNum64} = primitiveSsz; + +export const Cell = new ByteVectorType(BYTES_PER_FIELD_ELEMENT * FIELD_ELEMENTS_PER_CELL); +export const DataColumn = new ListCompositeType(Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK); +export const ExtendedMatrix = new ListCompositeType(Cell, MAX_BLOB_COMMITMENTS_PER_BLOCK * NUMBER_OF_COLUMNS); +export const KzgCommitmentsInclusionProof = new VectorCompositeType(Bytes32, KZG_COMMITMENTS_INCLUSION_PROOF_DEPTH); + +export const DataColumnSidecar = new ContainerType( + { + index: ColumnIndex, + column: DataColumn, + kzgCommitments: denebSsz.BlobKzgCommitments, + kzgProofs: denebSsz.KZGProofs, + signedBlockHeader: phase0Ssz.SignedBeaconBlockHeader, + kzgCommitmentsInclusionProof: KzgCommitmentsInclusionProof, + }, + {typeName: "DataColumnSidecar", jsonCase: "eth2"} +); + +export const DataColumnSidecars = new ListCompositeType(DataColumnSidecar, NUMBER_OF_COLUMNS); + +// ReqResp types +// ============= + +export const DataColumnIdentifier = new ContainerType( + { + blockRoot: Root, + index: ColumnIndex, + }, + {typeName: "DataColumnIdentifier", jsonCase: "eth2"} +); + +export const DataColumnSidecarsByRootRequest = new ListCompositeType( + DataColumnIdentifier, + MAX_REQUEST_DATA_COLUMN_SIDECARS +); + +export const DataColumnSidecarsByRangeRequest = new ContainerType( + { + startSlot: Slot, + count: UintNum64, + columns: new ListBasicType(ColumnIndex, NUMBER_OF_COLUMNS), + }, + {typeName: "DataColumnSidecarsByRangeRequest", jsonCase: "eth2"} +); export const ExecutionPayload = new ContainerType( { diff --git a/packages/types/src/electra/types.ts b/packages/types/src/electra/types.ts index 198259eed1dd..a59bb4e673fa 100644 --- a/packages/types/src/electra/types.ts +++ b/packages/types/src/electra/types.ts @@ -1,7 +1,17 @@ import {ValueOf} from "@chainsafe/ssz"; import * as ssz from "./sszTypes.js"; -export type BlobSidecar = ValueOf; +export type Cell = ValueOf; +export type DataColumn = ValueOf; +export type ExtendedMatrix = ValueOf; +export type KzgCommitmentsInclusionProof = ValueOf; +export type DataColumnSidecar = ValueOf; +export type DataColumnSidecars = ValueOf; + +export type DataColumnIdentifier = ValueOf; +export type DataColumnSidecarsByRootRequest = ValueOf; +export type DataColumnSidecarsByRangeRequest = ValueOf; + export type ExecutionPayloadAndBlobsBundle = ValueOf; export type ExecutionPayload = ValueOf; diff --git a/packages/types/src/primitive/sszTypes.ts b/packages/types/src/primitive/sszTypes.ts index 068a32e2cc17..88193d2902fe 100644 --- a/packages/types/src/primitive/sszTypes.ts +++ b/packages/types/src/primitive/sszTypes.ts @@ -63,3 +63,4 @@ export const BLSSignature = Bytes96; export const Domain = Bytes32; export const ParticipationFlags = new UintNumberType(1, {setBitwiseOR: true}); export const ExecutionAddress = new ExecutionAddressType(); +export const ColumnIndex = UintNum64; diff --git a/packages/types/src/primitive/types.ts b/packages/types/src/primitive/types.ts index 53422cc9b995..90e7eadb178e 100644 --- a/packages/types/src/primitive/types.ts +++ b/packages/types/src/primitive/types.ts @@ -47,3 +47,4 @@ export type ExecutionAddress = Bytes20; export type RootHex = string; /** Non-spec type to signal time is represented in seconds */ export type TimeSeconds = number; +export type ColumnIndex = UintNum64; diff --git a/packages/validator/src/util/params.ts 
b/packages/validator/src/util/params.ts index 0afede39b951..2e2066992898 100644 --- a/packages/validator/src/util/params.ts +++ b/packages/validator/src/util/params.ts @@ -222,5 +222,17 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record
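// Closing sketch (assumptions flagged, not part of the diff): a cross-check of the
// SLOT_BYTES_POSITION_IN_SIGNED_DATA_COLUMN_SIDECAR = 20 constant added in sszBytes.ts above.
// In the DataColumnSidecar container the fixed-size prefix is index (8 bytes) followed by three
// 4-byte offsets for the variable-size column, kzgCommitments and kzgProofs fields, i.e. 20 bytes,
// after which the fixed-size signedBlockHeader begins with the header's slot.
import {ssz} from "@lodestar/types";

const sidecar = ssz.electra.DataColumnSidecar.defaultValue();
sidecar.signedBlockHeader.message.slot = 123456;

const bytes = ssz.electra.DataColumnSidecar.serialize(sidecar);
const dv = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
const slotFromBytes = dv.getUint32(20, true); // low 32 bits of the little-endian uint64 slot
console.log(slotFromBytes === 123456); // true, matching what getSlotFromDataColumnSidecarSerialized reads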