From e52f24f598929b857be727d6435966e211085a04 Mon Sep 17 00:00:00 2001 From: g11tech <76567250+g11tech@users.noreply.github.com> Date: Tue, 8 Mar 2022 00:10:28 +0530 Subject: [PATCH 01/10] Configuring nethermind with jwtAuth in CI (#3827) * configuring nethermind with jwtAuth * fixes for providong jwt file * fixes * fixes * removing the leading 0x for nethermind * fixes * fix the array merging in the dev beacon node options * fix the api options merge * updating nethermind branch to config fixes * scripts update * activating geth on the latest update * correct the secret variable --- .github/workflows/test-sim-merge.yml | 7 ++++--- kiln/nethermind/common-setup.sh | 4 ++++ kiln/nethermind/post-merge.sh | 7 ++++++- kiln/nethermind/pre-merge.sh | 7 ++++++- packages/lodestar/test/utils/node/beacon.ts | 20 ++++++++++++++++++-- 5 files changed, 38 insertions(+), 7 deletions(-) create mode 100755 kiln/nethermind/common-setup.sh diff --git a/.github/workflows/test-sim-merge.yml b/.github/workflows/test-sim-merge.yml index 4f915f148aad..42d8416f34e3 100644 --- a/.github/workflows/test-sim-merge.yml +++ b/.github/workflows/test-sim-merge.yml @@ -3,8 +3,8 @@ name: Sim merge tests on: [pull_request, push] env: - GETH_COMMIT: e44f96d1d31bdc405f2653709df53bab4596f66b - NETHERMIND_COMMIT: f1fe9d3cb200772c9fd0b8b3935212d93d0f90fe + GETH_COMMIT: 59a36720b322a1b7e7084de30d6bad2371e5a067 + NETHERMIND_COMMIT: 93a0799441c567f4075f544ae33379d77f269117 jobs: sim-merge-tests: @@ -59,7 +59,7 @@ jobs: with: dotnet-version: "6.0.x" - name: Clone Nethermind merge interop branch - run: git clone -b kiln_processing https://github.com/g11tech/nethermind --recursive && cd nethermind && git reset --hard $NETHERMIND_COMMIT && git submodule update --init --recursive + run: git clone -b kiln https://github.com/g11tech/nethermind --recursive && cd nethermind && git reset --hard $NETHERMIND_COMMIT && git submodule update --init --recursive - name: Build Nethermind run: cd nethermind/src/Nethermind && 
dotnet build Nethermind.sln -c Release @@ -70,6 +70,7 @@ jobs: EL_BINARY_DIR: ../../nethermind/src/Nethermind/Nethermind.Runner EL_SCRIPT_DIR: kiln/nethermind EL_PORT: 8550 + ENGINE_PORT: 8551 - name: Upload debug log test files if: ${{ always() }} diff --git a/kiln/nethermind/common-setup.sh b/kiln/nethermind/common-setup.sh new file mode 100755 index 000000000000..d0715d7160d9 --- /dev/null +++ b/kiln/nethermind/common-setup.sh @@ -0,0 +1,4 @@ +#!/bin/bash -x + +# echo a hex encoded 256 bit secret into a file +echo $JWT_SECRET_HEX> $DATA_DIR/jwtsecret diff --git a/kiln/nethermind/post-merge.sh b/kiln/nethermind/post-merge.sh index 1b92f938fccc..1c9fa1a74c2f 100755 --- a/kiln/nethermind/post-merge.sh +++ b/kiln/nethermind/post-merge.sh @@ -1,4 +1,9 @@ #!/bin/bash -x +scriptDir=$(dirname $0) +currentDir=$(pwd) + +. $scriptDir/common-setup.sh + cd $EL_BINARY_DIR -dotnet run -c Release -- --config themerge_kiln_testvectors --Merge.TerminalTotalDifficulty $TTD +dotnet run -c Release -- --config themerge_kiln_testvectors --Merge.TerminalTotalDifficulty $TTD --JsonRpc.JwtSecretFile $currentDir/$DATA_DIR/jwtsecret diff --git a/kiln/nethermind/pre-merge.sh b/kiln/nethermind/pre-merge.sh index 647b5264a885..d2fbed3e4e2b 100755 --- a/kiln/nethermind/pre-merge.sh +++ b/kiln/nethermind/pre-merge.sh @@ -1,4 +1,9 @@ #!/bin/bash -x +scriptDir=$(dirname $0) +currentDir=$(pwd) + +. 
$scriptDir/common-setup.sh + cd $EL_BINARY_DIR -dotnet run -c Release -- --config themerge_kiln_m2 --Merge.TerminalTotalDifficulty $TTD +dotnet run -c Release -- --config themerge_kiln_m2 --Merge.TerminalTotalDifficulty $TTD --JsonRpc.JwtSecretFile $currentDir/$DATA_DIR/jwtsecret diff --git a/packages/lodestar/test/utils/node/beacon.ts b/packages/lodestar/test/utils/node/beacon.ts index 2702473d0d3a..946e108f176c 100644 --- a/packages/lodestar/test/utils/node/beacon.ts +++ b/packages/lodestar/test/utils/node/beacon.ts @@ -18,6 +18,7 @@ import {testLogger} from "../logger"; import {InteropStateOpts} from "../../../src/node/utils/interop/state"; import {TreeBacked} from "@chainsafe/ssz"; import {allForks, phase0} from "@chainsafe/lodestar-types"; +import {isPlainObject} from "@chainsafe/lodestar-utils"; export async function getDevBeaconNode( opts: { @@ -59,16 +60,24 @@ export async function getDevBeaconNode( ); options = deepmerge( + // This deepmerge should NOT merge the array with the defaults but overwrite them defaultOptions, deepmerge( + // This deepmerge should merge all the array elements of the api options with the + // dev defaults that we wish, especially for the api options { db: {name: tmpDir.name}, - eth1: {enabled: false, providerUrls: ["http://localhost:8545"]}, + eth1: {enabled: false}, + api: {rest: {api: ["beacon", "config", "events", "node", "validator"]}}, metrics: {enabled: false}, network: {discv5: null}, } as Partial, options - ) + ), + { + arrayMerge: overwriteTargetArrayIfItems, + isMergeableObject: isPlainObject, + } ); const state = opts.anchorState || (await initDevState(config, db, validatorCount, opts)); @@ -83,3 +92,10 @@ export async function getDevBeaconNode( wsCheckpoint: opts.wsCheckpoint, }); } + +function overwriteTargetArrayIfItems(target: unknown[], source: unknown[]): unknown[] { + if (source.length === 0) { + return target; + } + return source; +} From b3ff40e129ae93bd74930843aa6cfa2906532413 Mon Sep 17 00:00:00 2001 From: 
tuyennhv Date: Tue, 8 Mar 2022 10:24:30 +0700 Subject: [PATCH 02/10] Revert "Apply peer action when there are gossip validation errors (#3781)" (#3830) This reverts commit 060918ecf2c8b2c1dbe02ea2ca2100f2efeb2b18. --- .../src/chain/errors/gossipValidation.ts | 7 +--- .../src/chain/validation/aggregateAndProof.ts | 27 +++++--------- .../src/chain/validation/attestation.ts | 35 ++++++++----------- .../src/chain/validation/attesterSlashing.ts | 7 ++-- .../lodestar/src/chain/validation/block.ts | 28 +++++++-------- .../src/chain/validation/proposerSlashing.ts | 7 ++-- .../src/chain/validation/syncCommittee.ts | 12 +++---- .../syncCommitteeContributionAndProof.ts | 9 +++-- .../src/chain/validation/voluntaryExit.ts | 9 ++--- .../lodestar/src/network/gossip/gossipsub.ts | 3 -- .../src/network/gossip/validation/index.ts | 14 ++------ packages/lodestar/src/network/network.ts | 1 - .../lodestar/src/network/peers/peerManager.ts | 2 -- .../unit/network/gossip/gossipsub.test.ts | 16 ++------- 14 files changed, 58 insertions(+), 119 deletions(-) diff --git a/packages/lodestar/src/chain/errors/gossipValidation.ts b/packages/lodestar/src/chain/errors/gossipValidation.ts index 2ce535023c08..87750c6658b8 100644 --- a/packages/lodestar/src/chain/errors/gossipValidation.ts +++ b/packages/lodestar/src/chain/errors/gossipValidation.ts @@ -1,5 +1,4 @@ import {LodestarError} from "@chainsafe/lodestar-utils"; -import {PeerAction} from "../../network"; export enum GossipAction { IGNORE = "IGNORE", @@ -7,14 +6,10 @@ export enum GossipAction { } export class GossipActionError extends LodestarError { - /** The action at gossipsub side */ action: GossipAction; - /** The action at node side */ - lodestarAction: PeerAction | null; - constructor(action: GossipAction, lodestarAction: PeerAction | null, type: T) { + constructor(action: GossipAction, type: T) { super(type); this.action = action; - this.lodestarAction = lodestarAction; } } diff --git 
a/packages/lodestar/src/chain/validation/aggregateAndProof.ts b/packages/lodestar/src/chain/validation/aggregateAndProof.ts index 0ca4035c4f7f..de3bd166b282 100644 --- a/packages/lodestar/src/chain/validation/aggregateAndProof.ts +++ b/packages/lodestar/src/chain/validation/aggregateAndProof.ts @@ -12,7 +12,6 @@ import {getSelectionProofSignatureSet, getAggregateAndProofSignatureSet} from ". import {AttestationError, AttestationErrorCode, GossipAction} from "../errors"; import {getCommitteeIndices, verifyHeadBlockAndTargetRoot, verifyPropagationSlotRange} from "./attestation"; import {RegenCaller} from "../regen"; -import {PeerAction} from "../../network/peers"; export async function validateGossipAggregateAndProof( chain: IBeaconChain, @@ -35,9 +34,7 @@ export async function validateGossipAggregateAndProof( // [REJECT] The attestation's epoch matches its target -- i.e. attestation.data.target.epoch == compute_epoch_at_slot(attestation.data.slot) if (targetEpoch !== attEpoch) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.BAD_TARGET_EPOCH, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.BAD_TARGET_EPOCH}); } // [IGNORE] aggregate.data.slot is within the last ATTESTATION_PROPAGATION_SLOT_RANGE slots (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance) @@ -49,7 +46,7 @@ export async function validateGossipAggregateAndProof( // index aggregate_and_proof.aggregator_index for the epoch aggregate.data.target.epoch. 
const aggregatorIndex = aggregateAndProof.aggregatorIndex; if (chain.seenAggregators.isKnown(targetEpoch, aggregatorIndex)) { - throw new AttestationError(GossipAction.IGNORE, null, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN, targetEpoch, aggregatorIndex, @@ -67,7 +64,7 @@ export async function validateGossipAggregateAndProof( const attHeadState = await chain.regen .getState(attHeadBlock.stateRoot, RegenCaller.validateGossipAggregateAndProof) .catch((e: Error) => { - throw new AttestationError(GossipAction.REJECT, null, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.MISSING_ATTESTATION_HEAD_STATE, error: e as Error, }); @@ -86,25 +83,19 @@ export async function validateGossipAggregateAndProof( // len(get_attesting_indices(state, aggregate.data, aggregate.aggregation_bits)) >= 1. if (attestingIndices.length < 1) { // missing attestation participants - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.EMPTY_AGGREGATION_BITFIELD, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.EMPTY_AGGREGATION_BITFIELD}); } // [REJECT] aggregate_and_proof.selection_proof selects the validator as an aggregator for the slot // -- i.e. is_aggregator(state, aggregate.data.slot, aggregate.data.index, aggregate_and_proof.selection_proof) returns True. if (!isAggregatorFromCommitteeLength(committeeIndices.length, aggregateAndProof.selectionProof)) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.INVALID_AGGREGATOR, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_AGGREGATOR}); } // [REJECT] The aggregator's validator index is within the committee // -- i.e. aggregate_and_proof.aggregator_index in get_beacon_committee(state, aggregate.data.slot, aggregate.data.index). 
if (!committeeIndices.includes(aggregateAndProof.aggregatorIndex)) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.AGGREGATOR_NOT_IN_COMMITTEE, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.AGGREGATOR_NOT_IN_COMMITTEE}); } // [REJECT] The aggregate_and_proof.selection_proof is a valid signature of the aggregate.data.slot @@ -118,16 +109,14 @@ export async function validateGossipAggregateAndProof( allForks.getIndexedAttestationSignatureSet(attHeadState, indexedAttestation), ]; if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true}))) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.INVALID_SIGNATURE, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_SIGNATURE}); } // It's important to double check that the attestation still hasn't been observed, since // there can be a race-condition if we receive two attestations at the same time and // process them in different threads. 
if (chain.seenAggregators.isKnown(targetEpoch, aggregatorIndex)) { - throw new AttestationError(GossipAction.IGNORE, null, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.AGGREGATOR_ALREADY_KNOWN, targetEpoch, aggregatorIndex, diff --git a/packages/lodestar/src/chain/validation/attestation.ts b/packages/lodestar/src/chain/validation/attestation.ts index 34569546a1d7..ee7a2b66c0af 100644 --- a/packages/lodestar/src/chain/validation/attestation.ts +++ b/packages/lodestar/src/chain/validation/attestation.ts @@ -15,7 +15,6 @@ import {IBeaconChain} from ".."; import {AttestationError, AttestationErrorCode, GossipAction} from "../errors"; import {MAXIMUM_GOSSIP_CLOCK_DISPARITY_SEC} from "../../constants"; import {RegenCaller} from "../regen"; -import {PeerAction} from "../../network"; const {getIndexedAttestationSignatureSet} = allForks; @@ -42,7 +41,7 @@ export async function validateGossipAttestation( // [REJECT] The attestation's epoch matches its target -- i.e. 
attestation.data.target.epoch == compute_epoch_at_slot(attestation.data.slot) if (targetEpoch !== attEpoch) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.BAD_TARGET_EPOCH, }); } @@ -61,9 +60,7 @@ export async function validateGossipAttestation( bitIndex = getSingleBitIndex(aggregationBits); } catch (e) { if (e instanceof AggregationBitsError && e.type.code === AggregationBitsErrorCode.NOT_EXACTLY_ONE_BIT_SET) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET}); } else { throw e; } @@ -92,7 +89,7 @@ export async function validateGossipAttestation( const attHeadState = await chain.regen .getState(attHeadBlock.stateRoot, RegenCaller.validateGossipAttestation) .catch((e: Error) => { - throw new AttestationError(GossipAction.REJECT, null, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.MISSING_ATTESTATION_HEAD_STATE, error: e as Error, }); @@ -108,9 +105,7 @@ export async function validateGossipAttestation( // -- i.e. len(attestation.aggregation_bits) == len(get_beacon_committee(state, data.slot, data.index)). // > TODO: Is this necessary? Lighthouse does not do this check if (aggregationBits.length !== committeeIndices.length) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS}); } // LH > verify_middle_checks @@ -124,7 +119,7 @@ export async function validateGossipAttestation( // which may be pre-computed along with the committee information for the signature check. 
const expectedSubnet = computeSubnetForSlot(attHeadState, attSlot, attIndex); if (subnet !== null && subnet !== expectedSubnet) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_SUBNET_ID, received: subnet, expected: expectedSubnet, @@ -134,7 +129,7 @@ export async function validateGossipAttestation( // [IGNORE] There has been no other valid attestation seen on an attestation subnet that has an // identical attestation.data.target.epoch and participating validator index. if (chain.seenAttesters.isKnown(targetEpoch, validatorIndex)) { - throw new AttestationError(GossipAction.IGNORE, null, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.ATTESTATION_ALREADY_KNOWN, targetEpoch, validatorIndex, @@ -149,9 +144,7 @@ export async function validateGossipAttestation( }; const signatureSet = getIndexedAttestationSignatureSet(attHeadState, indexedAttestation); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true}))) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: AttestationErrorCode.INVALID_SIGNATURE, - }); + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_SIGNATURE}); } // Now that the attestation has been fully verified, store that we have received a valid attestation from this validator. @@ -160,7 +153,7 @@ export async function validateGossipAttestation( // there can be a race-condition if we receive two attestations at the same time and // process them in different threads. 
if (chain.seenAttesters.isKnown(targetEpoch, validatorIndex)) { - throw new AttestationError(GossipAction.IGNORE, null, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.ATTESTATION_ALREADY_KNOWN, targetEpoch, validatorIndex, @@ -189,14 +182,14 @@ export function verifyPropagationSlotRange(chain: IBeaconChain, attestationSlot: 0 ); if (attestationSlot < earliestPermissibleSlot) { - throw new AttestationError(GossipAction.IGNORE, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.PAST_SLOT, earliestPermissibleSlot, attestationSlot, }); } if (attestationSlot > latestPermissibleSlot) { - throw new AttestationError(GossipAction.IGNORE, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.FUTURE_SLOT, latestPermissibleSlot, attestationSlot, @@ -237,7 +230,7 @@ function verifyHeadBlockIsKnown(chain: IBeaconChain, beaconBlockRoot: Root): IPr const headBlock = chain.forkChoice.getBlock(beaconBlockRoot); if (headBlock === null) { - throw new AttestationError(GossipAction.IGNORE, null, { + throw new AttestationError(GossipAction.IGNORE, { code: AttestationErrorCode.UNKNOWN_BEACON_BLOCK_ROOT, root: toHexString(beaconBlockRoot.valueOf() as typeof beaconBlockRoot), }); @@ -265,7 +258,7 @@ function verifyAttestationTargetRoot(headBlock: IProtoBlock, targetRoot: Root, a // // Reference: // https://github.com/ethereum/eth2.0-specs/pull/2001#issuecomment-699246659 - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_TARGET_ROOT, targetRoot: toHexString(targetRoot), expected: null, @@ -285,7 +278,7 @@ function verifyAttestationTargetRoot(headBlock: IProtoBlock, targetRoot: Root, a // TODO: Do a fast comparision to convert and compare byte by byte if (expectedTargetRoot !== toHexString(targetRoot)) { // Reject any 
attestation with an invalid target root. - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_TARGET_ROOT, targetRoot: toHexString(targetRoot), expected: expectedTargetRoot, @@ -303,7 +296,7 @@ export function getCommitteeIndices( const slotCommittees = committees[attestationSlot % SLOTS_PER_EPOCH]; if (attestationIndex >= slotCommittees.length) { - throw new AttestationError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE, index: attestationIndex, }); diff --git a/packages/lodestar/src/chain/validation/attesterSlashing.ts b/packages/lodestar/src/chain/validation/attesterSlashing.ts index 69dd0009bcf7..a0065ee909ec 100644 --- a/packages/lodestar/src/chain/validation/attesterSlashing.ts +++ b/packages/lodestar/src/chain/validation/attesterSlashing.ts @@ -1,6 +1,5 @@ import {phase0, allForks, getAttesterSlashableIndices} from "@chainsafe/lodestar-beacon-state-transition"; import {IBeaconChain} from ".."; -import {PeerAction} from "../../network/peers"; import {AttesterSlashingError, AttesterSlashingErrorCode, GossipAction} from "../errors"; export async function validateGossipAttesterSlashing( @@ -13,7 +12,7 @@ export async function validateGossipAttesterSlashing( // ), verify if any(attester_slashed_indices.difference(prior_seen_attester_slashed_indices))). 
const intersectingIndices = getAttesterSlashableIndices(attesterSlashing); if (chain.opPool.hasSeenAttesterSlashing(intersectingIndices)) { - throw new AttesterSlashingError(GossipAction.IGNORE, null, { + throw new AttesterSlashingError(GossipAction.IGNORE, { code: AttesterSlashingErrorCode.ALREADY_EXISTS, }); } @@ -25,7 +24,7 @@ export async function validateGossipAttesterSlashing( // verifySignature = false, verified in batch below allForks.assertValidAttesterSlashing(state, attesterSlashing, false); } catch (e) { - throw new AttesterSlashingError(GossipAction.REJECT, PeerAction.HighToleranceError, { + throw new AttesterSlashingError(GossipAction.REJECT, { code: AttesterSlashingErrorCode.INVALID, error: e as Error, }); @@ -33,7 +32,7 @@ export async function validateGossipAttesterSlashing( const signatureSets = allForks.getAttesterSlashingSignatureSets(state, attesterSlashing); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true}))) { - throw new AttesterSlashingError(GossipAction.REJECT, PeerAction.HighToleranceError, { + throw new AttesterSlashingError(GossipAction.REJECT, { code: AttesterSlashingErrorCode.INVALID, error: Error("Invalid signature"), }); diff --git a/packages/lodestar/src/chain/validation/block.ts b/packages/lodestar/src/chain/validation/block.ts index 7d108cbbd724..db152ec1116a 100644 --- a/packages/lodestar/src/chain/validation/block.ts +++ b/packages/lodestar/src/chain/validation/block.ts @@ -8,7 +8,6 @@ import {MAXIMUM_GOSSIP_CLOCK_DISPARITY} from "../../constants"; import {IBeaconChain} from "../interface"; import {BlockGossipError, BlockErrorCode, GossipAction} from "../errors"; import {RegenCaller} from "../regen"; -import {PeerAction} from "../../network/peers"; export async function validateGossipBlock( config: IChainForkConfig, @@ -24,7 +23,7 @@ export async function validateGossipBlock( // appropriate slot). 
const currentSlotWithGossipDisparity = chain.clock.currentSlotWithGossipDisparity; if (currentSlotWithGossipDisparity < blockSlot) { - throw new BlockGossipError(GossipAction.IGNORE, PeerAction.LowToleranceError, { + throw new BlockGossipError(GossipAction.IGNORE, { code: BlockErrorCode.FUTURE_SLOT, currentSlot: currentSlotWithGossipDisparity, blockSlot, @@ -36,7 +35,7 @@ export async function validateGossipBlock( const finalizedCheckpoint = chain.forkChoice.getFinalizedCheckpoint(); const finalizedSlot = computeStartSlotAtEpoch(finalizedCheckpoint.epoch); if (blockSlot <= finalizedSlot) { - throw new BlockGossipError(GossipAction.IGNORE, PeerAction.LowToleranceError, { + throw new BlockGossipError(GossipAction.IGNORE, { code: BlockErrorCode.WOULD_REVERT_FINALIZED_SLOT, blockSlot, finalizedSlot, @@ -51,7 +50,7 @@ export async function validateGossipBlock( // already know this block. const blockRoot = toHexString(config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block)); if (chain.forkChoice.getBlockHex(blockRoot) !== null) { - throw new BlockGossipError(GossipAction.IGNORE, null, {code: BlockErrorCode.ALREADY_KNOWN, root: blockRoot}); + throw new BlockGossipError(GossipAction.IGNORE, {code: BlockErrorCode.ALREADY_KNOWN, root: blockRoot}); } // No need to check for badBlock @@ -60,7 +59,7 @@ export async function validateGossipBlock( // [IGNORE] The block is the first block with valid signature received for the proposer for the slot, signed_beacon_block.message.slot. const proposerIndex = block.proposerIndex; if (chain.seenBlockProposers.isKnown(blockSlot, proposerIndex)) { - throw new BlockGossipError(GossipAction.IGNORE, null, {code: BlockErrorCode.REPEAT_PROPOSAL, proposerIndex}); + throw new BlockGossipError(GossipAction.IGNORE, {code: BlockErrorCode.REPEAT_PROPOSAL, proposerIndex}); } // [REJECT] The current finalized_checkpoint is an ancestor of block -- i.e. 
@@ -78,12 +77,12 @@ export async function validateGossipBlock( // descend from the finalized root. // (Non-Lighthouse): Since we prune all blocks non-descendant from finalized checking the `db.block` database won't be useful to guard // against known bad fork blocks, so we throw PARENT_UNKNOWN for cases (1) and (2) - throw new BlockGossipError(GossipAction.IGNORE, null, {code: BlockErrorCode.PARENT_UNKNOWN, parentRoot}); + throw new BlockGossipError(GossipAction.IGNORE, {code: BlockErrorCode.PARENT_UNKNOWN, parentRoot}); } // [REJECT] The block is from a higher slot than its parent. if (parentBlock.slot >= blockSlot) { - throw new BlockGossipError(GossipAction.IGNORE, PeerAction.LowToleranceError, { + throw new BlockGossipError(GossipAction.IGNORE, { code: BlockErrorCode.NOT_LATER_THAN_PARENT, parentSlot: parentBlock.slot, slot: blockSlot, @@ -98,7 +97,7 @@ export async function validateGossipBlock( const blockState = await chain.regen .getBlockSlotState(parentRoot, blockSlot, RegenCaller.validateGossipBlock) .catch(() => { - throw new BlockGossipError(GossipAction.IGNORE, null, {code: BlockErrorCode.PARENT_UNKNOWN, parentRoot}); + throw new BlockGossipError(GossipAction.IGNORE, {code: BlockErrorCode.PARENT_UNKNOWN, parentRoot}); }); // Extra conditions for merge fork blocks @@ -109,8 +108,8 @@ export async function validateGossipBlock( const executionPayload = block.body.executionPayload; if (bellatrix.isBellatrixStateType(blockState) && bellatrix.isExecutionEnabled(blockState, block.body)) { const expectedTimestamp = computeTimeAtSlot(config, blockSlot, chain.genesisTime); - if (executionPayload.timestamp !== expectedTimestamp) { - throw new BlockGossipError(GossipAction.REJECT, PeerAction.LowToleranceError, { + if (executionPayload.timestamp !== computeTimeAtSlot(config, blockSlot, chain.genesisTime)) { + throw new BlockGossipError(GossipAction.REJECT, { code: BlockErrorCode.INCORRECT_TIMESTAMP, timestamp: executionPayload.timestamp, expectedTimestamp, @@ -123,7 
+122,7 @@ export async function validateGossipBlock( const signatureSet = allForks.getProposerSignatureSet(blockState, signedBlock); // Don't batch so verification is not delayed if (!(await chain.bls.verifySignatureSets([signatureSet], {verifyOnMainThread: true}))) { - throw new BlockGossipError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new BlockGossipError(GossipAction.REJECT, { code: BlockErrorCode.PROPOSAL_SIGNATURE_INVALID, }); } @@ -133,15 +132,12 @@ export async function validateGossipBlock( // shuffling, the block MAY be queued for later processing while proposers for the block's branch are calculated -- // in such a case do not REJECT, instead IGNORE this message. if (blockState.epochCtx.getBeaconProposer(blockSlot) !== proposerIndex) { - throw new BlockGossipError(GossipAction.REJECT, PeerAction.LowToleranceError, { - code: BlockErrorCode.INCORRECT_PROPOSER, - proposerIndex, - }); + throw new BlockGossipError(GossipAction.REJECT, {code: BlockErrorCode.INCORRECT_PROPOSER, proposerIndex}); } // Check again in case there two blocks are processed concurrently if (chain.seenBlockProposers.isKnown(blockSlot, proposerIndex)) { - throw new BlockGossipError(GossipAction.IGNORE, null, {code: BlockErrorCode.REPEAT_PROPOSAL, proposerIndex}); + throw new BlockGossipError(GossipAction.IGNORE, {code: BlockErrorCode.REPEAT_PROPOSAL, proposerIndex}); } // Simple implementation of a pending block queue. 
Keeping the block here recycles the queue logic, and keeps the diff --git a/packages/lodestar/src/chain/validation/proposerSlashing.ts b/packages/lodestar/src/chain/validation/proposerSlashing.ts index 253fb2690b90..670eab2d92a5 100644 --- a/packages/lodestar/src/chain/validation/proposerSlashing.ts +++ b/packages/lodestar/src/chain/validation/proposerSlashing.ts @@ -1,6 +1,5 @@ import {phase0, allForks} from "@chainsafe/lodestar-beacon-state-transition"; import {IBeaconChain} from ".."; -import {PeerAction} from "../../network/peers"; import {ProposerSlashingError, ProposerSlashingErrorCode, GossipAction} from "../errors"; export async function validateGossipProposerSlashing( @@ -10,7 +9,7 @@ export async function validateGossipProposerSlashing( // [IGNORE] The proposer slashing is the first valid proposer slashing received for the proposer with index // proposer_slashing.signed_header_1.message.proposer_index. if (chain.opPool.hasSeenProposerSlashing(proposerSlashing.signedHeader1.message.proposerIndex)) { - throw new ProposerSlashingError(GossipAction.IGNORE, null, { + throw new ProposerSlashingError(GossipAction.IGNORE, { code: ProposerSlashingErrorCode.ALREADY_EXISTS, }); } @@ -22,7 +21,7 @@ export async function validateGossipProposerSlashing( // verifySignature = false, verified in batch below allForks.assertValidProposerSlashing(state, proposerSlashing, false); } catch (e) { - throw new ProposerSlashingError(GossipAction.REJECT, PeerAction.HighToleranceError, { + throw new ProposerSlashingError(GossipAction.REJECT, { code: ProposerSlashingErrorCode.INVALID, error: e as Error, }); @@ -30,7 +29,7 @@ export async function validateGossipProposerSlashing( const signatureSets = allForks.getProposerSlashingSignatureSets(state, proposerSlashing); if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true}))) { - throw new ProposerSlashingError(GossipAction.REJECT, PeerAction.HighToleranceError, { + throw new ProposerSlashingError(GossipAction.REJECT, 
{ code: ProposerSlashingErrorCode.INVALID, error: Error("Invalid signature"), }); diff --git a/packages/lodestar/src/chain/validation/syncCommittee.ts b/packages/lodestar/src/chain/validation/syncCommittee.ts index dcf04292ed4e..3ff11490cbb7 100644 --- a/packages/lodestar/src/chain/validation/syncCommittee.ts +++ b/packages/lodestar/src/chain/validation/syncCommittee.ts @@ -1,7 +1,6 @@ import {CachedBeaconStateAllForks} from "@chainsafe/lodestar-beacon-state-transition"; import {SYNC_COMMITTEE_SUBNET_SIZE, SYNC_COMMITTEE_SUBNET_COUNT} from "@chainsafe/lodestar-params"; import {altair} from "@chainsafe/lodestar-types"; -import {PeerAction} from "../../network/peers"; import {GossipAction, SyncCommitteeError, SyncCommitteeErrorCode} from "../errors"; import {IBeaconChain} from "../interface"; import {getSyncCommitteeSignatureSet} from "./signatureSets"; @@ -32,7 +31,7 @@ export async function validateGossipSyncCommittee( // [IGNORE] There has been no other valid sync committee signature for the declared slot for the validator referenced // by sync_committee_signature.validator_index. if (chain.seenSyncCommitteeMessages.isKnown(slot, subnet, validatorIndex)) { - throw new SyncCommitteeError(GossipAction.IGNORE, null, { + throw new SyncCommitteeError(GossipAction.IGNORE, { code: SyncCommitteeErrorCode.SYNC_COMMITTEE_ALREADY_KNOWN, }); } @@ -60,7 +59,7 @@ export async function validateSyncCommitteeSigOnly( ): Promise { const signatureSet = getSyncCommitteeSignatureSet(headState, syncCommittee); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true}))) { - throw new SyncCommitteeError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.INVALID_SIGNATURE, }); } @@ -78,9 +77,8 @@ export function validateGossipSyncCommitteeExceptSig( const {slot, validatorIndex} = data; // [IGNORE] The signature's slot is for the current slot, i.e. 
sync_committee_signature.slot == current_slot. // (with a MAXIMUM_GOSSIP_CLOCK_DISPARITY allowance) - // don't apply any peer actions for now if (!chain.clock.isCurrentSlotGivenGossipDisparity(slot)) { - throw new SyncCommitteeError(GossipAction.IGNORE, null, { + throw new SyncCommitteeError(GossipAction.IGNORE, { code: SyncCommitteeErrorCode.NOT_CURRENT_SLOT, currentSlot: chain.clock.currentSlot, slot, @@ -89,7 +87,7 @@ export function validateGossipSyncCommitteeExceptSig( // [REJECT] The subcommittee index is in the allowed range, i.e. contribution.subcommittee_index < SYNC_COMMITTEE_SUBNET_COUNT. if (subnet >= SYNC_COMMITTEE_SUBNET_COUNT) { - throw new SyncCommitteeError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.INVALID_SUBCOMMITTEE_INDEX, subcommitteeIndex: subnet, }); @@ -99,7 +97,7 @@ export function validateGossipSyncCommitteeExceptSig( // Note this validation implies the validator is part of the broader current sync committee along with the correct subcommittee. 
const indexInSubcommittee = getIndexInSubcommittee(headState, subnet, data); if (indexInSubcommittee === null) { - throw new SyncCommitteeError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.VALIDATOR_NOT_IN_SYNC_COMMITTEE, validatorIndex, }); diff --git a/packages/lodestar/src/chain/validation/syncCommitteeContributionAndProof.ts b/packages/lodestar/src/chain/validation/syncCommitteeContributionAndProof.ts index 17a82070b0c3..3aa141347b18 100644 --- a/packages/lodestar/src/chain/validation/syncCommitteeContributionAndProof.ts +++ b/packages/lodestar/src/chain/validation/syncCommitteeContributionAndProof.ts @@ -9,7 +9,6 @@ import { getSyncCommitteeContributionSignatureSet, getContributionPubkeys, } from "./signatureSets"; -import {PeerAction} from "../../network/peers"; /** * Spec v1.1.0-beta.2 @@ -39,7 +38,7 @@ export async function validateSyncCommitteeGossipContributionAndProof( // [IGNORE] The sync committee contribution is the first valid contribution received for the aggregator with index // contribution_and_proof.aggregator_index for the slot contribution.slot and subcommittee index contribution.subcommittee_index. 
if (chain.seenContributionAndProof.isKnown(slot, subcommitteeIndex, aggregatorIndex)) { - throw new SyncCommitteeError(GossipAction.IGNORE, null, { + throw new SyncCommitteeError(GossipAction.IGNORE, { code: SyncCommitteeErrorCode.SYNC_COMMITTEE_ALREADY_KNOWN, }); } @@ -47,7 +46,7 @@ export async function validateSyncCommitteeGossipContributionAndProof( // [REJECT] The contribution has participants -- that is, any(contribution.aggregation_bits) const pubkeys = getContributionPubkeys(headState as CachedBeaconStateAltair, contribution); if (!pubkeys.length) { - throw new SyncCommitteeError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.NO_PARTICIPANT, }); } @@ -55,7 +54,7 @@ export async function validateSyncCommitteeGossipContributionAndProof( // [REJECT] contribution_and_proof.selection_proof selects the validator as an aggregator for the slot -- // i.e. is_sync_committee_aggregator(contribution_and_proof.selection_proof) returns True. 
if (!isSyncCommitteeAggregator(contributionAndProof.selectionProof)) { - throw new SyncCommitteeError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.INVALID_AGGREGATOR, aggregatorIndex: contributionAndProof.aggregatorIndex, }); @@ -79,7 +78,7 @@ export async function validateSyncCommitteeGossipContributionAndProof( ]; if (!(await chain.bls.verifySignatureSets(signatureSets, {batchable: true}))) { - throw new SyncCommitteeError(GossipAction.REJECT, PeerAction.LowToleranceError, { + throw new SyncCommitteeError(GossipAction.REJECT, { code: SyncCommitteeErrorCode.INVALID_SIGNATURE, }); } diff --git a/packages/lodestar/src/chain/validation/voluntaryExit.ts b/packages/lodestar/src/chain/validation/voluntaryExit.ts index 7e946b034805..7fab5ec863a9 100644 --- a/packages/lodestar/src/chain/validation/voluntaryExit.ts +++ b/packages/lodestar/src/chain/validation/voluntaryExit.ts @@ -1,6 +1,5 @@ import {phase0, allForks} from "@chainsafe/lodestar-beacon-state-transition"; import {IBeaconChain} from ".."; -import {PeerAction} from "../../network"; import {VoluntaryExitError, VoluntaryExitErrorCode, GossipAction} from "../errors"; export async function validateGossipVoluntaryExit( @@ -10,7 +9,7 @@ export async function validateGossipVoluntaryExit( // [IGNORE] The voluntary exit is the first valid voluntary exit received for the validator with index // signed_voluntary_exit.message.validator_index. if (chain.opPool.hasSeenVoluntaryExit(voluntaryExit.message.validatorIndex)) { - throw new VoluntaryExitError(GossipAction.IGNORE, null, { + throw new VoluntaryExitError(GossipAction.IGNORE, { code: VoluntaryExitErrorCode.ALREADY_EXISTS, }); } @@ -26,17 +25,15 @@ export async function validateGossipVoluntaryExit( // [REJECT] All of the conditions within process_voluntary_exit pass validation. 
// verifySignature = false, verified in batch below - // These errors occur due to a fault in the beacon chain. It is not necessarily - // the fault on the peer. if (!allForks.isValidVoluntaryExit(state, voluntaryExit, false)) { - throw new VoluntaryExitError(GossipAction.REJECT, PeerAction.HighToleranceError, { + throw new VoluntaryExitError(GossipAction.REJECT, { code: VoluntaryExitErrorCode.INVALID, }); } const signatureSet = allForks.getVoluntaryExitSignatureSet(state, voluntaryExit); if (!(await chain.bls.verifySignatureSets([signatureSet], {batchable: true}))) { - throw new VoluntaryExitError(GossipAction.REJECT, PeerAction.HighToleranceError, { + throw new VoluntaryExitError(GossipAction.REJECT, { code: VoluntaryExitErrorCode.INVALID_SIGNATURE, }); } diff --git a/packages/lodestar/src/network/gossip/gossipsub.ts b/packages/lodestar/src/network/gossip/gossipsub.ts index d286952c37d3..184e519f94ce 100644 --- a/packages/lodestar/src/network/gossip/gossipsub.ts +++ b/packages/lodestar/src/network/gossip/gossipsub.ts @@ -42,13 +42,11 @@ import { GOSSIP_D_LOW, } from "./scoringParameters"; import {Eth2Context} from "../../chain"; -import {IPeerRpcScoreStore} from "../peers"; import {computeAllPeersScoreWeights} from "./scoreMetrics"; export interface IGossipsubModules { config: IBeaconConfig; libp2p: Libp2p; - peerRpcScores: IPeerRpcScoreStore; logger: ILogger; metrics: IMetrics | null; signal: AbortSignal; @@ -105,7 +103,6 @@ export class Eth2Gossipsub extends Gossipsub { const {validatorFnsByType, jobQueues} = createValidatorFnsByType(gossipHandlers, { config, logger, - peerRpcScores: modules.peerRpcScores, uncompressCache: this.uncompressCache, metrics, signal, diff --git a/packages/lodestar/src/network/gossip/validation/index.ts b/packages/lodestar/src/network/gossip/validation/index.ts index 52820a90ca67..d235a41875b1 100644 --- a/packages/lodestar/src/network/gossip/validation/index.ts +++ b/packages/lodestar/src/network/gossip/validation/index.ts @@ -18,13 
+18,10 @@ import {decodeMessageData, UncompressCache} from "../encoding"; import {createValidationQueues} from "./queue"; import {DEFAULT_ENCODING} from "../constants"; import {getGossipAcceptMetadataByType, GetGossipAcceptMetadataFn} from "./onAccept"; -import {IPeerRpcScoreStore, PeerAction} from "../../peers/score"; -import PeerId from "peer-id"; type ValidatorFnModules = { config: IChainForkConfig; logger: ILogger; - peerRpcScores: IPeerRpcScoreStore; metrics: IMetrics | null; uncompressCache: UncompressCache; }; @@ -81,14 +78,13 @@ function getGossipValidatorFn( return async function gossipValidatorFn(topic, gossipMsg, seenTimestampSec) { // Define in scope above try {} to be used in catch {} if object was parsed let gossipObject; - const {data, receivedFrom} = gossipMsg; try { const encoding = topic.encoding ?? DEFAULT_ENCODING; // Deserialize object from bytes ONLY after being picked up from the validation queue try { const sszType = getGossipSSZType(topic); - const messageData = decodeMessageData(encoding, data, uncompressCache); + const messageData = decodeMessageData(encoding, gossipMsg.data, uncompressCache); gossipObject = // TODO: Review if it's really necessary to deserialize this as TreeBacked topic.type === GossipType.beacon_block || topic.type === GossipType.beacon_aggregate_and_proof @@ -96,10 +92,10 @@ function getGossipValidatorFn( : sszType.deserialize(messageData); } catch (e) { // TODO: Log the error or do something better with it - throw new GossipActionError(GossipAction.REJECT, PeerAction.LowToleranceError, {code: (e as Error).message}); + throw new GossipActionError(GossipAction.REJECT, {code: (e as Error).message}); } - await (gossipHandler as GossipHandlerFn)(gossipObject, topic, receivedFrom, seenTimestampSec); + await (gossipHandler as GossipHandlerFn)(gossipObject, topic, gossipMsg.receivedFrom, seenTimestampSec); const metadata = getGossipObjectAcceptMetadata(config, gossipObject, topic); logger.debug(`gossip - ${type} - accept`, 
metadata); @@ -110,10 +106,6 @@ function getGossipValidatorFn( throw new GossipValidationError(ERR_TOPIC_VALIDATOR_IGNORE, (e as Error).message); } - if (e.lodestarAction) { - modules.peerRpcScores.applyAction(PeerId.createFromB58String(receivedFrom), e.lodestarAction); - } - // If the gossipObject was deserialized include its short metadata with the error data const metadata = gossipObject && getGossipObjectAcceptMetadata(config, gossipObject, topic); const errorData = {...metadata, ...e.getMetadata()}; diff --git a/packages/lodestar/src/network/network.ts b/packages/lodestar/src/network/network.ts index 04bd7541dfff..bc9f2e0ece0c 100644 --- a/packages/lodestar/src/network/network.ts +++ b/packages/lodestar/src/network/network.ts @@ -91,7 +91,6 @@ export class Network implements INetwork { config, libp2p, logger, - peerRpcScores, metrics, signal, gossipHandlers: gossipHandlers ?? getGossipHandlers({chain, config, logger, network: this, metrics}, opts), diff --git a/packages/lodestar/src/network/peers/peerManager.ts b/packages/lodestar/src/network/peers/peerManager.ts index b1bc701323ce..358b8456fd56 100644 --- a/packages/lodestar/src/network/peers/peerManager.ts +++ b/packages/lodestar/src/network/peers/peerManager.ts @@ -361,8 +361,6 @@ export class PeerManager { // ban and disconnect peers with bad score, collect rest of healthy peers const connectedHealthyPeers: PeerId[] = []; for (const peer of connectedPeers) { - // to decay score - this.peerRpcScores.update(peer); switch (this.peerRpcScores.getScoreState(peer)) { case ScoreState.Banned: void this.goodbyeAndDisconnect(peer, GoodByeReasonCode.BANNED); diff --git a/packages/lodestar/test/unit/network/gossip/gossipsub.test.ts b/packages/lodestar/test/unit/network/gossip/gossipsub.test.ts index 102c07cb1f45..07370089d70c 100644 --- a/packages/lodestar/test/unit/network/gossip/gossipsub.test.ts +++ b/packages/lodestar/test/unit/network/gossip/gossipsub.test.ts @@ -1,4 +1,3 @@ -import sinon, {SinonStubbedInstance} 
from "sinon"; import {expect, assert} from "chai"; import Libp2p from "libp2p"; import {InMessage} from "libp2p-interfaces/src/pubsub"; @@ -18,24 +17,19 @@ import {createNode} from "../../../utils/network"; import {testLogger} from "../../../utils/logger"; import {GossipAction, GossipActionError} from "../../../../src/chain/errors"; import {Eth2Context} from "../../../../src/chain"; -import {IPeerRpcScoreStore, PeerRpcScoreStore} from "../../../../src/network/peers/score"; describe("network / gossip / validation", function () { const logger = testLogger(); const metrics = null; const gossipType = GossipType.beacon_block; - const sandbox = sinon.createSandbox(); let message: InMessage; let topicString: string; let libp2p: Libp2p; let eth2Context: Eth2Context; - let peerRpcScoresStub: IPeerRpcScoreStore & SinonStubbedInstance; let controller: AbortController; beforeEach(() => { - peerRpcScoresStub = sandbox.createStubInstance(PeerRpcScoreStore) as IPeerRpcScoreStore & - SinonStubbedInstance; controller = new AbortController(); eth2Context = { activeValidatorCount: 16, @@ -43,11 +37,7 @@ describe("network / gossip / validation", function () { currentSlot: 1000 * SLOTS_PER_EPOCH, }; }); - - afterEach(() => { - controller.abort(); - sandbox.restore(); - }); + afterEach(() => controller.abort()); beforeEach(async function () { const signedBlock = generateEmptySignedBlock(); @@ -65,7 +55,7 @@ describe("network / gossip / validation", function () { it("should throw on failed validation", async () => { const gossipHandlersPartial: Partial = { [gossipType]: async () => { - throw new GossipActionError(GossipAction.REJECT, null, {code: "TEST_ERROR"}); + throw new GossipActionError(GossipAction.REJECT, {code: "TEST_ERROR"}); }, }; @@ -74,7 +64,6 @@ describe("network / gossip / validation", function () { gossipHandlers: gossipHandlersPartial as GossipHandlers, logger, libp2p, - peerRpcScores: peerRpcScoresStub, metrics, signal: controller.signal, eth2Context, @@ -106,7 +95,6 @@ 
describe("network / gossip / validation", function () { gossipHandlers: gossipHandlersPartial as GossipHandlers, logger, libp2p, - peerRpcScores: peerRpcScoresStub, metrics, signal: controller.signal, eth2Context, From 6d539c34b4c35f986239933eeef456217e78cbe6 Mon Sep 17 00:00:00 2001 From: tuyennhv Date: Tue, 8 Mar 2022 23:15:06 +0700 Subject: [PATCH 03/10] Cache message id right inside the message object (#3834) --- packages/lodestar/src/network/gossip/gossipsub.ts | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/lodestar/src/network/gossip/gossipsub.ts b/packages/lodestar/src/network/gossip/gossipsub.ts index 184e519f94ce..81a9cb6b471d 100644 --- a/packages/lodestar/src/network/gossip/gossipsub.ts +++ b/packages/lodestar/src/network/gossip/gossipsub.ts @@ -54,6 +54,12 @@ export interface IGossipsubModules { gossipHandlers: GossipHandlers; } +/** + * Cache message id right in message so that we don't have to compute it twice. + * When we send messages to other peers, protobuf will just ignore `msgId` field. + */ +type Eth2InMessage = InMessage & {msgId?: Uint8Array}; + /** * Wrapper around js-libp2p-gossipsub with the following extensions: * - Eth2 message id @@ -75,7 +81,6 @@ export class Eth2Gossipsub extends Gossipsub { // Internal caches private readonly gossipTopicCache: GossipTopicCache; private readonly uncompressCache = new UncompressCache(); - private readonly msgIdCache = new WeakMap(); private readonly validatorFnsByType: ValidatorFnsByType; @@ -131,14 +136,16 @@ export class Eth2Gossipsub extends Gossipsub { /** * @override Use eth2 msg id and cache results to the msg + * The cached msgId inside the message will be ignored when we send messages to other peers + * since we don't have this field in protobuf. 
*/ - getMsgId(msg: InMessage): Uint8Array { - let msgId = this.msgIdCache.get(msg); + getMsgId(msg: Eth2InMessage): Uint8Array { + let msgId = msg.msgId; if (!msgId) { const topicStr = msg.topicIDs[0]; const topic = this.gossipTopicCache.getTopic(topicStr); msgId = computeMsgId(topic, topicStr, msg.data, this.uncompressCache); - this.msgIdCache.set(msg, msgId); + msg.msgId = msgId; } return msgId; } From f597feab2f6bcb21089add6d679c6c158c5ba2f5 Mon Sep 17 00:00:00 2001 From: Lion - dapplion <35266934+dapplion@users.noreply.github.com> Date: Wed, 9 Mar 2022 12:22:40 +0530 Subject: [PATCH 04/10] Use in-memory peer store for ephemeral data (#3838) * Use in-memory peer store for ephemeral data * Bound PeerRpcScoreStore data structures * Update test type * Fix e2e test in PeerManager --- packages/lodestar/src/network/interface.ts | 3 +- packages/lodestar/src/network/network.ts | 19 +--- packages/lodestar/src/network/peers/index.ts | 1 - .../lodestar/src/network/peers/metastore.ts | 86 ------------------- .../lodestar/src/network/peers/peerManager.ts | 56 ++++++++---- packages/lodestar/src/network/peers/score.ts | 54 ++++++++---- .../lodestar/src/network/reqresp/interface.ts | 5 +- .../lodestar/src/network/reqresp/reqResp.ts | 13 +-- .../e2e/network/peers/peerManager.test.ts | 25 ++++-- .../test/unit/network/peers/metastore.test.ts | 69 --------------- .../test/unit/network/peers/score.test.ts | 27 ++---- packages/lodestar/test/utils/peer.ts | 22 ----- 12 files changed, 111 insertions(+), 269 deletions(-) delete mode 100644 packages/lodestar/src/network/peers/metastore.ts delete mode 100644 packages/lodestar/test/unit/network/peers/metastore.test.ts diff --git a/packages/lodestar/src/network/interface.ts b/packages/lodestar/src/network/interface.ts index b19c8a5e2b53..1d405c960ee1 100644 --- a/packages/lodestar/src/network/interface.ts +++ b/packages/lodestar/src/network/interface.ts @@ -8,7 +8,7 @@ import PeerId from "peer-id"; import {INetworkEventBus} from "./events"; 
import {Eth2Gossipsub} from "./gossip"; import {MetadataController} from "./metadata"; -import {IPeerMetadataStore, PeerAction} from "./peers"; +import {PeerAction} from "./peers"; import {IReqResp} from "./reqresp"; import {IAttnetsService, ISubnetsService, CommitteeSubscription} from "./subnets"; @@ -25,7 +25,6 @@ export interface INetwork { gossip: Eth2Gossipsub; discv5?: Discv5; metadata: MetadataController; - peerMetadata: IPeerMetadataStore; /** Our network identity */ peerId: PeerId; localMultiaddrs: Multiaddr[]; diff --git a/packages/lodestar/src/network/network.ts b/packages/lodestar/src/network/network.ts index bc9f2e0ece0c..cc62c66ca9c2 100644 --- a/packages/lodestar/src/network/network.ts +++ b/packages/lodestar/src/network/network.ts @@ -20,7 +20,6 @@ import {ReqResp, IReqResp, IReqRespOptions, ReqRespHandlers} from "./reqresp"; import {Eth2Gossipsub, GossipType, GossipHandlers, getGossipHandlers} from "./gossip"; import {MetadataController} from "./metadata"; import {getActiveForks, FORK_EPOCH_LOOKAHEAD} from "./forks"; -import {IPeerMetadataStore, Libp2pPeerMetadataStore} from "./peers/metastore"; import {PeerManager} from "./peers/peerManager"; import {IPeerRpcScoreStore, PeerAction, PeerRpcScoreStore} from "./peers"; import {INetworkEventBus, NetworkEventBus} from "./events"; @@ -45,7 +44,6 @@ export class Network implements INetwork { syncnetsService: SyncnetsService; gossip: Eth2Gossipsub; metadata: MetadataController; - peerMetadata: IPeerMetadataStore; private readonly peerRpcScores: IPeerRpcScoreStore; private readonly peerManager: PeerManager; @@ -66,24 +64,12 @@ export class Network implements INetwork { this.chain = chain; const networkEventBus = new NetworkEventBus(); const metadata = new MetadataController({}, {config, chain, logger}); - const peerMetadata = new Libp2pPeerMetadataStore(libp2p.peerStore.metadataBook); - const peerRpcScores = new PeerRpcScoreStore(peerMetadata); + const peerRpcScores = new PeerRpcScoreStore(); this.events = 
networkEventBus; this.metadata = metadata; this.peerRpcScores = peerRpcScores; - this.peerMetadata = peerMetadata; this.reqResp = new ReqResp( - { - config, - libp2p, - reqRespHandlers, - peerMetadata, - metadata, - peerRpcScores, - logger, - networkEventBus, - metrics, - }, + {config, libp2p, reqRespHandlers, metadata, peerRpcScores, logger, networkEventBus, metrics}, opts ); @@ -114,7 +100,6 @@ export class Network implements INetwork { metrics, chain, config, - peerMetadata, peerRpcScores, networkEventBus, }, diff --git a/packages/lodestar/src/network/peers/index.ts b/packages/lodestar/src/network/peers/index.ts index 02eaab9a9ad1..326467975b98 100644 --- a/packages/lodestar/src/network/peers/index.ts +++ b/packages/lodestar/src/network/peers/index.ts @@ -1,3 +1,2 @@ -export * from "./metastore"; export * from "./peerManager"; export * from "./score"; diff --git a/packages/lodestar/src/network/peers/metastore.ts b/packages/lodestar/src/network/peers/metastore.ts deleted file mode 100644 index bcafc47c0124..000000000000 --- a/packages/lodestar/src/network/peers/metastore.ts +++ /dev/null @@ -1,86 +0,0 @@ -import MetadataBook from "libp2p/src/peer-store/metadata-book"; -import PeerId from "peer-id"; -import {altair, ssz} from "@chainsafe/lodestar-types"; -import {ReqRespEncoding} from "../reqresp"; -import {Type} from "@chainsafe/ssz"; - -/** - * Get/set data about peers. 
- */ -export interface IPeerMetadataStore { - encoding: PeerStoreBucket; - metadata: PeerStoreBucket; - rpcScore: PeerStoreBucket; - rpcScoreLastUpdate: PeerStoreBucket; -} - -export type PeerStoreBucket = { - set: (peer: PeerId, value: T) => void; - get: (peer: PeerId) => T | undefined; -}; - -type BucketSerdes = { - serialize: (value: T) => Uint8Array; - deserialize: (data: Uint8Array) => T; -}; - -/** - * Wrapper around Libp2p.peerstore.metabook - * that uses ssz serialization to store data - */ -export class Libp2pPeerMetadataStore implements IPeerMetadataStore { - encoding: PeerStoreBucket; - metadata: PeerStoreBucket; - rpcScore: PeerStoreBucket; - rpcScoreLastUpdate: PeerStoreBucket; - - private readonly metabook: MetadataBook; - - constructor(metabook: MetadataBook) { - this.metabook = metabook; - - const number64Serdes = typeToSerdes(ssz.Number64); - const metadataV2Serdes = typeToSerdes(ssz.altair.Metadata); - const stringSerdes: BucketSerdes = { - serialize: (v) => Buffer.from(v, "utf8"), - deserialize: (b) => Buffer.from(b).toString("utf8") as ReqRespEncoding, - }; - const floatSerdes: BucketSerdes = { - serialize: (f) => Buffer.from(String(f), "utf8"), - deserialize: (b) => parseFloat(Buffer.from(b).toString("utf8")), - }; - - this.encoding = this.typedStore("encoding", stringSerdes); - // Discard existing `metadata` stored values. 
Store both phase0 and altair Metadata objects as altair - // Serializing altair.Metadata instead of phase0.Metadata has a cost of just `SYNC_COMMITTEE_SUBNET_COUNT // 8` bytes - this.metadata = this.typedStore("metadata-altair", metadataV2Serdes); - this.rpcScore = this.typedStore("score", floatSerdes); - this.rpcScoreLastUpdate = this.typedStore("score-last-update", number64Serdes); - } - - private typedStore(key: string, type: BucketSerdes): PeerStoreBucket { - return { - set: (peer: PeerId, value: T): void => { - if (value != null) { - // TODO: fix upstream type (which also contains @ts-ignore) - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - this.metabook.set(peer, key, Buffer.from(type.serialize(value))); - } else { - this.metabook.deleteValue(peer, key); - } - }, - get: (peer: PeerId): T | undefined => { - const value = this.metabook.getValue(peer, key); - return value ? type.deserialize(value) : undefined; - }, - }; - } -} - -function typeToSerdes(type: Type): BucketSerdes { - return { - serialize: (v) => type.serialize(v), - deserialize: (b) => type.deserialize(b), - }; -} diff --git a/packages/lodestar/src/network/peers/peerManager.ts b/packages/lodestar/src/network/peers/peerManager.ts index 358b8456fd56..af56504b1249 100644 --- a/packages/lodestar/src/network/peers/peerManager.ts +++ b/packages/lodestar/src/network/peers/peerManager.ts @@ -11,7 +11,6 @@ import {NetworkEvent, INetworkEventBus} from "../events"; import {IReqResp, ReqRespMethod, RequestTypedContainer} from "../reqresp"; import {prettyPrintPeerId, getClientFromPeerStore} from "../util"; import {ISubnetsService} from "../subnets"; -import {Libp2pPeerMetadataStore} from "./metastore"; import {PeerDiscovery, SubnetDiscvQueryMs} from "./discover"; import {IPeerRpcScoreStore, ScoreState} from "./score"; import { @@ -69,7 +68,6 @@ export type PeerManagerModules = { syncnetsService: ISubnetsService; chain: IBeaconChain; config: IBeaconConfig; - peerMetadata: 
Libp2pPeerMetadataStore; peerRpcScores: IPeerRpcScoreStore; networkEventBus: INetworkEventBus; }; @@ -89,6 +87,7 @@ type PeerData = { relevantStatus: RelevantPeerStatus; direction: Connection["stat"]["direction"]; peerId: PeerId; + metadata: altair.Metadata | null; }; /** @@ -108,7 +107,6 @@ export class PeerManager { private syncnetsService: ISubnetsService; private chain: IBeaconChain; private config: IBeaconConfig; - private peerMetadata: Libp2pPeerMetadataStore; private peerRpcScores: IPeerRpcScoreStore; /** If null, discovery is disabled */ private discovery: PeerDiscovery | null; @@ -129,7 +127,6 @@ export class PeerManager { this.syncnetsService = modules.syncnetsService; this.chain = modules.chain; this.config = modules.config; - this.peerMetadata = modules.peerMetadata; this.peerRpcScores = modules.peerRpcScores; this.networkEventBus = modules.networkEventBus; this.opts = opts; @@ -246,7 +243,7 @@ export class PeerManager { */ private onPing(peer: PeerId, seqNumber: phase0.Ping): void { // if the sequence number is unknown update the peer's metadata - const metadata = this.peerMetadata.metadata.get(peer); + const metadata = this.connectedPeers.get(peer.toB58String())?.metadata; if (!metadata || metadata.seqNumber < seqNumber) { void this.requestMetadata(peer); } @@ -258,10 +255,14 @@ export class PeerManager { private onMetadata(peer: PeerId, metadata: allForks.Metadata): void { // Store metadata always in case the peer updates attnets but not the sequence number // Trust that the peer always sends the latest metadata (From Lighthouse) - this.peerMetadata.metadata.set(peer, { - ...metadata, - syncnets: (metadata as Partial).syncnets || [], - }); + const peerData = this.connectedPeers.get(peer.toB58String()); + if (peerData) { + peerData.metadata = { + seqNumber: metadata.seqNumber, + attnets: metadata.attnets, + syncnets: (metadata as Partial).syncnets || [], + }; + } } /** @@ -328,6 +329,10 @@ export class PeerManager { private async requestPing(peer: 
PeerId): Promise { try { this.onPing(peer, await this.reqResp.ping(peer)); + + // If peer replies a PING request also update lastReceivedMsg + const peerData = this.connectedPeers.get(peer.toB58String()); + if (peerData) peerData.lastReceivedMsgUnixTsMs = Date.now(); } catch (e) { // TODO: Downvote peer here or in the reqResp layer } @@ -358,6 +363,9 @@ export class PeerManager { private heartbeat(): void { const connectedPeers = this.getConnectedPeerIds(); + // Decay scores before reading them. Also prunes scores + this.peerRpcScores.update(); + // ban and disconnect peers with bad score, collect rest of healthy peers const connectedHealthyPeers: PeerId[] = []; for (const peer of connectedPeers) { @@ -374,12 +382,15 @@ export class PeerManager { } const {peersToDisconnect, peersToConnect, attnetQueries, syncnetQueries} = prioritizePeers( - connectedHealthyPeers.map((peer) => ({ - id: peer, - attnets: this.peerMetadata.metadata.get(peer)?.attnets ?? [], - syncnets: this.peerMetadata.metadata.get(peer)?.syncnets ?? [], - score: this.peerRpcScores.getScore(peer), - })), + connectedHealthyPeers.map((peer) => { + const peerData = this.connectedPeers.get(peer.toB58String()); + return { + id: peer, + attnets: peerData?.metadata?.attnets ?? [], + syncnets: peerData?.metadata?.syncnets ?? [], + score: this.peerRpcScores.getScore(peer), + }; + }), // Collect subnets which we need peers for in the current slot this.attnetsService.getActiveSubnets(), this.syncnetsService.getActiveSubnets(), @@ -423,6 +434,18 @@ export class PeerManager { for (const peer of peersToDisconnect) { void this.goodbyeAndDisconnect(peer, GoodByeReasonCode.TOO_MANY_PEERS); } + + // Prune connectedPeers map in case it leaks. 
It has happen in previous nodes, + // disconnect is not always called for all peers + if (this.connectedPeers.size > connectedPeers.length * 2) { + const actualConnectedPeerIds = new Set(connectedPeers.map((peerId) => peerId.toB58String())); + for (const [peerIdStr, peerData] of this.connectedPeers) { + if (!actualConnectedPeerIds.has(peerIdStr)) { + this.connectedPeers.delete(peerIdStr); + this.reqResp.pruneOnPeerDisconnect(peerData.peerId); + } + } + } } private pingAndStatusTimeouts(): void { @@ -479,6 +502,7 @@ export class PeerManager { relevantStatus: RelevantPeerStatus.Unknown, direction, peerId: peer, + metadata: null, }); if (direction === "outbound") { @@ -503,7 +527,7 @@ export class PeerManager { this.logger.verbose("peer disconnected", {peer: prettyPrintPeerId(peer), direction, status}); this.networkEventBus.emit(NetworkEvent.peerDisconnected, peer); - this.reqResp.pruneRateLimiterData(peer); + this.reqResp.pruneOnPeerDisconnect(peer); this.metrics?.peerDisconnectedEvent.inc({direction}); }; diff --git a/packages/lodestar/src/network/peers/score.ts b/packages/lodestar/src/network/peers/score.ts index 38ca0c2e6f64..203e2a00114f 100644 --- a/packages/lodestar/src/network/peers/score.ts +++ b/packages/lodestar/src/network/peers/score.ts @@ -1,5 +1,5 @@ import PeerId from "peer-id"; -import {IPeerMetadataStore} from "./metastore"; +import {pruneSetToMax} from "../../util/map"; /** The default score for new peers */ const DEFAULT_SCORE = 0; @@ -11,11 +11,15 @@ const MIN_SCORE_BEFORE_BAN = -50; const MAX_SCORE = 100; /** The minimum score a peer can obtain */ const MIN_SCORE = -100; +/** Drop score if absolute value is below this threshold */ +const SCORE_THRESHOLD = 1; /** The halflife of a peer's score. 
I.e the number of miliseconds it takes for the score to decay to half its value */ const SCORE_HALFLIFE_MS = 10 * 60 * 1000; const HALFLIFE_DECAY_MS = -Math.log(2) / SCORE_HALFLIFE_MS; /** The number of miliseconds we ban a peer for before their score begins to decay */ -const BANNED_BEFORE_DECAY_MS = 1800 * 1000; +const BANNED_BEFORE_DECAY_MS = 30 * 60 * 1000; +/** Limit of entries in the scores map */ +const MAX_ENTRIES = 1000; export enum PeerAction { /** Immediately ban peer */ @@ -59,11 +63,13 @@ function scoreToState(score: number): ScoreState { return ScoreState.Healthy; } +type PeerIdStr = string; + export interface IPeerRpcScoreStore { getScore(peer: PeerId): number; getScoreState(peer: PeerId): ScoreState; applyAction(peer: PeerId, action: PeerAction, actionName?: string): void; - update(peer: PeerId): void; + update(): void; } /** @@ -72,14 +78,13 @@ export interface IPeerRpcScoreStore { * The decay rate applies equally to positive and negative scores. */ export class PeerRpcScoreStore implements IPeerRpcScoreStore { - private readonly store: IPeerMetadataStore; + private readonly scores = new Map(); + private readonly lastUpdate = new Map(); - constructor(store: IPeerMetadataStore) { - this.store = store; - } + // TODO: Persist scores, at least BANNED status to disk getScore(peer: PeerId): number { - return this.store.rpcScore.get(peer) ?? DEFAULT_SCORE; + return this.scores.get(peer.toB58String()) ?? 
DEFAULT_SCORE; } getScoreState(peer: PeerId): ScoreState { @@ -93,20 +98,37 @@ export class PeerRpcScoreStore implements IPeerRpcScoreStore { actionName; } - update(peer: PeerId): void { - this.add(peer, 0); + update(): void { + // Bound size of data structures + pruneSetToMax(this.scores, MAX_ENTRIES); + pruneSetToMax(this.lastUpdate, MAX_ENTRIES); + + for (const [peerIdStr, prevScore] of this.scores) { + const newScore = this.decayScore(peerIdStr, prevScore); + + // Prune scores below threshold + if (Math.abs(newScore) < SCORE_THRESHOLD) { + this.scores.delete(peerIdStr); + this.lastUpdate.delete(peerIdStr); + } + + // If above threshold, persist decayed value + else { + this.scores.set(peerIdStr, newScore); + } + } } - private decayScore(peer: PeerId, prevScore: number): number { + private decayScore(peer: PeerIdStr, prevScore: number): number { const nowMs = Date.now(); - const lastUpdate = this.store.rpcScoreLastUpdate.get(peer) ?? nowMs; + const lastUpdate = this.lastUpdate.get(peer) ?? nowMs; // Decay the current score // Using exponential decay based on a constant half life. 
const sinceLastUpdateMs = nowMs - lastUpdate; // If peer was banned, lastUpdate will be in the future if (sinceLastUpdateMs > 0 && prevScore !== 0) { - this.store.rpcScoreLastUpdate.set(peer, nowMs); + this.lastUpdate.set(peer, nowMs); // e^(-ln(2)/HL*t) const decayFactor = Math.exp(HALFLIFE_DECAY_MS * sinceLastUpdateMs); return prevScore * decayFactor; @@ -118,7 +140,7 @@ export class PeerRpcScoreStore implements IPeerRpcScoreStore { private add(peer: PeerId, scoreDelta: number): void { const prevScore = this.getScore(peer); - let newScore = this.decayScore(peer, prevScore) + scoreDelta; + let newScore = this.decayScore(peer.toB58String(), prevScore) + scoreDelta; if (newScore > MAX_SCORE) newScore = MAX_SCORE; if (newScore < MIN_SCORE) newScore = MIN_SCORE; @@ -126,9 +148,9 @@ export class PeerRpcScoreStore implements IPeerRpcScoreStore { const newState = scoreToState(newScore); if (prevState !== ScoreState.Banned && newState === ScoreState.Banned) { // ban this peer for at least BANNED_BEFORE_DECAY_MS seconds - this.store.rpcScoreLastUpdate.set(peer, Date.now() + BANNED_BEFORE_DECAY_MS); + this.lastUpdate.set(peer.toB58String(), Date.now() + BANNED_BEFORE_DECAY_MS); } - this.store.rpcScore.set(peer, newScore); + this.scores.set(peer.toB58String(), newScore); } } diff --git a/packages/lodestar/src/network/reqresp/interface.ts b/packages/lodestar/src/network/reqresp/interface.ts index 93b0838716c8..99323caf4b1b 100644 --- a/packages/lodestar/src/network/reqresp/interface.ts +++ b/packages/lodestar/src/network/reqresp/interface.ts @@ -4,7 +4,7 @@ import {ForkName} from "@chainsafe/lodestar-params"; import {IBeaconConfig} from "@chainsafe/lodestar-config"; import {allForks, phase0} from "@chainsafe/lodestar-types"; import {ILogger} from "@chainsafe/lodestar-utils"; -import {IPeerMetadataStore, IPeerRpcScoreStore} from "../peers"; +import {IPeerRpcScoreStore} from "../peers"; import {MetadataController} from "../metadata"; import {INetworkEventBus} from "../events"; 
import {ReqRespHandlers} from "./handlers"; @@ -23,7 +23,7 @@ export interface IReqResp { request: phase0.BeaconBlocksByRangeRequest ): Promise; beaconBlocksByRoot(peerId: PeerId, request: phase0.BeaconBlocksByRootRequest): Promise; - pruneRateLimiterData(peerId: PeerId): void; + pruneOnPeerDisconnect(peerId: PeerId): void; } export interface IReqRespModules { @@ -32,7 +32,6 @@ export interface IReqRespModules { logger: ILogger; metadata: MetadataController; reqRespHandlers: ReqRespHandlers; - peerMetadata: IPeerMetadataStore; peerRpcScores: IPeerRpcScoreStore; networkEventBus: INetworkEventBus; metrics: IMetrics | null; diff --git a/packages/lodestar/src/network/reqresp/reqResp.ts b/packages/lodestar/src/network/reqresp/reqResp.ts index 4a59f1872b59..d112830add1f 100644 --- a/packages/lodestar/src/network/reqresp/reqResp.ts +++ b/packages/lodestar/src/network/reqresp/reqResp.ts @@ -15,7 +15,7 @@ import {IReqResp, IReqRespModules, IRateLimiter, Libp2pStream} from "./interface import {sendRequest} from "./request"; import {handleRequest, ResponseError} from "./response"; import {onOutgoingReqRespError} from "./score"; -import {IPeerMetadataStore, IPeerRpcScoreStore} from "../peers"; +import {IPeerRpcScoreStore} from "../peers"; import {assertSequentialBlocksInRange, formatProtocolId} from "./utils"; import {MetadataController} from "../metadata"; import {INetworkEventBus, NetworkEvent} from "../events"; @@ -48,7 +48,6 @@ export class ReqResp implements IReqResp { private logger: ILogger; private reqRespHandlers: ReqRespHandlers; private metadataController: MetadataController; - private peerMetadata: IPeerMetadataStore; private peerRpcScores: IPeerRpcScoreStore; private inboundRateLimiter: IRateLimiter; private networkEventBus: INetworkEventBus; @@ -58,12 +57,13 @@ export class ReqResp implements IReqResp { private respCount = 0; private metrics: IMetrics | null; + private readonly encodingPreference = new Map(); + constructor(modules: IReqRespModules, options: 
IReqRespOptions & RateLimiterOpts) { this.config = modules.config; this.libp2p = modules.libp2p; this.logger = modules.logger; this.reqRespHandlers = modules.reqRespHandlers; - this.peerMetadata = modules.peerMetadata; this.metadataController = modules.metadata; this.peerRpcScores = modules.peerRpcScores; this.inboundRateLimiter = new InboundRateLimiter(options, {...modules}); @@ -137,8 +137,9 @@ export class ReqResp implements IReqResp { ); } - pruneRateLimiterData(peerId: PeerId): void { + pruneOnPeerDisconnect(peerId: PeerId): void { this.inboundRateLimiter.prune(peerId); + this.encodingPreference.delete(peerId.toB58String()); } // Helper to reduce code duplication @@ -152,7 +153,7 @@ export class ReqResp implements IReqResp { try { this.metrics?.reqRespOutgoingRequests.inc({method}); - const encoding = this.peerMetadata.encoding.get(peerId) ?? Encoding.SSZ_SNAPPY; + const encoding = this.encodingPreference.get(peerId.toB58String()) ?? Encoding.SSZ_SNAPPY; const result = await sendRequest( {forkDigestContext: this.config, logger: this.logger, libp2p: this.libp2p}, peerId, @@ -190,7 +191,7 @@ export class ReqResp implements IReqResp { // TODO: Do we really need this now that there is only one encoding? 
// Remember the prefered encoding of this peer if (method === Method.Status) { - this.peerMetadata.encoding.set(peerId, encoding); + this.encodingPreference.set(peerId.toB58String(), encoding); } try { diff --git a/packages/lodestar/test/e2e/network/peers/peerManager.test.ts b/packages/lodestar/test/e2e/network/peers/peerManager.test.ts index 128ccf049dbc..b904579e6ad6 100644 --- a/packages/lodestar/test/e2e/network/peers/peerManager.test.ts +++ b/packages/lodestar/test/e2e/network/peers/peerManager.test.ts @@ -4,7 +4,7 @@ import sinon from "sinon"; import {expect} from "chai"; import {config} from "@chainsafe/lodestar-config/default"; import {IReqResp, ReqRespMethod} from "../../../../src/network/reqresp"; -import {PeerRpcScoreStore, PeerManager, Libp2pPeerMetadataStore} from "../../../../src/network/peers"; +import {PeerRpcScoreStore, PeerManager} from "../../../../src/network/peers"; import {NetworkEvent, NetworkEventBus} from "../../../../src/network"; import {createNode, getAttnets, getSyncnets} from "../../../utils/network"; import {MockBeaconChain} from "../../../utils/mocks/chain/chain"; @@ -57,8 +57,7 @@ describe("network / peers / PeerManager", function () { }); const reqResp = new ReqRespFake(); - const peerMetadata = new Libp2pPeerMetadataStore(libp2p.peerStore.metadataBook); - const peerRpcScores = new PeerRpcScoreStore(peerMetadata); + const peerRpcScores = new PeerRpcScoreStore(); const networkEventBus = new NetworkEventBus(); /* eslint-disable @typescript-eslint/no-empty-function */ const mockSubnetsService: IAttnetsService = { @@ -79,7 +78,6 @@ describe("network / peers / PeerManager", function () { metrics: null, chain, config: beaconConfig, - peerMetadata, peerRpcScores, networkEventBus, attnetsService: mockSubnetsService, @@ -94,7 +92,7 @@ describe("network / peers / PeerManager", function () { ); await peerManager.start(); - return {chain, libp2p, reqResp, peerMetadata, peerManager, networkEventBus}; + return {chain, libp2p, reqResp, 
peerManager, networkEventBus}; } // Create a real event emitter with stubbed methods @@ -107,11 +105,17 @@ describe("network / peers / PeerManager", function () { ping = sinon.stub(); beaconBlocksByRange = sinon.stub(); beaconBlocksByRoot = sinon.stub(); - pruneRateLimiterData = sinon.stub(); + pruneOnPeerDisconnect = sinon.stub(); } it("Should request metadata on receivedPing of unknown peer", async () => { - const {reqResp, networkEventBus} = await mockModules(); + const {reqResp, networkEventBus, peerManager} = await mockModules(); + + // Simulate connection so that PeerManager persists the metadata response + peerManager["onLibp2pPeerConnect"]({ + stat: {direction: "inbound", status: "open"}, + remotePeer: peerId1, + } as Connection); const seqNumber = BigInt(2); const metadata: phase0.Metadata = {seqNumber, attnets: []}; @@ -157,7 +161,7 @@ describe("network / peers / PeerManager", function () { }); it("On peerConnect handshake flow", async function () { - const {chain, libp2p, reqResp, peerMetadata, networkEventBus} = await mockModules(); + const {chain, libp2p, reqResp, peerManager, networkEventBus} = await mockModules(); // Simualate a peer connection, get() should return truthy libp2p.connectionManager.get = sinon.stub().returns({}); @@ -190,6 +194,9 @@ describe("network / peers / PeerManager", function () { expect(reqResp.status.callCount).to.equal(1, "reqResp.status must be called"); expect(reqResp.metadata.callCount).to.equal(1, "reqResp.metadata must be called"); - expect(peerMetadata.metadata.get(peerId1)).to.deep.equal(remoteMetadata, "Wrong stored metadata"); + expect(peerManager["connectedPeers"].get(peerId1.toB58String())?.metadata).to.deep.equal( + remoteMetadata, + "Wrong stored metadata" + ); }); }); diff --git a/packages/lodestar/test/unit/network/peers/metastore.test.ts b/packages/lodestar/test/unit/network/peers/metastore.test.ts deleted file mode 100644 index 1f5e0ffbd2c8..000000000000 --- 
a/packages/lodestar/test/unit/network/peers/metastore.test.ts +++ /dev/null @@ -1,69 +0,0 @@ -import {Libp2pPeerMetadataStore} from "../../../../src/network/peers/metastore"; -import sinon, {SinonStub, SinonStubbedInstance} from "sinon"; -import {ReqRespEncoding} from "../../../../src/network/reqresp"; -import {expect} from "chai"; -import PeerId from "peer-id"; -import {altair, phase0, ssz} from "@chainsafe/lodestar-types"; -import MetadataBook from "libp2p/src/peer-store/metadata-book"; -import ProtoBook from "libp2p/src/peer-store/proto-book"; - -describe("Libp2pPeerMetadataStore", function () { - let metabookStub: SinonStubbedInstance & MetadataBook; - - const peerId = PeerId.createFromB58String("Qma9T5YraSnpRDZqRR4krcSJabThc8nwZuJV3LercPHufi"); - - beforeEach(function () { - let stored: Buffer; - metabookStub = { - data: new Map>(), - delete: sinon.stub(), - deleteValue: sinon.stub(), - get: sinon.stub(), - getValue: sinon.stub().callsFake(() => { - return stored; - }) as SinonStub<[PeerId, string], Buffer>, - // TODO: fix upstream type (which also contains @ts-ignore) - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - set: sinon.stub().callsFake( - (peerId: PeerId, key: string, value: Buffer): ProtoBook => { - stored = value; - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - return (metabookStub as unknown) as ProtoBook; - } - ) as SinonStub<[PeerId, string, Buffer], ProtoBook>, - }; - }); - - it("can store and retrieve encoding", function () { - const store = new Libp2pPeerMetadataStore(metabookStub); - const value = ReqRespEncoding.SSZ_SNAPPY; - store.encoding.set(peerId, value); - const result = store.encoding.get(peerId); - - expect(result).to.be.equal(value); - }); - - it("can store and retrieve metadata", function () { - const store = new Libp2pPeerMetadataStore(metabookStub); - const value: altair.Metadata = { - attnets: Array.from({length: 64}, () => true), - seqNumber: BigInt(20), - // This will 
serialize fine, to 0x00 - syncnets: [], - }; - store.metadata.set(peerId, value); - const result = store.metadata.get(peerId); - - expect(ssz.phase0.Metadata.equals(result as phase0.Metadata, value)).to.be.true; - }); - - it("can store and retrieve score", function () { - const store = new Libp2pPeerMetadataStore(metabookStub); - const value = 80; - store.rpcScore.set(peerId, value); - const result = store.rpcScore.get(peerId); - - expect(ssz.Number64.equals(result as number, value)).to.be.true; - }); -}); diff --git a/packages/lodestar/test/unit/network/peers/score.test.ts b/packages/lodestar/test/unit/network/peers/score.test.ts index 3bbf64820c4e..fe81c3f6e162 100644 --- a/packages/lodestar/test/unit/network/peers/score.test.ts +++ b/packages/lodestar/test/unit/network/peers/score.test.ts @@ -1,7 +1,6 @@ import {expect} from "chai"; import PeerId from "peer-id"; import {PeerAction, ScoreState, PeerRpcScoreStore} from "../../../../src/network/peers/score"; -import {IPeerMetadataStore} from "../../../../src/network/peers"; describe("simple block provider score tracking", function () { const peer = PeerId.createFromB58String("Qma9T5YraSnpRDZqRR4krcSJabThc8nwZuJV3LercPHufi"); @@ -9,13 +8,7 @@ describe("simple block provider score tracking", function () { // eslint-disable-next-line @typescript-eslint/explicit-function-return-type function mockStore() { - const store: IPeerMetadataStore = { - encoding: new PeerMap(), - metadata: new PeerMap(), - rpcScore: new PeerMap(), - rpcScoreLastUpdate: new PeerMap(), - }; - return {store, scoreStore: new PeerRpcScoreStore(store)}; + return {scoreStore: new PeerRpcScoreStore()}; } it("Should return default score, without any previous action", function () { @@ -47,10 +40,10 @@ describe("simple block provider score tracking", function () { ]; for (const [minScore, timeToDecay] of decayTimes) it(`Should decay MIN_SCORE to ${minScore} after ${timeToDecay} ms`, () => { - const {store, scoreStore} = mockStore(); - 
store.rpcScore.set(peer, MIN_SCORE); - store.rpcScoreLastUpdate.set(peer, Date.now() - timeToDecay * factorForJsBadMath); - scoreStore.update(peer); + const {scoreStore} = mockStore(); + scoreStore["scores"].set(peer.toB58String(), MIN_SCORE); + scoreStore["lastUpdate"].set(peer.toB58String(), Date.now() - timeToDecay * factorForJsBadMath); + scoreStore.update(); expect(scoreStore.getScore(peer)).to.be.greaterThan(minScore); }); @@ -61,13 +54,3 @@ describe("simple block provider score tracking", function () { expect(scoreStore.getScore(peer)).to.be.gte(MIN_SCORE); }); }); - -class PeerMap { - map = new Map(); - get(peer: PeerId): T | undefined { - return this.map.get(peer.toB58String()); - } - set(peer: PeerId, value: T): void { - this.map.set(peer.toB58String(), value); - } -} diff --git a/packages/lodestar/test/utils/peer.ts b/packages/lodestar/test/utils/peer.ts index a3827f16bdf2..467b85d3f580 100644 --- a/packages/lodestar/test/utils/peer.ts +++ b/packages/lodestar/test/utils/peer.ts @@ -1,6 +1,4 @@ import PeerId from "peer-id"; -import sinon from "sinon"; -import {PeerStoreBucket, IPeerMetadataStore} from "../../src/network/peers/metastore"; /** * Returns a valid PeerId with opts `bits: 256, keyType: "secp256k1"` @@ -10,23 +8,3 @@ export function getValidPeerId(): PeerId { const id = Buffer.from("002508021221039481269fe831799b1a0f1d521c1395b4831514859e4559c44d155eae46f03819", "hex"); return new PeerId(id); } - -function getStubbedMetadataStoreItem(): sinon.SinonStubbedInstance> { - return { - get: sinon.stub(), - set: sinon.stub(), - }; -} - -export type StubbedIPeerMetadataStore = { - [K in keyof IPeerMetadataStore]: sinon.SinonStubbedInstance; -}; - -export function getStubbedMetadataStore(): StubbedIPeerMetadataStore { - return { - encoding: getStubbedMetadataStoreItem(), - metadata: getStubbedMetadataStoreItem(), - rpcScore: getStubbedMetadataStoreItem(), - rpcScoreLastUpdate: getStubbedMetadataStoreItem(), - }; -} From 
1378bb990b1d6becfffd77e1af22ff79b129d790 Mon Sep 17 00:00:00 2001 From: tuyennhv Date: Wed, 9 Mar 2022 23:44:59 +0700 Subject: [PATCH 05/10] Cache uncompressed data in Eth2InMessage (#3839) --- .../lodestar/src/network/gossip/encoding.ts | 76 +++++-------------- .../lodestar/src/network/gossip/gossipsub.ts | 17 +---- .../lodestar/src/network/gossip/interface.ts | 11 ++- .../src/network/gossip/validation/index.ts | 10 +-- 4 files changed, 38 insertions(+), 76 deletions(-) diff --git a/packages/lodestar/src/network/gossip/encoding.ts b/packages/lodestar/src/network/gossip/encoding.ts index b751b7e002d9..65f9219f1b4b 100644 --- a/packages/lodestar/src/network/gossip/encoding.ts +++ b/packages/lodestar/src/network/gossip/encoding.ts @@ -1,48 +1,28 @@ import {compress, uncompress} from "snappyjs"; import {intToBytes} from "@chainsafe/lodestar-utils"; import {hash} from "@chainsafe/ssz"; +import {ForkName} from "@chainsafe/lodestar-params"; import { DEFAULT_ENCODING, GOSSIP_MSGID_LENGTH, MESSAGE_DOMAIN_INVALID_SNAPPY, MESSAGE_DOMAIN_VALID_SNAPPY, } from "./constants"; -import {GossipEncoding, GossipTopic} from "./interface"; -import {ForkName} from "@chainsafe/lodestar-params"; - -export interface IUncompressCache { - uncompress(input: Uint8Array): Uint8Array; -} - -export class UncompressCache implements IUncompressCache { - private cache = new WeakMap(); - - uncompress(input: Uint8Array): Uint8Array { - let uncompressed = this.cache.get(input); - if (!uncompressed) { - uncompressed = uncompress(input); - this.cache.set(input, uncompressed); - } - return uncompressed; - } -} +import {Eth2InMessage, GossipEncoding, GossipTopic} from "./interface"; /** - * Decode message using `IUncompressCache`. Message will have been uncompressed before to compute the msgId. - * We must re-use that result to prevent uncompressing the object again here. 
+ * Uncompressed data is used to + * - compute message id + * - if message is not seen then we use it to deserialize to gossip object + * + * We cache uncompressed data in InMessage to prevent uncompressing multiple times. */ -export function decodeMessageData( - encoding: GossipEncoding, - msgData: Uint8Array, - uncompressCache: IUncompressCache -): Uint8Array { - switch (encoding) { - case GossipEncoding.ssz_snappy: - return uncompressCache.uncompress(msgData); - - default: - throw new Error(`Unsupported encoding ${encoding}`); +export function getUncompressedData(msg: Eth2InMessage): Uint8Array { + if (!msg.uncompressedData) { + msg.uncompressedData = uncompress(msg.data); } + + return msg.uncompressedData; } export function encodeMessageData(encoding: GossipEncoding, msgData: Uint8Array): Uint8Array { @@ -58,18 +38,13 @@ export function encodeMessageData(encoding: GossipEncoding, msgData: Uint8Array) /** * Function to compute message id for all forks. */ -export function computeMsgId( - topic: GossipTopic, - topicStr: string, - msgData: Uint8Array, - uncompressCache: IUncompressCache -): Uint8Array { +export function computeMsgId(topic: GossipTopic, topicStr: string, msg: Eth2InMessage): Uint8Array { switch (topic.fork) { case ForkName.phase0: - return computeMsgIdPhase0(topic, msgData, uncompressCache); + return computeMsgIdPhase0(topic, msg); case ForkName.altair: case ForkName.bellatrix: - return computeMsgIdAltair(topic, topicStr, msgData, uncompressCache); + return computeMsgIdAltair(topic, topicStr, msg); } } @@ -79,18 +54,14 @@ export function computeMsgId( * SHA256(MESSAGE_DOMAIN_VALID_SNAPPY + snappy_decompress(message.data))[:20] * ``` */ -export function computeMsgIdPhase0( - topic: GossipTopic, - msgData: Uint8Array, - uncompressCache: IUncompressCache -): Uint8Array { +export function computeMsgIdPhase0(topic: GossipTopic, msg: Eth2InMessage): Uint8Array { switch (topic.encoding ?? 
DEFAULT_ENCODING) { case GossipEncoding.ssz_snappy: try { - const uncompressed = uncompressCache.uncompress(msgData); + const uncompressed = getUncompressedData(msg); return hashGossipMsgData(MESSAGE_DOMAIN_VALID_SNAPPY, uncompressed); } catch (e) { - return hashGossipMsgData(MESSAGE_DOMAIN_INVALID_SNAPPY, msgData); + return hashGossipMsgData(MESSAGE_DOMAIN_INVALID_SNAPPY, msg.data); } } } @@ -108,16 +79,11 @@ export function computeMsgIdPhase0( * ``` * https://github.com/ethereum/eth2.0-specs/blob/v1.1.0-alpha.7/specs/altair/p2p-interface.md#topics-and-messages */ -export function computeMsgIdAltair( - topic: GossipTopic, - topicStr: string, - msgData: Uint8Array, - uncompressCache: IUncompressCache -): Uint8Array { +export function computeMsgIdAltair(topic: GossipTopic, topicStr: string, msg: Eth2InMessage): Uint8Array { switch (topic.encoding ?? DEFAULT_ENCODING) { case GossipEncoding.ssz_snappy: try { - const uncompressed = uncompressCache.uncompress(msgData); + const uncompressed = getUncompressedData(msg); return hashGossipMsgData( MESSAGE_DOMAIN_VALID_SNAPPY, intToBytes(topicStr.length, 8), @@ -129,7 +95,7 @@ export function computeMsgIdAltair( MESSAGE_DOMAIN_INVALID_SNAPPY, intToBytes(topicStr.length, 8), Buffer.from(topicStr), - msgData + msg.data ); } } diff --git a/packages/lodestar/src/network/gossip/gossipsub.ts b/packages/lodestar/src/network/gossip/gossipsub.ts index 81a9cb6b471d..a8950ded91ad 100644 --- a/packages/lodestar/src/network/gossip/gossipsub.ts +++ b/packages/lodestar/src/network/gossip/gossipsub.ts @@ -1,7 +1,6 @@ /* eslint-disable @typescript-eslint/naming-convention */ import Gossipsub from "libp2p-gossipsub"; import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants"; -import {InMessage} from "libp2p-interfaces/src/pubsub"; import Libp2p from "libp2p"; import {AbortSignal} from "@chainsafe/abort-controller"; import {IBeaconConfig} from "@chainsafe/lodestar-config"; @@ -19,9 +18,10 @@ import { 
GossipTypeMap, ValidatorFnsByType, GossipHandlers, + Eth2InMessage, } from "./interface"; import {getGossipSSZType, GossipTopicCache, stringifyGossipTopic} from "./topic"; -import {computeMsgId, encodeMessageData, UncompressCache} from "./encoding"; +import {computeMsgId, encodeMessageData} from "./encoding"; import {DEFAULT_ENCODING} from "./constants"; import {GossipValidationError} from "./errors"; import {GOSSIP_MAX_SIZE} from "../../constants"; @@ -30,7 +30,6 @@ import {Map2d, Map2dArr} from "../../util/map"; import pipe from "it-pipe"; import PeerStreams from "libp2p-interfaces/src/pubsub/peer-streams"; import BufferList from "bl"; -// import {RPC} from "libp2p-interfaces/src/pubsub/message/rpc"; import {RPC} from "libp2p-gossipsub/src/message/rpc"; import {normalizeInRpcMessage} from "libp2p-interfaces/src/pubsub/utils"; @@ -54,12 +53,6 @@ export interface IGossipsubModules { gossipHandlers: GossipHandlers; } -/** - * Cache message id right in message so that we don't have to compute it twice. - * When we send messages to other peers, protobuf will just ignore `msgId` field. 
- */ -type Eth2InMessage = InMessage & {msgId?: Uint8Array}; - /** * Wrapper around js-libp2p-gossipsub with the following extensions: * - Eth2 message id @@ -80,7 +73,6 @@ export class Eth2Gossipsub extends Gossipsub { // Internal caches private readonly gossipTopicCache: GossipTopicCache; - private readonly uncompressCache = new UncompressCache(); private readonly validatorFnsByType: ValidatorFnsByType; @@ -108,7 +100,6 @@ export class Eth2Gossipsub extends Gossipsub { const {validatorFnsByType, jobQueues} = createValidatorFnsByType(gossipHandlers, { config, logger, - uncompressCache: this.uncompressCache, metrics, signal, }); @@ -144,7 +135,7 @@ export class Eth2Gossipsub extends Gossipsub { if (!msgId) { const topicStr = msg.topicIDs[0]; const topic = this.gossipTopicCache.getTopic(topicStr); - msgId = computeMsgId(topic, topicStr, msg.data, this.uncompressCache); + msgId = computeMsgId(topic, topicStr, msg); msg.msgId = msgId; } return msgId; @@ -233,7 +224,7 @@ export class Eth2Gossipsub extends Gossipsub { * @override https://github.com/libp2p/js-libp2p-interfaces/blob/ff3bd10704a4c166ce63135747e3736915b0be8d/src/pubsub/index.js#L513 * Note: this does not call super. All logic is re-implemented below */ - async validate(message: InMessage): Promise { + async validate(message: Eth2InMessage): Promise { try { // messages must have a single topicID const topicStr = Array.isArray(message.topicIDs) ? message.topicIDs[0] : undefined; diff --git a/packages/lodestar/src/network/gossip/interface.ts b/packages/lodestar/src/network/gossip/interface.ts index f6670481fd4e..9e18d6b313ce 100644 --- a/packages/lodestar/src/network/gossip/interface.ts +++ b/packages/lodestar/src/network/gossip/interface.ts @@ -103,6 +103,15 @@ export interface IGossipModules { chain: IBeaconChain; } +/** + * Extend the standard InMessage with additional fields so that we don't have to compute them twice. + * When we send messages to other peers, protobuf will just ignore these fields. 
+ */ +export type Eth2InMessage = InMessage & { + msgId?: Uint8Array; + uncompressedData?: Uint8Array; +}; + /** * Contains various methods for validation of incoming gossip topic data. * The conditions for valid gossip topics and how they are handled are specified here: @@ -114,7 +123,7 @@ export interface IGossipModules { * * js-libp2p-gossipsub expects validation functions that look like this */ -export type GossipValidatorFn = (topic: GossipTopic, message: InMessage, seenTimestampSec: number) => Promise; +export type GossipValidatorFn = (topic: GossipTopic, message: Eth2InMessage, seenTimestampSec: number) => Promise; export type ValidatorFnsByType = {[K in GossipType]: GossipValidatorFn}; diff --git a/packages/lodestar/src/network/gossip/validation/index.ts b/packages/lodestar/src/network/gossip/validation/index.ts index d235a41875b1..b15030da7ab8 100644 --- a/packages/lodestar/src/network/gossip/validation/index.ts +++ b/packages/lodestar/src/network/gossip/validation/index.ts @@ -14,16 +14,14 @@ import { } from "../interface"; import {GossipValidationError} from "../errors"; import {GossipActionError, GossipAction} from "../../../chain/errors"; -import {decodeMessageData, UncompressCache} from "../encoding"; import {createValidationQueues} from "./queue"; -import {DEFAULT_ENCODING} from "../constants"; import {getGossipAcceptMetadataByType, GetGossipAcceptMetadataFn} from "./onAccept"; +import {getUncompressedData} from "../encoding"; type ValidatorFnModules = { config: IChainForkConfig; logger: ILogger; metrics: IMetrics | null; - uncompressCache: UncompressCache; }; /** @@ -72,19 +70,17 @@ function getGossipValidatorFn( type: K, modules: ValidatorFnModules ): GossipValidatorFn { - const {config, logger, metrics, uncompressCache} = modules; + const {config, logger, metrics} = modules; const getGossipObjectAcceptMetadata = getGossipAcceptMetadataByType[type] as GetGossipAcceptMetadataFn; return async function gossipValidatorFn(topic, gossipMsg, 
seenTimestampSec) { // Define in scope above try {} to be used in catch {} if object was parsed let gossipObject; try { - const encoding = topic.encoding ?? DEFAULT_ENCODING; - // Deserialize object from bytes ONLY after being picked up from the validation queue try { const sszType = getGossipSSZType(topic); - const messageData = decodeMessageData(encoding, gossipMsg.data, uncompressCache); + const messageData = getUncompressedData(gossipMsg); gossipObject = // TODO: Review if it's really necessary to deserialize this as TreeBacked topic.type === GossipType.beacon_block || topic.type === GossipType.beacon_aggregate_and_proof From 69ce81e7f4f863f5cf963dbfdc71a8a5c80f0730 Mon Sep 17 00:00:00 2001 From: Lion - dapplion <35266934+dapplion@users.noreply.github.com> Date: Wed, 9 Mar 2022 22:19:32 +0530 Subject: [PATCH 06/10] Assert tag == local package.json version (#3840) * Fix RELEASE instructions * Assert tag == local package.json version --- .github/workflows/release.yml | 5 +++++ .../scripts/assert-same-package-version.sh | 19 +++++++++++++++++++ RELEASE.md | 4 ++-- 3 files changed, 26 insertions(+), 2 deletions(-) create mode 100755 .github/workflows/scripts/assert-same-package-version.sh diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8270e27be2bb..ab69f9a183aa 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -30,6 +30,11 @@ jobs: env: CURRENT_TAG: ${{ steps.get_tag.outputs.tag }} IGNORE_PATTERN: beta + + - name: Assert tag == package.json version + run: .github/workflows/scripts/assert-same-package-version.sh + env: + TAG: ${{ steps.get_tag.outputs.tag }} outputs: is_beta: ${{ contains(github.ref, 'beta') }} tag: ${{ steps.get_tag.outputs.tag }} diff --git a/.github/workflows/scripts/assert-same-package-version.sh b/.github/workflows/scripts/assert-same-package-version.sh new file mode 100755 index 000000000000..5dee55c5a2f7 --- /dev/null +++ 
b/.github/workflows/scripts/assert-same-package-version.sh @@ -0,0 +1,19 @@ +# Our current release strategy does not guarantee that the tagged commit +# has the same version in the package.json. +# +# If that's not the case, no version will be published to NPM and a faulty image will be published to dockerhub + +LOCAL_VERSION=$(jq -r .version lerna.json) + +if [ -z "$TAG" ]; then + echo "ENV TAG is empty" + exit 1 +fi + +if [[ $TAG == *"$LOCAL_VERSION"* ]]; then + echo "TAG $TAG includes LOCAL_VERSION $LOCAL_VERSION" + exit 0 +else + echo "TAG $TAG does not include LOCAL_VERSION $LOCAL_VERSION" + exit 1 +fi diff --git a/RELEASE.md b/RELEASE.md index 61bcf138504b..7aa57c5b010b 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -17,7 +17,7 @@ export TAG=v0.34.0-beta.0 && git tag -a $TAG 9fceb02 -m "$TAG" && git push origi 4. The team creates a PR to bump `master` to the next version (in the example: `v0.35.0`) and continues releasing nightly builds. ``` -lerna version minor --no-git-tag-version +lerna version minor --no-git-tag-version --force-publish ``` After 3-5 days of testing: @@ -25,7 +25,7 @@ After 3-5 days of testing: 5. Tag final stable commit as `v0.34.0`, release and publish the stable release. This commit will be in `v0.34.x` branch and may note be on `master` if beta candidate required bug fixes.
``` -export TAG=v0.34.0 git && tag -a $TAG 9fceb02 -m "$TAG" && git push origin $TAG +export TAG=v0.34.0 && git tag -a $TAG 9fceb02 -m "$TAG" && git push origin $TAG ``` ## Pre-Releases From 5e2a644c639a1ec8396e0c7c5600ac5199c5b3f5 Mon Sep 17 00:00:00 2001 From: g11tech <76567250+g11tech@users.noreply.github.com> Date: Thu, 10 Mar 2022 22:57:16 +0530 Subject: [PATCH 07/10] Kiln testnet config (#3844) * Kiln testnet config * nethermind changes for auth url in devnet 5 * default port update for engine api * updating the kiln config --- .github/workflows/test-sim-merge.yml | 2 +- kiln/devnets/README.md | 10 ++++---- kiln/devnets/devnet4.vars | 15 ------------ kiln/devnets/devnet5.vars | 13 +++++----- kiln/devnets/kiln.vars | 16 +++++++++++++ kiln/devnets/setup.sh | 17 ++++++------- kiln/gethdocker/pre-merge.sh | 2 +- packages/cli/src/networks/index.ts | 10 ++++---- .../cli/src/networks/{kintsugi.ts => kiln.ts} | 8 +++---- .../networks/{kintsugi.ts => kiln.ts} | 24 +++++++++---------- packages/config/src/networks.ts | 8 +++---- packages/lodestar/src/executionEngine/http.ts | 7 +++++- 12 files changed, 70 insertions(+), 62 deletions(-) delete mode 100644 kiln/devnets/devnet4.vars create mode 100644 kiln/devnets/kiln.vars rename packages/cli/src/networks/{kintsugi.ts => kiln.ts} (55%) rename packages/config/src/chainConfig/networks/{kintsugi.ts => kiln.ts} (63%) diff --git a/.github/workflows/test-sim-merge.yml b/.github/workflows/test-sim-merge.yml index 42d8416f34e3..3d9a3ea7a8f6 100644 --- a/.github/workflows/test-sim-merge.yml +++ b/.github/workflows/test-sim-merge.yml @@ -4,7 +4,7 @@ on: [pull_request, push] env: GETH_COMMIT: 59a36720b322a1b7e7084de30d6bad2371e5a067 - NETHERMIND_COMMIT: 93a0799441c567f4075f544ae33379d77f269117 + NETHERMIND_COMMIT: e6537a3937b7af0cc9d7f892d5698394490eeec4 jobs: sim-merge-tests: diff --git a/kiln/devnets/README.md b/kiln/devnets/README.md index 93e94d92d0b9..29236bff4094 100644 --- a/kiln/devnets/README.md +++ b/kiln/devnets/README.md 
@@ -11,18 +11,18 @@ This is a setup to run and join the devnet with a single shell command. This scr ###### Just run the script with arguments ```bash -cd kintsugi/devnets -./setup.sh --dataDir kintsugi-data --elClient geth --devnetVars ./kintsugi.vars [--dockerWithSudo --withTerminal "gnome-terminal --disable-factory --"] +cd kiln/devnets +./setup.sh --dataDir kiln-data --elClient geth --devnetVars ./kiln.vars [--dockerWithSudo --withTerminal "gnome-terminal --disable-factory --"] ``` ###### Example scenarios 1. Run with separate terminals launched & attached (best for testing in local) : - `./setup.sh --dataDir kintsugi-data --elClient nethermind --devnetVars ./kintsugi.vars --withTerminal "gnome-terminal --disable-factory --" --dockerWithSudo ` + `./setup.sh --dataDir kiln-data --elClient nethermind --devnetVars ./kiln.vars --withTerminal "gnome-terminal --disable-factory --" --dockerWithSudo ` 2. Run _in-terminal_ attached with logs interleaved (best for testing in remote shell) : - `./setup.sh --dataDir kintsugi-data --elClient nethermind --devnetVars ./kintsugi.vars --dockerWithSudo` + `./setup.sh --dataDir kiln-data --elClient nethermind --devnetVars ./kiln.vars --dockerWithSudo` 3. Run detached (best for leaving it to run, typically after testing 1 or 2): - `./setup.sh --dataDir kintsugi-data --elClient nethermind --devnetVars ./kintsugi.vars --detached --dockerWithSudo` + `./setup.sh --dataDir kiln-data --elClient nethermind --devnetVars ./kiln.vars --detached --dockerWithSudo` You can alternate between `geth` and `nethermind` to experiment with the ELs being out of sync ( and catching up) with `lodestar`. 
diff --git a/kiln/devnets/devnet4.vars b/kiln/devnets/devnet4.vars deleted file mode 100644 index 631250a984db..000000000000 --- a/kiln/devnets/devnet4.vars +++ /dev/null @@ -1,15 +0,0 @@ -DEVNET_NAME=devnet4 - -GETH_IMAGE=parithoshj/geth:merge-8f408a2 -NETHERMIND_IMAGE=nethermindeth/nethermind:kiln_0.1 -LODESTAR_IMAGE=g11tech/lodestar:kilnv1 - -CONFIG_GIT_DIR=merge-devnet-4 - -LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls http://127.0.0.1:8545 --api.rest.enabled --api.rest.host 0.0.0.0" - -NETHERMIND_EXTRA_ARGS="--config kiln --Init.WebSocketsEnabled=true --JsonRpc.Enabled=true --JsonRpc.EnabledModules=net,eth,consensus,engine,subscribe,web3 --JsonRpc.Port=8545 --JsonRpc.WebSocketsPort=8546 --JsonRpc.Host=0.0.0.0 --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=5000000000 --Init.DiagnosticMode=None" - -GETH_EXTRA_ARGS="--http --http.api engine,net,eth --http.port 8545 --ws --ws.api net,eth,engine --ws.port=8546 --ws.addr 0.0.0.0 --allow-insecure-unlock --networkid 1337752 --http.addr 0.0.0.0" - -EXTRA_BOOTNODES="" diff --git a/kiln/devnets/devnet5.vars b/kiln/devnets/devnet5.vars index 0e98f0f761ca..72f5cab23cab 100644 --- a/kiln/devnets/devnet5.vars +++ b/kiln/devnets/devnet5.vars @@ -1,15 +1,16 @@ DEVNET_NAME=devnet5 -GETH_IMAGE=parithoshj/geth:merge-59a3672 -NETHERMIND_IMAGE=nethermindeth/nethermind:kiln_0.4 +GETH_IMAGE=parithoshj/geth:merge-876ca42 +NETHERMIND_IMAGE=nethermindeth/nethermind:kiln_0.6 LODESTAR_IMAGE=chainsafe/lodestar:next - CONFIG_GIT_DIR=merge-devnet-5 -LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls http://127.0.0.1:8545 --api.rest.enabled --api.rest.host 0.0.0.0" +JWT_SECRET="0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d" + +LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls http://127.0.0.1:8551 --api.rest.enabled --api.rest.host 0.0.0.0" 
-NETHERMIND_EXTRA_ARGS="--config kiln --Init.WebSocketsEnabled=true --JsonRpc.Enabled=true --JsonRpc.EnabledModules=net,eth,consensus,engine,subscribe,web3 --JsonRpc.Port=8545 --JsonRpc.WebSocketsPort=8546 --JsonRpc.Host=0.0.0.0 --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=500000000000 --Init.DiagnosticMode=None" +NETHERMIND_EXTRA_ARGS="--config kiln --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=500000000000 --Init.DiagnosticMode=None --JsonRpc.Enabled=true --JsonRpc.Host=0.0.0.0 --JsonRpc.AdditionalRpcUrls \"http://localhost:8545|http;ws|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:8551|http;ws|net;eth;subscribe;engine;web3;client\"" -GETH_EXTRA_ARGS="--http --http.api engine,net,eth --http.port 8545 --ws --ws.api net,eth,engine --ws.port=8546 --ws.addr 0.0.0.0 --allow-insecure-unlock --networkid 1337762 --http.addr 0.0.0.0" +GETH_EXTRA_ARGS="--http --http.api engine,net,eth --http.port 8545 --allow-insecure-unlock --http.addr 0.0.0.0 --authrpc.port=8551 --networkid 1337762" EXTRA_BOOTNODES="" diff --git a/kiln/devnets/kiln.vars b/kiln/devnets/kiln.vars new file mode 100644 index 000000000000..29efe27e731f --- /dev/null +++ b/kiln/devnets/kiln.vars @@ -0,0 +1,16 @@ +DEVNET_NAME=kiln + +GETH_IMAGE=parithoshj/geth:merge-876ca42 +NETHERMIND_IMAGE=nethermindeth/nethermind:kiln_0.6 +LODESTAR_IMAGE=chainsafe/lodestar:next +CONFIG_GIT_DIR=kiln + +JWT_SECRET="0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d" + +LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls http://127.0.0.1:8551 --api.rest.enabled --api.rest.host 0.0.0.0" + +NETHERMIND_EXTRA_ARGS="--config kiln --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=1000000000000 --Init.DiagnosticMode=None --JsonRpc.Enabled=true --JsonRpc.Host=0.0.0.0 --JsonRpc.AdditionalRpcUrls 
\"http://localhost:8545|http;ws|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:8551|http;ws|net;eth;subscribe;engine;web3;client\"" + +GETH_EXTRA_ARGS="--http --http.api engine,net,eth --http.port 8545 --allow-insecure-unlock --http.addr 0.0.0.0 --authrpc.port=8551 --networkid 1337802" + +EXTRA_BOOTNODES="" diff --git a/kiln/devnets/setup.sh b/kiln/devnets/setup.sh index 65eee17a670b..23726bf60c2a 100755 --- a/kiln/devnets/setup.sh +++ b/kiln/devnets/setup.sh @@ -15,9 +15,9 @@ nethermindImage=$NETHERMIND_IMAGE if [ ! -n "$dataDir" ] || [ ! -n "$devnetVars" ] || ([ "$elClient" != "geth" ] && [ "$elClient" != "nethermind" ]) then echo "usage: ./setup.sh --dataDir --elClient --devetVars [--dockerWithSudo --withTerminal \"gnome-terminal --disable-factory --\"]" - echo "example: ./setup.sh --dataDir devnet5-data --elClient nethermind --devnetVars ./devnet5.vars --dockerWithSudo --withTerminal \"gnome-terminal --disable-factory --\"" + echo "example: ./setup.sh --dataDir kiln-data --elClient nethermind --devnetVars ./kiln.vars --dockerWithSudo --withTerminal \"gnome-terminal --disable-factory --\"" echo "Note: if running on macOS where gnome-terminal is not available, remove the gnome-terminal related flags." 
- echo "example: ./setup.sh --dataDir devnet5-data --elClient geth --devnetVars ./devnet5.vars" + echo "example: ./setup.sh --dataDir kiln-data --elClient geth --devnetVars ./kiln.vars" exit; fi @@ -86,9 +86,9 @@ then fi; if [ $platform == 'Darwin' ] then - elCmd="$dockerCmd --rm --name $elName -v $currentDir/$dataDir/geth:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data $GETH_EXTRA_ARGS" + elCmd="$dockerCmd --rm --name $elName -v $currentDir/$dataDir:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data/geth --authrpc.jwtsecret /data/jwtsecret $GETH_EXTRA_ARGS" else - elCmd="$dockerCmd --rm --name $elName --network host -v $currentDir/$dataDir/geth:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data $GETH_EXTRA_ARGS" + elCmd="$dockerCmd --rm --name $elName --network host -v $currentDir/$dataDir:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data/geth --authrpc.jwtsecret /data/jwtsecret $GETH_EXTRA_ARGS" fi elif [ "$elClient" == "nethermind" ] then @@ -99,9 +99,9 @@ then if [ $platform == 'Darwin' ] then - elCmd="$dockerCmd --rm --name $elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir/nethermind:/data $NETHERMIND_IMAGE --datadir /data --Init.ChainSpecPath=/config/nethermind_genesis.json $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" + elCmd="$dockerCmd --rm --name $elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir /data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" else - elCmd="$dockerCmd --rm --name $elName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir/nethermind:/data $NETHERMIND_IMAGE --datadir /data --Init.ChainSpecPath=/config/nethermind_genesis.json $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes 
$EXTRA_BOOTNODES$bootNode" + elCmd="$dockerCmd --rm --name $elName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir /data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" fi fi @@ -117,11 +117,12 @@ clName="$DEVNET_NAME-lodestar" if [ $platform == 'Darwin' ] then - clCmd="$dockerCmd --rm --name $clName --net=container:$elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir/lodestar:/data $LODESTAR_IMAGE beacon --rootDir /data --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --network.discv5.bootEnrs $bootEnr" + clCmd="$dockerCmd --rm --name $clName --net=container:$elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE beacon --rootDir /data/lodestar --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --bootnodesFile /config/boot_enr.yaml --jwt-secret /data/jwtsecret" else - clCmd="$dockerCmd --rm --name $clName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir/lodestar:/data $LODESTAR_IMAGE beacon --rootDir /data --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --network.discv5.bootEnrs $bootEnr" + clCmd="$dockerCmd --rm --name $clName --network host -v $currentDir/$dataDir/$configGitDir:/config -v 
$currentDir/$dataDir:/data $LODESTAR_IMAGE beacon --rootDir /data/lodestar --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --bootnodesFile /config/boot_enr.yaml --jwt-secret /data/jwtsecret" fi +echo -n $JWT_SECRET > $dataDir/jwtsecret run_cmd "$elCmd" elPid=$! echo "elPid= $elPid" diff --git a/kiln/gethdocker/pre-merge.sh b/kiln/gethdocker/pre-merge.sh index 8badbd0127e7..f8acd3c4b081 100755 --- a/kiln/gethdocker/pre-merge.sh +++ b/kiln/gethdocker/pre-merge.sh @@ -5,5 +5,5 @@ currentDir=$(pwd) . $scriptDir/common-setup.sh -# EL_BINARY_DIR refers to the local docker image build from kintsugi/gethdocker folder +# EL_BINARY_DIR refers to the local docker image build from kiln/gethdocker folder docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --network host -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --http --ws -http.api "engine,net,eth,miner" --allow-insecure-unlock --unlock $pubKey --password /data/password.txt --datadir /data --nodiscover --mine --jwt-secret $JWT_SECRET_HEX diff --git a/packages/cli/src/networks/index.ts b/packages/cli/src/networks/index.ts index 17f0771b6bd6..71ed21675ef4 100644 --- a/packages/cli/src/networks/index.ts +++ b/packages/cli/src/networks/index.ts @@ -12,10 +12,10 @@ import fs from "node:fs"; import got from "got"; import * as mainnet from "./mainnet"; import * as prater from "./prater"; -import * as kintsugi from "./kintsugi"; +import * as kiln from "./kiln"; -export type NetworkName = "mainnet" | "prater" | "kintsugi" | "dev"; -export const networkNames: NetworkName[] = ["mainnet", "prater", "kintsugi"]; +export type NetworkName = "mainnet" | "prater" | "kiln" | "dev"; +export const networkNames: NetworkName[] = ["mainnet", "prater", "kiln"]; export type WeakSubjectivityFetchOptions = { weakSubjectivityServerUrl: string; @@ -36,8 +36,8 @@ 
function getNetworkData( return mainnet; case "prater": return prater; - case "kintsugi": - return kintsugi; + case "kiln": + return kiln; default: throw Error(`Network not supported: ${network}`); } diff --git a/packages/cli/src/networks/kintsugi.ts b/packages/cli/src/networks/kiln.ts similarity index 55% rename from packages/cli/src/networks/kintsugi.ts rename to packages/cli/src/networks/kiln.ts index fbce182dd0b7..2af7f827ce1c 100644 --- a/packages/cli/src/networks/kintsugi.ts +++ b/packages/cli/src/networks/kiln.ts @@ -1,12 +1,12 @@ -import {kintsugiChainConfig} from "@chainsafe/lodestar-config/networks"; +import {kilnChainConfig} from "@chainsafe/lodestar-config/networks"; -export const chainConfig = kintsugiChainConfig; +export const chainConfig = kilnChainConfig; /* eslint-disable max-len */ export const depositContractDeployBlock = 0; -export const genesisFileUrl = "https://raw.githubusercontent.com/eth-clients/merge-testnets/main/kintsugi/genesis.ssz"; +export const genesisFileUrl = "https://raw.githubusercontent.com/eth-clients/merge-testnets/main/kiln/genesis.ssz"; export const bootnodesFileUrl = - "https://raw.githubusercontent.com/eth-clients/merge-testnets/main/kintsugi/bootstrap_nodes.txt"; + "https://raw.githubusercontent.com/eth-clients/merge-testnets/main/kiln/bootstrap_nodes.txt"; export const bootEnrs = []; diff --git a/packages/config/src/chainConfig/networks/kintsugi.ts b/packages/config/src/chainConfig/networks/kiln.ts similarity index 63% rename from packages/config/src/chainConfig/networks/kintsugi.ts rename to packages/config/src/chainConfig/networks/kiln.ts index cad46a384471..7fcb5d34e96e 100644 --- a/packages/config/src/chainConfig/networks/kintsugi.ts +++ b/packages/config/src/chainConfig/networks/kiln.ts @@ -5,24 +5,24 @@ import {chainConfig as mainnet} from "../presets/mainnet"; /* eslint-disable max-len */ -export const kintsugiChainConfig: IChainConfig = { +export const kilnChainConfig: IChainConfig = { ...mainnet, - 
MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 72100, - // Dec 16th, 2021, 13:00 UTC - MIN_GENESIS_TIME: 1639659600, + MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 95000, + // Mar 11th, 2022, 14:00 UTC + MIN_GENESIS_TIME: 1647007200, // Gensis fork - GENESIS_FORK_VERSION: b("0x60000069"), + GENESIS_FORK_VERSION: b("0x70000069"), // 300 seconds (5 min) GENESIS_DELAY: 300, // Forking - ALTAIR_FORK_VERSION: b("0x61000070"), - ALTAIR_FORK_EPOCH: 10, + ALTAIR_FORK_VERSION: b("0x70000070"), + ALTAIR_FORK_EPOCH: 50, // Bellatrix - BELLATRIX_FORK_VERSION: b("0x62000071"), - BELLATRIX_FORK_EPOCH: 20, - TERMINAL_TOTAL_DIFFICULTY: BigInt(5000000000), + BELLATRIX_FORK_VERSION: b("0x70000071"), + BELLATRIX_FORK_EPOCH: 150, + TERMINAL_TOTAL_DIFFICULTY: BigInt(1000000000000), // Sharding SHARDING_FORK_VERSION: b("0x03000000"), SHARDING_FORK_EPOCH: Infinity, @@ -35,7 +35,7 @@ export const kintsugiChainConfig: IChainConfig = { // Deposit contract // --------------------------------------------------------------- // Custom Ethereum testnet - DEPOSIT_CHAIN_ID: 1337702, - DEPOSIT_NETWORK_ID: 1337702, + DEPOSIT_CHAIN_ID: 1337802, + DEPOSIT_NETWORK_ID: 1337802, DEPOSIT_CONTRACT_ADDRESS: b("0x4242424242424242424242424242424242424242"), }; diff --git a/packages/config/src/networks.ts b/packages/config/src/networks.ts index 35ada25f7e31..9fe09c3089ef 100644 --- a/packages/config/src/networks.ts +++ b/packages/config/src/networks.ts @@ -1,13 +1,13 @@ import {IChainConfig} from "./chainConfig"; import {mainnetChainConfig} from "./chainConfig/networks/mainnet"; import {praterChainConfig} from "./chainConfig/networks/prater"; -import {kintsugiChainConfig} from "./chainConfig/networks/kintsugi"; +import {kilnChainConfig} from "./chainConfig/networks/kiln"; -export {mainnetChainConfig, praterChainConfig, kintsugiChainConfig}; +export {mainnetChainConfig, praterChainConfig, kilnChainConfig}; -export type NetworkName = "mainnet" | "prater" | "kintsugi"; +export type NetworkName = "mainnet" | "prater" | "kiln"; export 
const networksChainConfig: Record = { mainnet: mainnetChainConfig, prater: praterChainConfig, - kintsugi: kintsugiChainConfig, + kiln: kilnChainConfig, }; diff --git a/packages/lodestar/src/executionEngine/http.ts b/packages/lodestar/src/executionEngine/http.ts index e9ae4f30ced4..fa6ebd1ec973 100644 --- a/packages/lodestar/src/executionEngine/http.ts +++ b/packages/lodestar/src/executionEngine/http.ts @@ -37,7 +37,12 @@ export type ExecutionEngineHttpOpts = { }; export const defaultExecutionEngineHttpOpts: ExecutionEngineHttpOpts = { - urls: ["http://localhost:8550"], + /** + * By default ELs host engine api on an auth protected 8551 port, would need a jwt secret to be + * specified to bundle jwt tokens if that is the case. In case one has access to an open + * port/url, one can override this and skip providing a jwt secret. + */ + urls: ["http://localhost:8551"], timeout: 12000, }; From e30eca0b2af1870113d36a5c6da2d85f8549e79d Mon Sep 17 00:00:00 2001 From: tuyennhv Date: Fri, 11 Mar 2022 00:41:23 +0700 Subject: [PATCH 08/10] Migrate to libp2p > 0.35.8 and gossipsub 0.12.2 (#3661) * Migrate to libp2p 0.36.2 and gossipsub 0.13.0 * Fix merge issues * Use fs.renameSync * Remove unnecessary async/await Co-authored-by: Lion - dapplion <35266934+dapplion@users.noreply.github.com> --- packages/cli/src/cmds/beacon/handler.ts | 4 + .../cli/src/migrations/deleteOldPeerstore.ts | 38 +++ packages/cli/src/migrations/index.ts | 1 + packages/lodestar/package.json | 16 +- .../lodestar/src/network/gossip/gossipsub.ts | 106 ++++-- packages/lodestar/src/network/network.ts | 7 +- .../lodestar/src/network/nodejs/bundle.ts | 1 - packages/lodestar/src/network/nodejs/util.ts | 6 +- .../lodestar/src/network/peers/datastore.ts | 187 +++++++++++ .../lodestar/src/network/peers/discover.ts | 2 +- .../lodestar/src/network/peers/peerManager.ts | 4 +- .../lodestar/src/network/reqresp/reqResp.ts | 8 +- .../src/network/reqresp/request/index.ts | 2 +- .../src/network/reqresp/response/index.ts | 2 
+- packages/lodestar/src/network/util.ts | 10 +- .../test/e2e/network/gossipsub.test.ts | 2 +- .../test/unit/network/nodejs/libp2p.test.ts | 2 +- .../test/unit/network/peers/datastore.test.ts | 72 +++++ .../test/unit/network/peers/score.test.ts | 2 +- .../lodestar/test/unit/network/util.test.ts | 4 +- yarn.lock | 302 ++++++++++++------ 21 files changed, 619 insertions(+), 159 deletions(-) create mode 100644 packages/cli/src/migrations/deleteOldPeerstore.ts create mode 100644 packages/cli/src/migrations/index.ts create mode 100644 packages/lodestar/src/network/peers/datastore.ts create mode 100644 packages/lodestar/test/unit/network/peers/datastore.test.ts diff --git a/packages/cli/src/cmds/beacon/handler.ts b/packages/cli/src/cmds/beacon/handler.ts index 328773e3882b..cd5c9695c4c8 100644 --- a/packages/cli/src/cmds/beacon/handler.ts +++ b/packages/cli/src/cmds/beacon/handler.ts @@ -15,6 +15,7 @@ import {IBeaconArgs} from "./options"; import {getBeaconPaths} from "./paths"; import {initBeaconState} from "./initBeaconState"; import {getVersion, getVersionGitData} from "../../util/version"; +import {deleteOldPeerstorePreV036} from "../../migrations"; /** * Runs a beacon node. @@ -55,6 +56,9 @@ export async function beaconHandler(args: IBeaconArgs & IGlobalArgs): Promise = null; // additional metrics registries const metricsRegistries = []; diff --git a/packages/cli/src/migrations/deleteOldPeerstore.ts b/packages/cli/src/migrations/deleteOldPeerstore.ts new file mode 100644 index 000000000000..1f98f5b9b4dd --- /dev/null +++ b/packages/cli/src/migrations/deleteOldPeerstore.ts @@ -0,0 +1,38 @@ +import * as fs from "node:fs"; +import {LevelDbController} from "@chainsafe/lodestar-db"; +import {ILogger} from "@chainsafe/lodestar-utils"; + +/** + * As of libp2p v0.36.0 (https://github.com/libp2p/js-libp2p/commit/978eb3676fad5d5d50ddb28d1a7868f448cbb20b) + * the peerstore format has changed in a breaking way. 
+ * + * Because of that, we need to wipe the old peerstore if it exists. + */ +export async function deleteOldPeerstorePreV036(peerStoreDir: string, logger: ILogger): Promise { + const db = new LevelDbController({name: peerStoreDir}, {logger}); + await db.start(); + + // only read a single key + const keys = await db.keys({limit: 1}); + // the old peerstore had keys that look like so: + const isOldPeerstore = Boolean( + keys.find((k) => { + const key = k.toString(); + return ( + key.startsWith("/peers/addrs") || + key.startsWith("/peers/keys") || + key.startsWith("/peers/metadata") || + key.startsWith("/peers/proto") + ); + }) + ); + await db.stop(); + + if (isOldPeerstore) { + if (peerStoreDir.endsWith("/")) { + peerStoreDir = peerStoreDir.slice(0, peerStoreDir.length - 1); + } + fs.renameSync(peerStoreDir, peerStoreDir + ".pre-0.36.0.bak"); + logger.info("Migrated old peerstore"); + } +} diff --git a/packages/cli/src/migrations/index.ts b/packages/cli/src/migrations/index.ts new file mode 100644 index 000000000000..c2cfa0e621b0 --- /dev/null +++ b/packages/cli/src/migrations/index.ts @@ -0,0 +1 @@ +export * from "./deleteOldPeerstore"; diff --git a/packages/lodestar/package.json b/packages/lodestar/package.json index 8aa84a8e68d3..c3a9fdf95956 100644 --- a/packages/lodestar/package.json +++ b/packages/lodestar/package.json @@ -77,8 +77,10 @@ "@chainsafe/persistent-merkle-tree": "^0.3.7", "@chainsafe/snappy-stream": "5.0.0", "@chainsafe/ssz": "^0.8.20", + "@chainsafe/as-sha256": "^0.2.4", "@ethersproject/abi": "^5.0.0", "@types/datastore-level": "^3.0.0", + "datastore-core": "^7.0.1", "bl": "^5.0.0", "buffer-xor": "^2.0.2", "cross-fetch": "^3.1.4", @@ -89,18 +91,18 @@ "gc-stats": "^1.4.0", "http-terminator": "^2.0.3", "interface-datastore": "^5.1.2", - "it-all": "^1.0.2", + "it-all": "^1.0.4", "it-pipe": "^1.1.0", "jwt-simple": "0.5.6", - "libp2p": "^0.32.4", - "libp2p-bootstrap": "^0.13.0", - "libp2p-gossipsub": "^0.11.1", - "libp2p-interfaces": "^1.1.0", - 
"libp2p-mdns": "^0.17.0", + "libp2p": "^0.36.2", + "libp2p-bootstrap": "^0.14.0", + "libp2p-gossipsub": "^0.13.0", + "libp2p-interfaces": "^4.0.4", + "libp2p-mdns": "^0.18.0", "libp2p-mplex": "^0.10.5", "libp2p-tcp": "^0.17.2", "multiaddr": "^10.0.1", - "peer-id": "^0.15.3", + "peer-id": "^0.16.0", "prom-client": "^13.2.0", "prometheus-gc-stats": "^0.6.3", "snappyjs": "^0.6.0", diff --git a/packages/lodestar/src/network/gossip/gossipsub.ts b/packages/lodestar/src/network/gossip/gossipsub.ts index a8950ded91ad..eb994d482ec5 100644 --- a/packages/lodestar/src/network/gossip/gossipsub.ts +++ b/packages/lodestar/src/network/gossip/gossipsub.ts @@ -1,6 +1,9 @@ /* eslint-disable @typescript-eslint/naming-convention */ import Gossipsub from "libp2p-gossipsub"; +import {messageIdToString} from "libp2p-gossipsub/src/utils/messageIdToString"; +import SHA256 from "@chainsafe/as-sha256"; import {ERR_TOPIC_VALIDATOR_IGNORE, ERR_TOPIC_VALIDATOR_REJECT} from "libp2p-gossipsub/src/constants"; +import {InMessage, utils} from "libp2p-interfaces/src/pubsub"; import Libp2p from "libp2p"; import {AbortSignal} from "@chainsafe/abort-controller"; import {IBeaconConfig} from "@chainsafe/lodestar-config"; @@ -27,9 +30,7 @@ import {GossipValidationError} from "./errors"; import {GOSSIP_MAX_SIZE} from "../../constants"; import {createValidatorFnsByType} from "./validation"; import {Map2d, Map2dArr} from "../../util/map"; -import pipe from "it-pipe"; import PeerStreams from "libp2p-interfaces/src/pubsub/peer-streams"; -import BufferList from "bl"; import {RPC} from "libp2p-gossipsub/src/message/rpc"; import {normalizeInRpcMessage} from "libp2p-interfaces/src/pubsub/utils"; @@ -88,6 +89,7 @@ export class Eth2Gossipsub extends Gossipsub { Dlazy: 6, scoreParams: computeGossipPeerScoreParams(modules), scoreThresholds: gossipScoreThresholds, + fastMsgIdFn: (msg: InMessage) => Buffer.from(SHA256.digest(msg.data)).toString("hex"), }); const {config, logger, metrics, signal, gossipHandlers} = 
modules; this.config = config; @@ -111,13 +113,13 @@ export class Eth2Gossipsub extends Gossipsub { } } - start(): void { - super.start(); + async start(): Promise { + return super.start(); } - stop(): void { + async stop(): Promise { try { - super.stop(); + await super.stop(); } catch (error) { if ((error as GossipValidationError).code !== "ERR_HEARTBEAT_NO_RUNNING") { throw error; @@ -141,31 +143,16 @@ export class Eth2Gossipsub extends Gossipsub { return msgId; } - // Temporaly reverts https://github.com/libp2p/js-libp2p-interfaces/pull/103 while a proper fixed is done upstream - // await-ing _processRpc causes messages to be processed 10-20 seconds latter than when received. This kills the node - async _processMessages( - idB58Str: string, - stream: AsyncIterable, - peerStreams: PeerStreams - ): Promise { - try { - await pipe(stream, async (source) => { - for await (const data of source) { - const rpcBytes = data instanceof Uint8Array ? data : data.slice(); - const rpcMsg = this._decodeRpc(rpcBytes); - - this._processRpc(idB58Str, peerStreams, rpcMsg).catch((e) => { - this.log("_processRpc error", (e as Error).stack); - }); - } - }); - } catch (err) { - this._onPeerDisconnected(peerStreams.id, err as Error); - } + /** + * Get cached message id string if we have it. + */ + getCachedMsgIdStr(msg: Eth2InMessage): string | undefined { + const cachedMsgId = msg.msgId; + return cachedMsgId ? messageIdToString(cachedMsgId) : undefined; } // Temporaly reverts https://github.com/libp2p/js-libp2p-interfaces/pull/103 while a proper fixed is done upstream - // await-ing _processRpc causes messages to be processed 10-20 seconds latter than when received. 
This kills the node + // Lodestar wants to use our own queue instead of gossipsub queue introduced in https://github.com/libp2p/js-libp2p-interfaces/pull/103 async _processRpc(idB58Str: string, peerStreams: PeerStreams, rpc: RPC): Promise { this.log("rpc from", idB58Str); const subs = rpc.subscriptions; @@ -204,11 +191,67 @@ export class Eth2Gossipsub extends Gossipsub { // not a direct implementation of js-libp2p-gossipsub, this is from gossipsub // https://github.com/ChainSafe/js-libp2p-gossipsub/blob/751ea73e9b7dc2287ca56786857d32ec2ce796b9/ts/index.ts#L366 if (rpc.control) { - super._processRpcControlMessage(idB58Str, rpc.control); + await super._processRpcControlMessage(idB58Str, rpc.control); } return true; } + /** + * Similar to gossipsub 0.13.0 except that no await + * TODO: override getMsgIdIfNotSeen and add metric + * See https://github.com/ChainSafe/js-libp2p-gossipsub/pull/187/files + */ + async _processRpcMessage(msg: InMessage): Promise { + let canonicalMsgIdStr; + if (this.getFastMsgIdStr && this.fastMsgIdCache) { + // check duplicate + // change: no await needed + const fastMsgIdStr = this.getFastMsgIdStr(msg); + canonicalMsgIdStr = this.fastMsgIdCache.get(fastMsgIdStr); + if (canonicalMsgIdStr !== undefined) { + void this.score.duplicateMessage(msg, canonicalMsgIdStr); + return; + } + // change: no await needed + canonicalMsgIdStr = messageIdToString(this.getMsgId(msg)); + + this.fastMsgIdCache.put(fastMsgIdStr, canonicalMsgIdStr); + } else { + // check duplicate + // change: no await needed + canonicalMsgIdStr = messageIdToString(this.getMsgId(msg)); + if (this.seenCache.has(canonicalMsgIdStr)) { + void this.score.duplicateMessage(msg, canonicalMsgIdStr); + return; + } + } + + // put in cache + this.seenCache.put(canonicalMsgIdStr); + + await this.score.validateMessage(canonicalMsgIdStr); + + // await super._processRpcMessage(msg); + // this is from libp2p-interface 4.0.4 + // 
https://github.com/libp2p/js-libp2p-interfaces/blob/libp2p-interfaces%404.0.4/packages/interfaces/src/pubsub/index.js#L461 + if (this.peerId.toB58String() === msg.from && !this.emitSelf) { + return; + } + + // Ensure the message is valid before processing it + try { + await this.validate(msg); + } catch (/** @type {any} */ err) { + this.log("Message is invalid, dropping it. %O", err); + return; + } + + // Emit to self: no need as we don't do that in this child class + // this._emitMessage(msg); + + return this._publish((utils.normalizeOutRpcMessage(msg) as unknown) as InMessage); + } + // // Snippet of _processRpcMessage from https://github.com/libp2p/js-libp2p-interfaces/blob/92245d66b0073f0a72fed9f7abcf4b533102f1fd/packages/interfaces/src/pubsub/index.js#L442 // async _processRpcMessage(msg: InMessage): Promise { // try { @@ -258,8 +301,9 @@ export class Eth2Gossipsub extends Gossipsub { // JobQueue may throw non-typed errors const code = e instanceof GossipValidationError ? e.code : ERR_TOPIC_VALIDATOR_IGNORE; // async to compute msgId with sha256 from multiformats/hashes/sha2 - await this.score.rejectMessage(message, code); - await this.gossipTracer.rejectMessage(message, code); + const messageId = await this.getCanonicalMsgIdStr(message); + await this.score.rejectMessage(message, messageId, code); + await this.gossipTracer.rejectMessage(messageId, code); throw e; } } diff --git a/packages/lodestar/src/network/network.ts b/packages/lodestar/src/network/network.ts index cc62c66ca9c2..bf8750002bc5 100644 --- a/packages/lodestar/src/network/network.ts +++ b/packages/lodestar/src/network/network.ts @@ -123,7 +123,7 @@ export class Network implements INetwork { this.reqResp.start(); this.metadata.start(this.getEnr(), this.config.getForkName(this.clock.currentSlot)); await this.peerManager.start(); - this.gossip.start(); + await this.gossip.start(); this.attnetsService.start(); this.syncnetsService.start(); const multiaddresses = this.libp2p.multiaddrs.map((m) => 
m.toString()).join(","); @@ -134,11 +134,10 @@ export class Network implements INetwork { // Must goodbye and disconnect before stopping libp2p await this.peerManager.goodbyeAndDisconnectAllPeers(); await this.peerManager.stop(); - this.gossip.stop(); + await this.gossip.stop(); this.reqResp.stop(); this.attnetsService.stop(); this.syncnetsService.stop(); - this.gossip.stop(); await this.libp2p.stop(); } @@ -229,7 +228,7 @@ export class Network implements INetwork { // Debug async connectToPeer(peer: PeerId, multiaddr: Multiaddr[]): Promise { - this.libp2p.peerStore.addressBook.add(peer, multiaddr); + await this.libp2p.peerStore.addressBook.add(peer, multiaddr); await this.libp2p.dial(peer); } diff --git a/packages/lodestar/src/network/nodejs/bundle.ts b/packages/lodestar/src/network/nodejs/bundle.ts index 131d8b8324d1..fa04409cf05b 100644 --- a/packages/lodestar/src/network/nodejs/bundle.ts +++ b/packages/lodestar/src/network/nodejs/bundle.ts @@ -59,7 +59,6 @@ export class NodejsNode extends LibP2p { datastore: options.datastore, peerStore: { persistence: !!options.datastore, - threshold: 10, }, config: { nat: { diff --git a/packages/lodestar/src/network/nodejs/util.ts b/packages/lodestar/src/network/nodejs/util.ts index 47174bd01811..637f745490a3 100644 --- a/packages/lodestar/src/network/nodejs/util.ts +++ b/packages/lodestar/src/network/nodejs/util.ts @@ -8,7 +8,7 @@ import {NodejsNode} from "./bundle"; import {defaultDiscv5Options, defaultNetworkOptions, INetworkOptions} from "../options"; import {isLocalMultiAddr, clearMultiaddrUDP} from ".."; import {ENR} from "@chainsafe/discv5"; -import LevelDatastore from "datastore-level"; +import {Eth2PeerDataStore} from "../peers/datastore"; export type NodeJsLibp2pOpts = { peerStoreDir?: string; @@ -42,9 +42,9 @@ export async function createNodeJsLibp2p( } } - let datastore: undefined | LevelDatastore = undefined; + let datastore: undefined | Eth2PeerDataStore = undefined; if (peerStoreDir) { - datastore = new 
LevelDatastore(peerStoreDir); + datastore = new Eth2PeerDataStore(peerStoreDir); await datastore.open(); } diff --git a/packages/lodestar/src/network/peers/datastore.ts b/packages/lodestar/src/network/peers/datastore.ts new file mode 100644 index 000000000000..05fe15c85860 --- /dev/null +++ b/packages/lodestar/src/network/peers/datastore.ts @@ -0,0 +1,187 @@ +import {BaseDatastore} from "datastore-core"; +import LevelDatastore from "datastore-level"; +import {Key, KeyQuery, Query, Options, Pair} from "interface-datastore"; + +type MemoryItem = { + lastAccessedMs: number; + data: Uint8Array; +}; + +/** + * Before libp2p 0.35, peerstore stays in memory and periodically write to db after n dirty items + * This has a memory issue because all peer data stays in memory and loaded at startup time + * This is written for libp2p >=0.35, we maintain the same mechanism but with bounded data structure + * This datastore includes a memory datastore and fallback to db datastore + * Use an in-memory datastore with last accessed time and _maxMemoryItems, on start it's empty (lazy load) + * - get: Search in-memory datastore first, if not found search from db. 
+ * - If found from db, add back to the in-memory datastore + * - Update lastAccessedMs + * - put: move oldest items from memory to db if there are more than _maxMemoryItems items in memory + * - update memory datastore, only update db datastore if there are at least _threshold dirty items + * - Update lastAccessedMs + */ +export class Eth2PeerDataStore extends BaseDatastore { + private _dbDatastore: LevelDatastore; + private _memoryDatastore: Map; + /** Same to PersistentPeerStore of the old libp2p implementation */ + private _dirtyItems = new Set(); + /** If there are more dirty items than threshold, commit data to db */ + private _threshold: number; + /** If there are more memory items than this, prune oldest ones from memory and move to db */ + private _maxMemoryItems: number; + + constructor( + dbDatastore: LevelDatastore | string, + {threshold = 5, maxMemoryItems = 50}: {threshold?: number | undefined; maxMemoryItems?: number | undefined} = {} + ) { + super(); + + if (threshold <= 0 || maxMemoryItems <= 0) { + throw Error(`Invalid threshold ${threshold} or maxMemoryItems ${maxMemoryItems}`); + } + if (threshold > maxMemoryItems) { + throw Error(`Threshold ${threshold} should be at most maxMemoryItems ${maxMemoryItems}`); + } + + this._dbDatastore = typeof dbDatastore === "string" ? 
new LevelDatastore(dbDatastore) : dbDatastore; + this._memoryDatastore = new Map(); + this._threshold = threshold; + this._maxMemoryItems = maxMemoryItems; + } + + async open(): Promise { + return this._dbDatastore.open(); + } + + async close(): Promise { + return this._dbDatastore.close(); + } + + async put(key: Key, val: Uint8Array): Promise { + return this._put(key, val, false); + } + + /** + * Same interface to put with "fromDb" option, if this item is updated back from db + * Move oldest items from memory data store to db if it's over this._maxMemoryItems + */ + async _put(key: Key, val: Uint8Array, fromDb = false): Promise { + while (this._memoryDatastore.size >= this._maxMemoryItems) { + // it's likely this is called only 1 time + await this.pruneMemoryDatastore(); + } + + const keyStr = key.toString(); + const memoryItem = this._memoryDatastore.get(keyStr); + if (memoryItem) { + // update existing + memoryItem.lastAccessedMs = Date.now(); + memoryItem.data = val; + } else { + // new + this._memoryDatastore.set(keyStr, {data: val, lastAccessedMs: Date.now()}); + } + + if (!fromDb) await this._addDirtyItem(keyStr); + } + + /** + * Check memory datastore - update lastAccessedMs, then db datastore + * If found in db datastore then update back the memory datastore + * This throws error if not found + * see https://github.com/ipfs/js-datastore-level/blob/38f44058dd6be858e757a1c90b8edb31590ec0bc/src/index.js#L102 + */ + async get(key: Key): Promise { + const keyStr = key.toString(); + const memoryItem = this._memoryDatastore.get(keyStr); + if (memoryItem) { + memoryItem.lastAccessedMs = Date.now(); + return memoryItem.data; + } + + // this throws error if not found + const dbValue = await this._dbDatastore.get(key); + // don't call this._memoryDatastore.set directly + // we want to get through prune() logic with fromDb as true + await this._put(key, dbValue, true); + return dbValue; + } + + async has(key: Key): Promise { + try { + await this.get(key); + } catch 
(err) { + // this is the same to how js-datastore-level handles notFound error + // https://github.com/ipfs/js-datastore-level/blob/38f44058dd6be858e757a1c90b8edb31590ec0bc/src/index.js#L121 + if (((err as unknown) as {notFound: boolean}).notFound) return false; + throw err; + } + return true; + } + + async delete(key: Key): Promise { + this._memoryDatastore.delete(key.toString()); + await this._dbDatastore.delete(key); + } + + async *_all(q: Query, options?: Options): AsyncIterable { + for (const [key, value] of this._memoryDatastore.entries()) { + yield { + key: new Key(key), + value: value.data, + }; + } + yield* this._dbDatastore.query(q, options); + } + + async *_allKeys(q: KeyQuery, options?: Options): AsyncIterable { + for (const key of this._memoryDatastore.keys()) { + yield new Key(key); + } + yield* this._dbDatastore.queryKeys(q, options); + } + + private async _addDirtyItem(keyStr: string): Promise { + this._dirtyItems.add(keyStr); + if (this._dirtyItems.size >= this._threshold) { + try { + await this._commitData(); + // eslint-disable-next-line no-empty + } catch (e) {} + } + } + + private async _commitData(): Promise { + const batch = this._dbDatastore.batch(); + for (const keyStr of this._dirtyItems) { + const memoryItem = this._memoryDatastore.get(keyStr); + if (memoryItem) { + batch.put(new Key(keyStr), memoryItem.data); + } + } + await batch.commit(); + this._dirtyItems.clear(); + } + + /** + * Prune from memory and move to db + */ + private async pruneMemoryDatastore(): Promise { + let oldestAccessedMs = Date.now() + 1000; + let oldestKey: string | undefined = undefined; + let oldestValue: Uint8Array | undefined = undefined; + + for (const [key, value] of this._memoryDatastore) { + if (value.lastAccessedMs < oldestAccessedMs) { + oldestAccessedMs = value.lastAccessedMs; + oldestKey = key; + oldestValue = value.data; + } + } + + if (oldestKey && oldestValue) { + await this._dbDatastore.put(new Key(oldestKey), oldestValue); + 
this._memoryDatastore.delete(oldestKey); + } + } +} diff --git a/packages/lodestar/src/network/peers/discover.ts b/packages/lodestar/src/network/peers/discover.ts index 5141d0d22bc5..f8b5ebd5f085 100644 --- a/packages/lodestar/src/network/peers/discover.ts +++ b/packages/lodestar/src/network/peers/discover.ts @@ -363,7 +363,7 @@ export class PeerDiscovery { // Must add the multiaddrs array to the address book before dialing // https://github.com/libp2p/js-libp2p/blob/aec8e3d3bb1b245051b60c2a890550d262d5b062/src/index.js#L638 - this.libp2p.peerStore.addressBook.add(peerId, [multiaddrTCP]); + await this.libp2p.peerStore.addressBook.add(peerId, [multiaddrTCP]); // Note: PeerDiscovery adds the multiaddrTCP beforehand const peerIdShort = prettyPrintPeerId(peerId); diff --git a/packages/lodestar/src/network/peers/peerManager.ts b/packages/lodestar/src/network/peers/peerManager.ts index af56504b1249..2526406b2cd9 100644 --- a/packages/lodestar/src/network/peers/peerManager.ts +++ b/packages/lodestar/src/network/peers/peerManager.ts @@ -551,7 +551,7 @@ export class PeerManager { } /** Register peer count metrics */ - private runPeerCountMetrics(metrics: IMetrics): void { + private async runPeerCountMetrics(metrics: IMetrics): Promise { let total = 0; const peersByDirection = new Map(); const peersByClient = new Map(); @@ -560,7 +560,7 @@ export class PeerManager { if (openCnx) { const direction = openCnx.stat.direction; peersByDirection.set(direction, 1 + (peersByDirection.get(direction) ?? 0)); - const client = getClientFromPeerStore(openCnx.remotePeer, this.libp2p.peerStore.metadataBook); + const client = await getClientFromPeerStore(openCnx.remotePeer, this.libp2p.peerStore.metadataBook); peersByClient.set(client, 1 + (peersByClient.get(client) ?? 
0)); total++; } diff --git a/packages/lodestar/src/network/reqresp/reqResp.ts b/packages/lodestar/src/network/reqresp/reqResp.ts index d112830add1f..3d7de09a754d 100644 --- a/packages/lodestar/src/network/reqresp/reqResp.ts +++ b/packages/lodestar/src/network/reqresp/reqResp.ts @@ -72,10 +72,10 @@ export class ReqResp implements IReqResp { this.metrics = modules.metrics; } - start(): void { + async start(): Promise { this.controller = new AbortController(); for (const [method, version, encoding] of protocolsSupported) { - this.libp2p.handle( + await this.libp2p.handle( formatProtocolId(method, version, encoding), (this.getRequestHandler({method, version, encoding}) as unknown) as (props: HandlerProps) => void ); @@ -83,9 +83,9 @@ export class ReqResp implements IReqResp { this.inboundRateLimiter.start(); } - stop(): void { + async stop(): Promise { for (const [method, version, encoding] of protocolsSupported) { - this.libp2p.unhandle(formatProtocolId(method, version, encoding)); + await this.libp2p.unhandle(formatProtocolId(method, version, encoding)); } this.controller.abort(); this.inboundRateLimiter.stop(); diff --git a/packages/lodestar/src/network/reqresp/request/index.ts b/packages/lodestar/src/network/reqresp/request/index.ts index f7ead84af6db..9d3b7d030fd9 100644 --- a/packages/lodestar/src/network/reqresp/request/index.ts +++ b/packages/lodestar/src/network/reqresp/request/index.ts @@ -55,7 +55,7 @@ export async function sendRequest { const {REQUEST_TIMEOUT, DIAL_TIMEOUT} = {...timeoutOptions, ...options}; const peer = prettyPrintPeerId(peerId); - const client = getClientFromPeerStore(peerId, libp2p.peerStore.metadataBook); + const client = await getClientFromPeerStore(peerId, libp2p.peerStore.metadataBook); const logCtx = {method, encoding, client, peer, requestId}; if (signal?.aborted) { diff --git a/packages/lodestar/src/network/reqresp/response/index.ts b/packages/lodestar/src/network/reqresp/response/index.ts index 32d5661db132..8c48a635a354 100644 
--- a/packages/lodestar/src/network/reqresp/response/index.ts +++ b/packages/lodestar/src/network/reqresp/response/index.ts @@ -46,7 +46,7 @@ export async function handleRequest( signal?: AbortSignal, requestId = 0 ): Promise { - const client = getClientFromPeerStore(peerId, libp2p.peerStore.metadataBook); + const client = await getClientFromPeerStore(peerId, libp2p.peerStore.metadataBook); const logCtx = {method: protocol.method, client, peer: prettyPrintPeerId(peerId), requestId}; let responseError: Error | null = null; diff --git a/packages/lodestar/src/network/util.ts b/packages/lodestar/src/network/util.ts index 965c5c4c44c0..b9f524e9f1b7 100644 --- a/packages/lodestar/src/network/util.ts +++ b/packages/lodestar/src/network/util.ts @@ -7,7 +7,7 @@ import PeerId from "peer-id"; import {Multiaddr} from "multiaddr"; import {networkInterfaces} from "node:os"; import {ENR} from "@chainsafe/discv5"; -import MetadataBook from "libp2p/src/peer-store/metadata-book"; +import {MetadataBook} from "libp2p/src/peer-store/types"; import {clientFromAgentVersion, ClientKind} from "./peers/client"; // peers @@ -70,11 +70,11 @@ export function prettyPrintPeerId(peerId: PeerId): string { return `${id.substr(0, 2)}...${id.substr(id.length - 6, id.length)}`; } -export function getClientFromPeerStore(peerId: PeerId, metadataBook: MetadataBook): ClientKind { - const agentVersion = getAgentVersionFromPeerStore(peerId, metadataBook); +export async function getClientFromPeerStore(peerId: PeerId, metadataBook: MetadataBook): Promise { + const agentVersion = await getAgentVersionFromPeerStore(peerId, metadataBook); return clientFromAgentVersion(agentVersion); } -export function getAgentVersionFromPeerStore(peerId: PeerId, metadataBook: MetadataBook): string { - return new TextDecoder().decode(metadataBook.getValue(peerId, "AgentVersion")) || "N/A"; +export async function getAgentVersionFromPeerStore(peerId: PeerId, metadataBook: MetadataBook): Promise { + return new 
TextDecoder().decode(await metadataBook.getValue(peerId, "AgentVersion")) || "N/A"; } diff --git a/packages/lodestar/test/e2e/network/gossipsub.test.ts b/packages/lodestar/test/e2e/network/gossipsub.test.ts index 95f665ea9ef5..a3e170575244 100644 --- a/packages/lodestar/test/e2e/network/gossipsub.test.ts +++ b/packages/lodestar/test/e2e/network/gossipsub.test.ts @@ -30,7 +30,7 @@ const opts: INetworkOptions = { discv5: null, }; -describe("network", function () { +describe("gossipsub", function () { if (this.timeout() < 15 * 1000) this.timeout(15 * 1000); this.retries(2); // This test fail sometimes, with a 5% rate. diff --git a/packages/lodestar/test/unit/network/nodejs/libp2p.test.ts b/packages/lodestar/test/unit/network/nodejs/libp2p.test.ts index 56bb5084b9c1..612783993ad4 100644 --- a/packages/lodestar/test/unit/network/nodejs/libp2p.test.ts +++ b/packages/lodestar/test/unit/network/nodejs/libp2p.test.ts @@ -23,7 +23,7 @@ describe("[network] nodejs libp2p", () => { await Promise.all([nodeA.start(), nodeB.start()]); - nodeA.peerStore.addressBook.add(nodeB.peerId, nodeB.multiaddrs); + await nodeA.peerStore.addressBook.add(nodeB.peerId, nodeB.multiaddrs); // connect await Promise.all([ diff --git a/packages/lodestar/test/unit/network/peers/datastore.test.ts b/packages/lodestar/test/unit/network/peers/datastore.test.ts new file mode 100644 index 000000000000..971e72840878 --- /dev/null +++ b/packages/lodestar/test/unit/network/peers/datastore.test.ts @@ -0,0 +1,72 @@ +import {expect} from "chai"; +import LevelDatastore from "datastore-level"; +import {Key} from "interface-datastore"; +import sinon from "sinon"; +import {Eth2PeerDataStore} from "../../../../src/network/peers/datastore"; + +describe("Eth2PeerDataStore", () => { + let eth2Datastore: Eth2PeerDataStore; + let dbDatastoreStub: sinon.SinonStubbedInstance & LevelDatastore; + const sandbox = sinon.createSandbox(); + + beforeEach(() => { + sandbox.useFakeTimers(); + dbDatastoreStub = 
sandbox.createStubInstance(LevelDatastore); + eth2Datastore = new Eth2PeerDataStore(dbDatastoreStub, {threshold: 2, maxMemoryItems: 3}); + }); + + afterEach(() => { + sandbox.restore(); + }); + + it("should persist to db after threshold put", async () => { + await eth2Datastore.put(new Key("k1"), Buffer.from("1")); + expect(dbDatastoreStub.batch.calledOnce).to.be.false; + await eth2Datastore.put(new Key("k2"), Buffer.from("2")); + expect(dbDatastoreStub.batch.calledOnce).to.be.true; + }); + + it("should persist to db the oldest item after max", async () => { + // oldest item + await eth2Datastore.put(new Key("k1"), Buffer.from("1")); + expect(await eth2Datastore.get(new Key("k1"))).to.be.deep.equal(Buffer.from("1")); + sandbox.clock.tick(1000); + + // 2nd, not call dbDatastoreStub.put yet + await eth2Datastore.put(new Key("k2"), Buffer.from("2")); + expect(await eth2Datastore.get(new Key("k1"))).to.be.deep.equal(Buffer.from("1")); + expect(dbDatastoreStub.put.calledOnce).to.be.false; + // 3rd item, not call dbDatastoreStub.put yet + await eth2Datastore.put(new Key("k3"), Buffer.from("3")); + expect(await eth2Datastore.get(new Key("k3"))).to.be.deep.equal(Buffer.from("3")); + expect(dbDatastoreStub.put.calledOnce).to.be.false; + + // 4th item, should evict 1st item since it's oldest + await eth2Datastore.put(new Key("k4"), Buffer.from("4")); + expect(await eth2Datastore.get(new Key("k4"))).to.be.deep.equal(Buffer.from("4")); + expect(dbDatastoreStub.put.calledOnceWith(new Key("k1"), Buffer.from("1"))).to.be.true; + + // still able to get k1 from datastore + expect(dbDatastoreStub.get.calledOnce).to.be.false; + dbDatastoreStub.get.resolves(Buffer.from("1")); + expect(await eth2Datastore.get(new Key("k1"))).to.be.deep.equal(Buffer.from("1")); + expect(dbDatastoreStub.get.calledOnce).to.be.true; + + // access k1 again, should not query db + expect(await eth2Datastore.get(new Key("k1"))).to.be.deep.equal(Buffer.from("1")); + 
expect(dbDatastoreStub.get.calledOnce).to.be.true; + expect(dbDatastoreStub.get.calledTwice).to.be.false; + }); + + it("should put to memory cache if item was found from db", async () => { + dbDatastoreStub.get.resolves(Buffer.from("1")); + // query db for the first time + expect(await eth2Datastore.get(new Key("k1"))).to.be.deep.equal(Buffer.from("1")); + expect(dbDatastoreStub.get.calledOnce).to.be.true; + + // this time it should not query from db + expect(await eth2Datastore.get(new Key("k1"))).to.be.deep.equal(Buffer.from("1")); + expect(dbDatastoreStub.get.calledOnce).to.be.true; + expect(dbDatastoreStub.get.calledTwice).to.be.false; + }); +}); diff --git a/packages/lodestar/test/unit/network/peers/score.test.ts b/packages/lodestar/test/unit/network/peers/score.test.ts index fe81c3f6e162..04cb0a35de54 100644 --- a/packages/lodestar/test/unit/network/peers/score.test.ts +++ b/packages/lodestar/test/unit/network/peers/score.test.ts @@ -25,7 +25,7 @@ describe("simple block provider score tracking", function () { ]; for (const [peerAction, times] of timesToBan) - it(`Should ban peer after ${times} ${peerAction}`, () => { + it(`Should ban peer after ${times} ${peerAction}`, async () => { const {scoreStore} = mockStore(); for (let i = 0; i < times; i++) scoreStore.applyAction(peer, peerAction); expect(scoreStore.getScoreState(peer)).to.be.equal(ScoreState.Banned); diff --git a/packages/lodestar/test/unit/network/util.test.ts b/packages/lodestar/test/unit/network/util.test.ts index ce9a0d0493ab..bdfb4e474442 100644 --- a/packages/lodestar/test/unit/network/util.test.ts +++ b/packages/lodestar/test/unit/network/util.test.ts @@ -82,14 +82,14 @@ describe("getAgentVersionFromPeerStore", () => { // Write peers to peerStore for (let i = 0; i < numPeers; i++) { const peerId = await createPeerId(); - libp2p.peerStore.metadataBook._setValue(peerId, "AgentVersion", testAgentVersion); + await libp2p.peerStore.metadataBook.setValue(peerId, "AgentVersion", testAgentVersion); 
peers.push(peerId); } // start the benchmark const start = Date.now(); for (const peer of peers) { - const version = getAgentVersionFromPeerStore(peer, libp2p.peerStore.metadataBook); + const version = await getAgentVersionFromPeerStore(peer, libp2p.peerStore.metadataBook); expect(version).to.be.equal(new TextDecoder().decode(testAgentVersion)); } const timeDiff = Date.now() - start; diff --git a/yarn.lock b/yarn.lock index 754a109d1514..3a2a82b5f098 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1608,18 +1608,6 @@ npmlog "^4.1.2" write-file-atomic "^3.0.3" -"@motrix/nat-api@^0.3.1": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@motrix/nat-api/-/nat-api-0.3.2.tgz#a1164e25b1401279e2170666b0df3455812e7e1e" - integrity sha512-T3LSHnEUULbSU1o1zCZZ1ul8l8Jm98f0fz/0BeF7DhNvrV63YllLCD4vUR9hFZWu/+WTIVPnbH8dBK5Ckuveuw== - dependencies: - async "^3.2.0" - debug "^4.3.1" - default-gateway "^6.0.3" - request "^2.88.2" - unordered-array-remove "^1.0.2" - xml2js "^0.4.23" - "@multiformats/base-x@^4.0.1": version "4.0.1" resolved "https://registry.yarnpkg.com/@multiformats/base-x/-/base-x-4.0.1.tgz#95ff0fa58711789d53aefb2590a8b7a4e715d121" @@ -1635,6 +1623,11 @@ resolved "https://registry.yarnpkg.com/@noble/ed25519/-/ed25519-1.3.0.tgz#a55b6bdd18991d6051f819e9c71d9341706a751e" integrity sha512-k6ddjHcmfHF5LoZRv2j7WktgY8C8lzAqiu5ukWfGIlPUlpLn1tn1pfILXaXHY1DCG0s3qvGM1SluLtVpckWwMA== +"@noble/ed25519@^1.5.1": + version "1.5.1" + resolved "https://registry.yarnpkg.com/@noble/ed25519/-/ed25519-1.5.1.tgz#e5f602440b03046b02b79a49c143460bbb728e64" + integrity sha512-VX0TyIhKghm/NoTno/k71oCQO55f36yiSwuNOqEHu9BvxQuzel0tsvcsU2KjX/iN6pCkN53Bvfq1+gWNiwXDGQ== + "@noble/secp256k1@^1.3.0": version "1.3.0" resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.3.0.tgz#426880cf0355b24d81c129af1ec31dfa6eee8b9c" @@ -2278,10 +2271,12 @@ dependencies: datastore-level "*" -"@types/debug@^4.1.5": - version "4.1.5" - resolved 
"https://registry.yarnpkg.com/@types/debug/-/debug-4.1.5.tgz#b14efa8852b7768d898906613c23f688713e02cd" - integrity sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ== +"@types/debug@^4.1.7": + version "4.1.7" + resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82" + integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg== + dependencies: + "@types/ms" "*" "@types/encoding-down@*": version "5.0.0" @@ -2474,6 +2469,11 @@ resolved "https://registry.yarnpkg.com/@types/mockery/-/mockery-1.4.29.tgz#9ba22df37f07e3780fff8531d1a38e633f9457a5" integrity sha1-m6It838H43gP/4Ux0aOOYz+UV6U= +"@types/ms@*": + version "0.7.31" + resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197" + integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== + "@types/node-fetch@^2.5.0": version "2.5.10" resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.10.tgz#9b4d4a0425562f9fcea70b12cb3fcdd946ca8132" @@ -2942,7 +2942,7 @@ ansi-styles@^4.0.0, ansi-styles@^4.1.0: dependencies: color-convert "^2.0.1" -any-signal@2.1.2, any-signal@^2.1.0, any-signal@^2.1.1: +any-signal@2.1.2, any-signal@^2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/any-signal/-/any-signal-2.1.2.tgz#8d48270de0605f8b218cf9abe8e9c6a0e7418102" integrity sha512-B+rDnWasMi/eWcajPcCWSlYc7muXOrcYrqgyzcdKisl2H/WTlQ0gip1KyQfr0ZlxJdsuWCj/LWwQm7fhyhRfIQ== @@ -2950,6 +2950,11 @@ any-signal@2.1.2, any-signal@^2.1.0, any-signal@^2.1.1: abort-controller "^3.0.0" native-abort-controller "^1.0.3" +any-signal@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/any-signal/-/any-signal-3.0.0.tgz#4f6ee491e5cdda9e9a544f50fdf1d14be40535b6" + integrity sha512-l1H1GEkGGIXVGfCtvq8N68YI7gHajmfzRdKhmb8sGyAQpLCblirLa8eB09j4uKaiwe7vodAChocUf7AT3mYq5g== + 
anymatch@~3.1.1: version "3.1.2" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" @@ -4214,6 +4219,40 @@ datastore-core@^5.0.0: it-take "^1.0.1" uint8arrays "^3.0.0" +datastore-core@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/datastore-core/-/datastore-core-7.0.0.tgz#1dff18ea0504692e63ab818e6ac9e448cac0727d" + integrity sha512-FdudjeH+GHjbuU8X1mIKIv8uHAErF6qtjqWFJiVKOdITk6KdiSKis4Hsfu80RWDEFQ9l9s47XIVf5FMKTtoeWA== + dependencies: + debug "^4.1.1" + err-code "^3.0.1" + interface-datastore "^6.0.2" + it-drain "^1.0.4" + it-filter "^1.0.2" + it-map "^1.0.5" + it-merge "^1.0.1" + it-pipe "^1.1.0" + it-pushable "^1.4.2" + it-take "^1.0.1" + uint8arrays "^3.0.0" + +datastore-core@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/datastore-core/-/datastore-core-7.0.1.tgz#f50f30bb55474a569118d41bba6052896b096aec" + integrity sha512-TrV0PRtwwDo2OfzYpnVQmVgDc4HwtpYkzb6da5GZxKElZN7eDT5mBtrkVbXbyTn+Y2+WPiMBm6/KbJD7p0TBfA== + dependencies: + debug "^4.1.1" + err-code "^3.0.1" + interface-datastore "^6.0.2" + it-drain "^1.0.4" + it-filter "^1.0.2" + it-map "^1.0.5" + it-merge "^1.0.1" + it-pipe "^1.1.0" + it-pushable "^1.4.2" + it-take "^1.0.1" + uint8arrays "^3.0.0" + datastore-level@*, datastore-level@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/datastore-level/-/datastore-level-6.0.2.tgz#ad4f2dd531c5fd5b72e38b2307843515f7896e2b" @@ -4326,7 +4365,7 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -default-gateway@^6.0.3: +default-gateway@^6.0.2: version "6.0.3" resolved "https://registry.yarnpkg.com/default-gateway/-/default-gateway-6.0.3.tgz#819494c888053bdb743edbf343d6cdf7f2943a71" integrity 
sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg== @@ -4698,10 +4737,10 @@ es6-object-assign@^1.1.0: resolved "https://registry.yarnpkg.com/es6-object-assign/-/es6-object-assign-1.1.0.tgz#c2c3582656247c39ea107cb1e6652b6f9f24523c" integrity sha1-wsNYJlYkfDnqEHyx5mUrb58kUjw= -es6-promisify@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-6.1.1.tgz#46837651b7b06bf6fff893d03f29393668d01621" - integrity sha512-HBL8I3mIki5C1Cc9QjKUenHtnG0A5/xA8Q/AllRcfiwl2CZFXGK7ddBiCoRwAix4i2KxcQfjtIVcrVbB3vbmwg== +es6-promisify@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-7.0.0.tgz#9a710008dd6a4ab75a89e280bad787bfb749927b" + integrity sha512-ginqzK3J90Rd4/Yz7qRrqUeIpe3TwSXTPPZtPne7tGBPeAaQiU8qt4fpKApnxHcq1AwtUdHVg5P77x/yrggG8Q== escalade@^3.1.1: version "3.1.1" @@ -6053,11 +6092,25 @@ interface-datastore@^5.1.1, interface-datastore@^5.1.2: nanoid "^3.0.2" uint8arrays "^3.0.0" +interface-datastore@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/interface-datastore/-/interface-datastore-6.0.3.tgz#f42163e4bfaea9e2fcde82e45d4bf2849e1fbbf5" + integrity sha512-61eOyzh7zH1ks/56hPudW6pbqsOdoHSYMVjuqlIlZGjyg0svR6DHlCcaeSJfWW8t6dsPl1n7qKBdk8ZqPzXuLA== + dependencies: + interface-store "^2.0.1" + nanoid "^3.0.2" + uint8arrays "^3.0.0" + interface-store@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/interface-store/-/interface-store-0.1.1.tgz#c7e115ec16e683911f02428826359ae287d19e16" integrity sha512-ynnjIOybDZc0Brep3HHSa2RVlo/M5g7kuL/leui7o21EusKcLJS170vCJ8rliisc3c4jyd9ao5PthkGlBaX29g== +interface-store@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/interface-store/-/interface-store-2.0.1.tgz#b944573b9d27190bca9be576c97bfde907931dee" + integrity sha512-TfjYMdk4RlaGPA0VGk8fVPM+xhFbjiA2mTv1AqhiFh3N+ZEwoJnmDu/EBdKXzl80nyd0pvKui3RTC3zFgHMjTA== + interpret@^1.0.0: version "1.4.0" resolved 
"https://registry.yarnpkg.com/interpret/-/interpret-1.4.0.tgz#665ab8bc4da27a774a40584e812e3e0fa45b1a1e" @@ -6513,6 +6566,11 @@ it-all@^1.0.4: resolved "https://registry.yarnpkg.com/it-all/-/it-all-1.0.5.tgz#e880510d7e73ebb79063a76296a2eb3cb77bbbdb" integrity sha512-ygD4kA4vp8fi+Y+NBgEKt6W06xSbv6Ub/0V8d1r3uCyJ9Izwa1UspkIOlqY9fOee0Z1w3WRo1+VWyAU4DgtufA== +it-all@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/it-all/-/it-all-1.0.6.tgz#852557355367606295c4c3b7eff0136f07749335" + integrity sha512-3cmCc6Heqe3uWi3CVM/k51fa/XbMFpQVzFoDsV0IZNHSQDyAXl3c4MjHkFX5kF3922OGj7Myv1nSEUgRtcuM1A== + it-buffer@^0.1.2, it-buffer@^0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/it-buffer/-/it-buffer-0.1.3.tgz#efebef1cc35a6133cb9558e759345d4f17b3e1d0" @@ -6536,6 +6594,11 @@ it-first@^1.0.4, it-first@^1.0.6: resolved "https://registry.yarnpkg.com/it-first/-/it-first-1.0.6.tgz#a015ecfc62d2d517382138da4142b35e61f4131e" integrity sha512-wiI02c+G1BVuu0jz30Nsr1/et0cpSRulKUusN8HDZXxuX4MdUzfMp2P4JUk+a49Wr1kHitRLrnnh3+UzJ6neaQ== +it-foreach@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/it-foreach/-/it-foreach-0.1.1.tgz#8dce2d16567cfac007977e2daae7699c82c58d70" + integrity sha512-ZLxL651N5w5SL/EIIcrXELgYrrkuEKj/TErG93C4lr6lNZziKsf338ljSG85PjQfu7Frg/1wESl5pLrPSFXI9g== + it-glob@~0.0.11: version "0.0.13" resolved "https://registry.yarnpkg.com/it-glob/-/it-glob-0.0.13.tgz#78913fe835fcf0d46afcdb6634eb069acdfc4fbc" @@ -6625,6 +6688,13 @@ it-reader@^3.0.0: dependencies: bl "^5.0.0" +it-sort@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/it-sort/-/it-sort-1.0.1.tgz#2b7c100225b04147f06a4962c57e22cd2f35f855" + integrity sha512-c+C48cP7XMMebB9irLrJs2EmpLILId8NYSojqAqN8etE8ienx0azBgaKvZHYH1DkerqIul0Fl2FqISu2BZgTEQ== + dependencies: + it-all "^1.0.6" + it-take@^1.0.0, it-take@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/it-take/-/it-take-1.0.1.tgz#155b0f8ed4b6ff5eb4e9e7a2f4395f16b137b68a" @@ -6964,17 +7034,17 @@ 
libnpmpublish@4.0.0, libnpmpublish@^4.0.0: semver "^7.1.3" ssri "^8.0.0" -libp2p-bootstrap@^0.13.0: - version "0.13.0" - resolved "https://registry.yarnpkg.com/libp2p-bootstrap/-/libp2p-bootstrap-0.13.0.tgz#6edba44eeac5a421718f9d6f3594ad7863cf36fc" - integrity sha512-8sXEZrikY+chKvMorkvOi9E/v9GvwsYr9DAEfzQZrOKQZByqhan1aXQKWrSpc4AxEv5/UopRzu1P47bkOi8wdw== +libp2p-bootstrap@^0.14.0: + version "0.14.0" + resolved "https://registry.yarnpkg.com/libp2p-bootstrap/-/libp2p-bootstrap-0.14.0.tgz#820cc2d259a4710c660d239c5e2043384a1317b2" + integrity sha512-j3slZo5nOdA8wVlav8dRZeAXutZ7psz/f10DLoIEX/EFif7uU5oZfIYvjbVGo3ZDl+VQLo2tR0m1lV0westQ3g== dependencies: debug "^4.3.1" mafmt "^10.0.0" multiaddr "^10.0.0" - peer-id "^0.15.0" + peer-id "^0.16.0" -libp2p-crypto@^0.19.0, libp2p-crypto@^0.19.4, libp2p-crypto@^0.19.5, libp2p-crypto@^0.19.7: +libp2p-crypto@^0.19.0, libp2p-crypto@^0.19.7: version "0.19.7" resolved "https://registry.yarnpkg.com/libp2p-crypto/-/libp2p-crypto-0.19.7.tgz#e96a95bd430e672a695209fe0fbd2bcbd348bc35" integrity sha512-Qb5o/3WFKF2j6mYSt4UBPyi2kbKl3jYV0podBJoJCw70DlpM5Xc+oh3fFY9ToSunu8aSQQ5GY8nutjXgX/uGRA== @@ -7005,50 +7075,63 @@ libp2p-crypto@^0.21.0: protobufjs "^6.11.2" uint8arrays "^3.0.0" -libp2p-gossipsub@^0.11.1: - version "0.11.1" - resolved "https://registry.yarnpkg.com/libp2p-gossipsub/-/libp2p-gossipsub-0.11.1.tgz#591151b9a3a029e3bc7b9fb02a5dd61968e08b7e" - integrity sha512-apWeUsCoCIaXFw7NMAm/UH3OiODS/00hRc3ZSckvrV3lRfXh6TU1Q+IbPX0Z4u0ODo7eLXuq6n+JpYitlkB2qg== +libp2p-crypto@^0.21.2: + version "0.21.2" + resolved "https://registry.yarnpkg.com/libp2p-crypto/-/libp2p-crypto-0.21.2.tgz#7f9875436f24ca3887b077210b217b702bd72916" + integrity sha512-EXFrhSpiHtJ+/L8xXDvQNK5VjUMG51u878jzZcaT5XhuN/zFg6PWJFnl/qB2Y2j7eMWnvCRP7Kp+ua2H36cG4g== dependencies: - "@types/debug" "^4.1.5" + "@noble/ed25519" "^1.5.1" + "@noble/secp256k1" "^1.3.0" + err-code "^3.0.1" + iso-random-stream "^2.0.0" + multiformats "^9.4.5" + node-forge "^1.2.1" + protobufjs "^6.11.2" + 
uint8arrays "^3.0.0" + +libp2p-gossipsub@^0.13.0: + version "0.13.0" + resolved "https://registry.yarnpkg.com/libp2p-gossipsub/-/libp2p-gossipsub-0.13.0.tgz#a70db85139c62d7a8ad273be3ba01d1c9f338f7b" + integrity sha512-xy2jRZGmJpjy++Di6f1admtjve8Fx0z5l8NISTQS282egwbRMmTPE6/UeYktb6hNGAgtSTIwXdHjXmMOiTarFA== + dependencies: + "@types/debug" "^4.1.7" debug "^4.3.1" denque "^1.5.0" err-code "^3.0.1" it-pipe "^1.1.0" - libp2p-interfaces "^1.0.1" - peer-id "^0.15.0" + libp2p-interfaces "^4.0.4" + peer-id "^0.16.0" protobufjs "^6.11.2" - time-cache "^0.3.0" uint8arrays "^3.0.0" -libp2p-interfaces@^1.0.0, libp2p-interfaces@^1.0.1, libp2p-interfaces@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/libp2p-interfaces/-/libp2p-interfaces-1.1.0.tgz#20fdf21a3da022872abcf6a000e30495d39ab965" - integrity sha512-5nc/HZJgeks1qfkyYQdI84hcZLF4SJKJSUx33JpO0w7v7R+obz+HOwk0GSa4/ZvQHjX+/+OWC4NYVA0yZxZXag== +libp2p-interfaces@^4.0.0, libp2p-interfaces@^4.0.4: + version "4.0.6" + resolved "https://registry.yarnpkg.com/libp2p-interfaces/-/libp2p-interfaces-4.0.6.tgz#462062e04a680703bca18eb1d7a9c963a39344d1" + integrity sha512-3KjzNEIWhi+VoOamLvgKKUE/xqwxSw/JYqsBnfMhAWVRvRtosROtVT03wci2XbuuowCYw+/hEX1xKJIR1w5n0A== dependencies: - abort-controller "^3.0.0" abortable-iterator "^3.0.0" debug "^4.3.1" err-code "^3.0.1" it-length-prefixed "^5.0.2" it-pipe "^1.1.0" it-pushable "^1.4.2" - libp2p-crypto "^0.19.5" + libp2p-crypto "^0.21.0" multiaddr "^10.0.0" multiformats "^9.1.2" - peer-id "^0.15.0" + p-queue "^6.6.2" + peer-id "^0.16.0" protobufjs "^6.10.2" uint8arrays "^3.0.0" -libp2p-mdns@^0.17.0: - version "0.17.0" - resolved "https://registry.yarnpkg.com/libp2p-mdns/-/libp2p-mdns-0.17.0.tgz#5b321229b75f4f2477ee384efff56e9ab6d17b3d" - integrity sha512-W9i1WFTahJkA5rgFNeIpKkp416ciKIkAgd3nckcLat9eO6P8xAxWOm2Ft8sR8FuJSPMJMU/Gp3PJgt8I7Y2+wA== +libp2p-mdns@^0.18.0: + version "0.18.0" + resolved 
"https://registry.yarnpkg.com/libp2p-mdns/-/libp2p-mdns-0.18.0.tgz#20b124468dc0d10f614e1c5d042190313888ebd9" + integrity sha512-IBCKRuNc5USlli9QF/gOq2loCssE4ZKkVRhUNuAVBRXJ8ueqFEquc5R5C1sWy7AOgbycTgeNcxzSa1kuNb6nbg== dependencies: debug "^4.3.1" multiaddr "^10.0.0" multicast-dns "^7.2.0" - peer-id "^0.15.0" + peer-id "^0.16.0" libp2p-mplex@^0.10.5: version "0.10.5" @@ -7091,57 +7174,61 @@ libp2p-utils@^0.4.0: multiaddr "^10.0.0" private-ip "^2.1.1" -libp2p@^0.32.4: - version "0.32.4" - resolved "https://registry.yarnpkg.com/libp2p/-/libp2p-0.32.4.tgz#999f31d8dad7cd51364ae21573dfaf879678970c" - integrity sha512-GSImpWJmjFqjXrv9sgJfwaWhMF+J07nNZJknobvgWXXki9W/1a5UsNVyw/1Z2licvsc+aUmCxDgV92lbUvTeSw== +libp2p@^0.36.2: + version "0.36.2" + resolved "https://registry.yarnpkg.com/libp2p/-/libp2p-0.36.2.tgz#d8f8fc1021d4eff920db74fbe27038771c92f309" + integrity sha512-UpNYBMQVivMu56zoibdGitopv39uBBAybIBOEGWmFy/I2NnTVGUutLPrxo47AuN2kntYgo/TNJfW+PpswUgSaw== dependencies: - "@motrix/nat-api" "^0.3.1" "@vascosantos/moving-average" "^1.1.0" - abort-controller "^3.0.0" + abortable-iterator "^3.0.0" aggregate-error "^3.1.0" - any-signal "^2.1.1" + any-signal "^3.0.0" bignumber.js "^9.0.1" class-is "^1.1.0" + datastore-core "^7.0.0" debug "^4.3.1" err-code "^3.0.0" - es6-promisify "^6.1.1" + es6-promisify "^7.0.0" events "^3.3.0" hashlru "^2.3.0" - interface-datastore "^5.1.1" + interface-datastore "^6.0.2" it-all "^1.0.4" it-buffer "^0.1.2" it-drain "^1.0.3" it-filter "^1.0.1" it-first "^1.0.4" + it-foreach "^0.1.1" it-handshake "^2.0.0" it-length-prefixed "^5.0.2" it-map "^1.0.4" it-merge "^1.0.0" it-pipe "^1.1.0" + it-sort "^1.0.1" it-take "^1.0.0" - libp2p-crypto "^0.19.4" - libp2p-interfaces "^1.0.0" + libp2p-crypto "^0.21.2" + libp2p-interfaces "^4.0.0" libp2p-utils "^0.4.0" mafmt "^10.0.0" merge-options "^3.0.4" + mortice "^2.0.1" multiaddr "^10.0.0" multiformats "^9.0.0" - multistream-select "^2.0.0" + multistream-select "^3.0.0" mutable-proxy "^1.0.0" - node-forge 
"^0.10.0" + nat-api "^0.3.1" + node-forge "^1.2.1" p-any "^3.0.0" p-fifo "^1.0.0" p-retry "^4.4.0" p-settle "^4.1.1" - peer-id "^0.15.0" + peer-id "^0.16.0" private-ip "^2.1.0" protobufjs "^6.10.2" retimer "^3.0.0" sanitize-filename "^1.6.3" set-delayed-interval "^1.0.0" streaming-iterables "^6.0.0" - timeout-abort-controller "^1.1.1" + timeout-abort-controller "^3.0.0" uint8arrays "^3.0.0" varint "^6.0.0" wherearewe "^1.0.0" @@ -7278,11 +7365,6 @@ lodash.templatesettings@^4.0.0: dependencies: lodash._reinterpolate "^3.0.0" -lodash.throttle@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/lodash.throttle/-/lodash.throttle-4.1.1.tgz#c23e91b710242ac70c37f1e1cda9274cc39bf2f4" - integrity sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ= - lodash.truncate@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" @@ -7731,6 +7813,16 @@ moment@^2.11.2: resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.1.tgz#b2be769fa31940be9eeea6469c075e35006fa3d3" integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== +mortice@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mortice/-/mortice-2.0.1.tgz#047b83c8c57d49e90e586f1f9e7d63e1f80d4a2b" + integrity sha512-9gsXmjq+5LZmXDIoyC/crf2i/7CUwDGSBEwSEsr1i/WfKmJ6DVt38B5kg6BE/WF/1/yfGJYiB1Wyiu423iI3nQ== + dependencies: + nanoid "^3.1.20" + observable-webworkers "^1.0.0" + p-queue "^6.0.0" + promise-timeout "^1.3.0" + ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -7803,11 +7895,12 @@ multimatch@^5.0.0: arrify "^2.0.1" minimatch "^3.0.4" -multistream-select@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/multistream-select/-/multistream-select-2.0.1.tgz#120608d54c146762d16d1df5919a2f0326a78527" - integrity 
sha512-ziVNT/vux0uUElP4OKNMVr0afU/X6PciAmT2UJNolhzhSLXIwFAaYfmLajD8NoZ+DsBQ1bp0zZ2nMVPF+FhClA== +multistream-select@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/multistream-select/-/multistream-select-3.0.2.tgz#18919b3c74c8eac6ae9b1ba9b8ac5af79cfab3e8" + integrity sha512-ICGA8DAviZj6Xo1NkaRV3J38M+tFDoWiGtO1ksluyMnskAsdGjAzocg806OzpQPivNGWWboX3CrFT2Tk4UdYXA== dependencies: + abortable-iterator "^3.0.0" bl "^5.0.0" debug "^4.1.1" err-code "^3.0.1" @@ -7864,6 +7957,18 @@ napi-macros@~2.0.0: resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg== +nat-api@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/nat-api/-/nat-api-0.3.1.tgz#1e7808ed1c21c8c9b267d14be40cdb1e87d5271b" + integrity sha512-5cyLugEkXnKSKSvVjKjxxPMLDnkwY3boZLbATWwiGJ4T/3UvIpiQmzb2RqtxxEFcVo/7PwsHPGN0MosopONO8Q== + dependencies: + async "^3.2.0" + debug "^4.2.0" + default-gateway "^6.0.2" + request "^2.88.2" + unordered-array-remove "^1.0.2" + xml2js "^0.1.0" + native-abort-controller@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/native-abort-controller/-/native-abort-controller-1.0.3.tgz#35974a2e189c0d91399c8767a989a5bf058c1435" @@ -7951,6 +8056,11 @@ node-forge@^0.10.0: resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3" integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA== +node-forge@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.2.1.tgz#82794919071ef2eb5c509293325cec8afd0fd53c" + integrity sha512-Fcvtbb+zBcZXbTTVwqGA5W+MKBj56UjVRevvchv5XrcyXbmNdesfZL37nlcWOfpgHhgmxApw3tQbTr4CqNmX4w== + node-gyp-build@^4.2.0, node-gyp-build@~4.2.1: version "4.2.3" resolved 
"https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.2.3.tgz#ce6277f853835f718829efb47db20f3e4d9c4739" @@ -8335,6 +8445,11 @@ observable-fns@^0.6.1: resolved "https://registry.yarnpkg.com/observable-fns/-/observable-fns-0.6.1.tgz#636eae4fdd1132e88c0faf38d33658cc79d87e37" integrity sha512-9gRK4+sRWzeN6AOewNBTLXir7Zl/i3GB6Yl26gK4flxz8BXVpD3kt8amREmWNb0mxYOGDotvE5a4N+PtGGKdkg== +observable-webworkers@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/observable-webworkers/-/observable-webworkers-1.0.0.tgz#dcbd484a9644d512accc351962c6e710313fbb68" + integrity sha512-+cECwCR8IEh8UY5nefQVLO9Cydqpk1izO+o7BABmKjXfJZyEOzBWY3ss5jbOPM6KmEa9aQExvAtTW6tVTOsNAQ== + once@^1.3.0, once@^1.3.1, once@^1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" @@ -8534,7 +8649,7 @@ p-pipe@^3.1.0: resolved "https://registry.yarnpkg.com/p-pipe/-/p-pipe-3.1.0.tgz#48b57c922aa2e1af6a6404cb7c6bf0eb9cc8e60e" integrity sha512-08pj8ATpzMR0Y80x50yJHn37NF6vjrqHutASaX5LiH5npS9XPvrUmscd9MF5R4fuYRHOxQR1FfMIlF7AzwoPqw== -p-queue@^6.6.2: +p-queue@^6.0.0, p-queue@^6.6.2: version "6.6.2" resolved "https://registry.yarnpkg.com/p-queue/-/p-queue-6.6.2.tgz#2068a9dcf8e67dd0ec3e7a2bcb76810faa85e426" integrity sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ== @@ -8799,7 +8914,7 @@ pbkdf2@^3.0.17, pbkdf2@^3.0.3, pbkdf2@^3.0.9: safe-buffer "^5.0.1" sha.js "^2.4.8" -peer-id@^0.15.0, peer-id@^0.15.3: +peer-id@^0.15.3: version "0.15.3" resolved "https://registry.yarnpkg.com/peer-id/-/peer-id-0.15.3.tgz#c093486bcc11399ba63672990382946cfcf0e6f3" integrity sha512-pass5tk6Fbaz7PTD/3fJg2KWqaproHY0B0Ki8GQMEuMjkoLRcS2Vqt9yy6ob/+8uGBmWjRLtbMhaLV4HTyMDfw== @@ -9020,6 +9135,11 @@ promise-retry@^2.0.1: err-code "^2.0.2" retry "^0.12.0" +promise-timeout@^1.3.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/promise-timeout/-/promise-timeout-1.3.0.tgz#d1c78dd50a607d5f0a5207410252a3a0914e1014" + integrity sha512-5yANTE0tmi5++POym6OgtFmwfDvOXABD9oj/jLQr5GPEyuNEb7jH4wbbANJceJid49jwhi1RddxnhnEAb/doqg== + promzard@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/promzard/-/promzard-0.3.0.tgz#26a5d6ee8c7dee4cb12208305acfb93ba382a9ee" @@ -9524,11 +9644,6 @@ ret@~0.2.0: resolved "https://registry.yarnpkg.com/ret/-/ret-0.2.2.tgz#b6861782a1f4762dce43402a71eb7a283f44573c" integrity sha512-M0b3YWQs7R3Z917WRQy1HHA7Ba7D8hvZg6UE5mLykJxQVE2ju0IXbGlaHPPlkY+WN7wFP+wUMXmBFA0aV6vYGQ== -retimer@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/retimer/-/retimer-2.0.0.tgz#e8bd68c5e5a8ec2f49ccb5c636db84c04063bbca" - integrity sha512-KLXY85WkEq2V2bKex/LOO1ViXVn2KGYe4PYysAdYdjmraYIUsVkXu8O4am+8+5UbaaGl1qho4aqAAPHNQ4GSbg== - retimer@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/retimer/-/retimer-3.0.0.tgz#98b751b1feaf1af13eb0228f8ea68b8f9da530df" @@ -9674,7 +9789,7 @@ sax@1.2.1: resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" integrity sha1-e45lYZCyKOgaZq6nSEgNgozS03o= -sax@>=0.6.0, sax@^1.2.4: +sax@>=0.1.1, sax@>=0.6.0, sax@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== @@ -10456,20 +10571,12 @@ thunky@^1.0.2: resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d" integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA== -time-cache@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/time-cache/-/time-cache-0.3.0.tgz#ed0dfcf0fda45cdc95fbd601fda830ebf1bd5d8b" - integrity sha1-7Q388P2kXNyV+9YB/agw6/G9XYs= - dependencies: - lodash.throttle "^4.1.1" - 
-timeout-abort-controller@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/timeout-abort-controller/-/timeout-abort-controller-1.1.1.tgz#2c3c3c66f13c783237987673c276cbd7a9762f29" - integrity sha512-BsF9i3NAJag6T0ZEjki9j654zoafI2X6ayuNd6Tp8+Ul6Tr5s4jo973qFeiWrRSweqvskC+AHDKUmIW4b7pdhQ== +timeout-abort-controller@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/timeout-abort-controller/-/timeout-abort-controller-3.0.0.tgz#dd57ffca041652c03769904f8d95afd93fb95595" + integrity sha512-O3e+2B8BKrQxU2YRyEjC/2yFdb33slI22WRdUaDx6rvysfi9anloNZyR2q0l6LnePo5qH7gSM7uZtvvwZbc2yA== dependencies: - abort-controller "^3.0.0" - retimer "^2.0.0" + retimer "^3.0.0" timers-browserify@^2.0.4: version "2.0.12" @@ -11267,7 +11374,14 @@ xml2js@0.4.19: sax ">=0.6.0" xmlbuilder "~9.0.1" -xml2js@^0.4.19, xml2js@^0.4.23: +xml2js@^0.1.0: + version "0.1.14" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c" + integrity sha1-UnTmf1pkxfkpdM2FE54DMq3GuQw= + dependencies: + sax ">=0.1.1" + +xml2js@^0.4.19: version "0.4.23" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66" integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug== From 4effcd3bb6dfd224af5d13a8730ff143837537c3 Mon Sep 17 00:00:00 2001 From: g11tech <76567250+g11tech@users.noreply.github.com> Date: Fri, 11 Mar 2022 18:26:34 +0530 Subject: [PATCH 09/10] Add spinning up validator from mnemonic on the devnet script (#3849) * Add spinning up validator from mnemonic * supply params file --- kiln/devnets/README.md | 3 +- kiln/devnets/kiln.vars | 4 ++- kiln/devnets/parse-args.sh | 5 +++ kiln/devnets/setup.sh | 63 +++++++++++++++++++++++++++----------- 4 files changed, 55 insertions(+), 20 deletions(-) diff --git a/kiln/devnets/README.md b/kiln/devnets/README.md index 29236bff4094..6fdfa6276ad0 100644 --- a/kiln/devnets/README.md +++ 
b/kiln/devnets/README.md @@ -12,7 +12,7 @@ This is a setup to run and join the devnet with a single shell command. This scr ```bash cd kiln/devnets -./setup.sh --dataDir kiln-data --elClient geth --devnetVars ./kiln.vars [--dockerWithSudo --withTerminal "gnome-terminal --disable-factory --"] +./setup.sh --dataDir kiln-data --elClient geth --devnetVars ./kiln.vars [--dockerWithSudo --withTerminal "gnome-terminal --disable-factory --" --withValidator] ``` ###### Example scenarios @@ -35,5 +35,6 @@ You can alternate between `geth` and `nethermind` to experiment with the ELs bei 5. `--withTerminal`(optional): Provide the terminal command prefix for CL and EL processes to run in your favourite terminal. You may use an alias or a terminal launching script as long as it waits for the command it runs till ends and then closes.If not provided, it will launch the docker processes in _in-terminal_ mode. 6. `--detached`(optional): By default the script will wait for processes and use user input (ctrl +c) to end the processes, however you can pass this option to skip this behavior and just return, for e.g. in case you just want to leave it running. +7. `--withValidator` (optional): Launch a validator client using `LODESTAR_VALIDATOR_ARGS` as set in the devnet vars file. Only one of `--withTerminal` or `--detached` should be provided. 
diff --git a/kiln/devnets/kiln.vars b/kiln/devnets/kiln.vars index 29efe27e731f..edcbb7297bb9 100644 --- a/kiln/devnets/kiln.vars +++ b/kiln/devnets/kiln.vars @@ -7,7 +7,9 @@ CONFIG_GIT_DIR=kiln JWT_SECRET="0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d" -LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls http://127.0.0.1:8551 --api.rest.enabled --api.rest.host 0.0.0.0" +LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls http://127.0.0.1:8551 --api.rest.enabled --api.rest.host 0.0.0.0 --api.rest.api '*'" + +LODESTAR_VALIDATOR_ARGS='--network kiln --fromMnemonic "lens risk clerk foot verb planet drill roof boost aim salt omit celery tube list permit motor obvious flash demise churn hold wave hollow" --mnemonicIndexes 0..5' NETHERMIND_EXTRA_ARGS="--config kiln --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=1000000000000 --Init.DiagnosticMode=None --JsonRpc.Enabled=true --JsonRpc.Host=0.0.0.0 --JsonRpc.AdditionalRpcUrls \"http://localhost:8545|http;ws|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:8551|http;ws|net;eth;subscribe;engine;web3;client\"" diff --git a/kiln/devnets/parse-args.sh b/kiln/devnets/parse-args.sh index 6fe0cc0be2a2..2cb35f944197 100755 --- a/kiln/devnets/parse-args.sh +++ b/kiln/devnets/parse-args.sh @@ -27,6 +27,10 @@ while [[ $# -gt 0 ]]; do dockerWithSudo=true shift # past argument ;; + --withValidator) + withValidator=true + shift # past argument + ;; --detached) detached=true shift # past argument @@ -42,6 +46,7 @@ echo "dataDir = $dataDir" echo "devnetVars = $devnetVars" echo "withTerminal = $withTerminal" echo "dockerWithSudo = $dockerWithSudo" +echo "withValidator = $withValidator" echo "detached = $detached" if [ -n "$withTerminal" ] && [ -n "$detached" ] diff --git a/kiln/devnets/setup.sh b/kiln/devnets/setup.sh index 23726bf60c2a..8a89ef46a0fd 100755 --- a/kiln/devnets/setup.sh +++ 
b/kiln/devnets/setup.sh @@ -37,15 +37,15 @@ run_cmd(){ execCmd=$1; if [ -n "$detached" ] then - echo "running: $execCmd" - $execCmd + echo "running detached: $execCmd" + eval "$execCmd" else if [ -n "$withTerminal" ] then execCmd="$withTerminal $execCmd" fi; echo "running: $execCmd &" - $execCmd & + eval "$execCmd" & fi; } @@ -61,7 +61,9 @@ dockerCmd="$dockerExec run" if [ -n "$detached" ] then - dockerCmd="$dockerCmd --detach" + dockerCmd="$dockerCmd --detach --restart unless-stopped" +else + dockerCmd="$dockerCmd --rm" fi; if [ -n "$withTerminal" ] @@ -86,9 +88,9 @@ then fi; if [ $platform == 'Darwin' ] then - elCmd="$dockerCmd --rm --name $elName -v $currentDir/$dataDir:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data/geth --authrpc.jwtsecret /data/jwtsecret $GETH_EXTRA_ARGS" + elCmd="$dockerCmd --name $elName -v $currentDir/$dataDir:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data/geth --authrpc.jwtsecret /data/jwtsecret $GETH_EXTRA_ARGS" else - elCmd="$dockerCmd --rm --name $elName --network host -v $currentDir/$dataDir:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data/geth --authrpc.jwtsecret /data/jwtsecret $GETH_EXTRA_ARGS" + elCmd="$dockerCmd --name $elName --network host -v $currentDir/$dataDir:/data $GETH_IMAGE --bootnodes $EXTRA_BOOTNODES$bootNode --datadir /data/geth --authrpc.jwtsecret /data/jwtsecret $GETH_EXTRA_ARGS" fi elif [ "$elClient" == "nethermind" ] then @@ -99,9 +101,9 @@ then if [ $platform == 'Darwin' ] then - elCmd="$dockerCmd --rm --name $elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir /data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" + elCmd="$dockerCmd --name $elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir 
/data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" else - elCmd="$dockerCmd --rm --name $elName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir /data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" + elCmd="$dockerCmd --name $elName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir /data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" fi fi @@ -117,15 +119,24 @@ clName="$DEVNET_NAME-lodestar" if [ $platform == 'Darwin' ] then - clCmd="$dockerCmd --rm --name $clName --net=container:$elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE beacon --rootDir /data/lodestar --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --bootnodesFile /config/boot_enr.yaml --jwt-secret /data/jwtsecret" + clCmd="$dockerCmd --name $clName --net=container:$elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE beacon --rootDir /data/lodestar --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --bootnodesFile /config/boot_enr.yaml --jwt-secret /data/jwtsecret" else - clCmd="$dockerCmd --rm --name $clName --network 
host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE beacon --rootDir /data/lodestar --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --bootnodesFile /config/boot_enr.yaml --jwt-secret /data/jwtsecret" + clCmd="$dockerCmd --name $clName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE beacon --rootDir /data/lodestar --paramsFile /config/config.yaml --genesisStateFile /config/genesis.ssz --network.connectToDiscv5Bootnodes --network.discv5.enabled true --eth1.enabled true --eth1.depositContractDeployBlock $depositContractDeployBlock $LODESTAR_EXTRA_ARGS --bootnodesFile /config/boot_enr.yaml --jwt-secret /data/jwtsecret" fi +valName="$DEVNET_NAME-validator" +if [ $platform == 'Darwin' ] +then + valCmd="$dockerCmd --name $valName --net=container:$elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE validator --rootDir /data/lodestar --paramsFile /config/config.yaml $LODESTAR_VALIDATOR_ARGS" +else + valCmd="$dockerCmd --name $valName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $LODESTAR_IMAGE validator --rootDir /data/lodestar --paramsFile /config/config.yaml $LODESTAR_VALIDATOR_ARGS" +fi; + echo -n $JWT_SECRET > $dataDir/jwtsecret run_cmd "$elCmd" elPid=$! echo "elPid= $elPid" +terminalInfo="elPid= $elPid for $elName" if [ $platform == 'Darwin' ] then @@ -136,43 +147,59 @@ fi run_cmd "$clCmd" clPid=$! echo "clPid= $clPid" +terminalInfo="$terminalInfo, clPid= $clPid for $clName" + +if [ -n "$withValidator" ] +then + run_cmd "$valCmd" + valPid=$! 
+  echo "valPid= $valPid"
+  terminalInfo="$terminalInfo, valPid= $valPid for $valName"
+else
+  # hack to assign clPid to valPid for joint wait later
+  valPid=$clPid
+fi;
 
 cleanup() {
   echo "cleaning up"
   $dockerExec rm $elName -f
   $dockerExec rm $clName -f
+  $dockerExec rm $valName -f
   elPid=null
   clPid=null
+  valPid=null
 }
 
 trap "echo exit signal recived;cleanup" SIGINT SIGTERM
 
-if [ ! -n "$detached" ] && [ -n "$elPid" ] && [ -n "$clPid" ]
+if [ ! -n "$detached" ] && [ -n "$elPid" ] && [ -n "$clPid" ] && ([ ! -n "$withValidator" ] || [ -n "$valPid" ] )
 then
-  echo "launched two terminals for el and cl clients with elPid: $elPid clPid: $clPid"
+  echo "launched terminals for $terminalInfo"
   echo "you can watch observe the client logs at the respective terminals"
-  echo "use ctl + c on any of these three (including this) terminals to stop the process"
+  echo "use ctrl + c on any of these (including this) terminals to stop the process"
   echo "waiting ..."
   if [ $platform == 'Darwin' ]
   then
     # macOs ships with an old version of bash with wait that does not have the -n flag
     wait $elPid
     wait $clPid
+    wait $valPid
   else
-    wait -n $elPid $clPid
+    wait -n $elPid $clPid $valPid
   fi
   echo "one of the el or cl process exited, stopping and cleanup"
   cleanup
 fi;
 
-if [ ! -n "$detached" ] && [ -n "$elPid$clPid" ]
+# if it's not detached and is here, it means one of the processes exited/didn't launch
+if [ ! -n "$detached" ] && [ -n "$elPid$clPid$valPid" ]
 then
-  echo "one of the el or cl processes didn't launch properly"
+  echo "one of the processes didn't launch properly"
   cleanup
 fi;
 
 if [ -n "$detached" ]
-then
-  echo "launched detached docker containers: $elName, $clName"
+then
+  echo "launched detached containers: $terminalInfo"
 else
   echo "exiting ..."
fi; From f532758a4d7db4a9dda6deef4a0c447fb5f88725 Mon Sep 17 00:00:00 2001 From: g11tech <76567250+g11tech@users.noreply.github.com> Date: Fri, 11 Mar 2022 23:20:06 +0530 Subject: [PATCH 10/10] Ethereumjs script integration for kiln testnet (#3850) * Ethereumjs script integration for kiln testnet * ttd update as the ttd increased mid way * midway TTD update for kiln * correct ordering for bootnode with space --- kiln/devnets/kiln.vars | 5 ++++- kiln/devnets/setup.sh | 21 ++++++++++++++++--- .../config/src/chainConfig/networks/kiln.ts | 2 +- 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/kiln/devnets/kiln.vars b/kiln/devnets/kiln.vars index edcbb7297bb9..5cd3fa56eac2 100644 --- a/kiln/devnets/kiln.vars +++ b/kiln/devnets/kiln.vars @@ -2,6 +2,7 @@ DEVNET_NAME=kiln GETH_IMAGE=parithoshj/geth:merge-876ca42 NETHERMIND_IMAGE=nethermindeth/nethermind:kiln_0.6 +ETHEREUMJS_IMAGE=g11tech/ethereumjs:kiln LODESTAR_IMAGE=chainsafe/lodestar:next CONFIG_GIT_DIR=kiln @@ -11,8 +12,10 @@ LODESTAR_EXTRA_ARGS="--eth1.providerUrls http://127.0.0.1:8545 --execution.urls LODESTAR_VALIDATOR_ARGS='--network kiln --fromMnemonic "lens risk clerk foot verb planet drill roof boost aim salt omit celery tube list permit motor obvious flash demise churn hold wave hollow" --mnemonicIndexes 0..5' -NETHERMIND_EXTRA_ARGS="--config kiln --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=1000000000000 --Init.DiagnosticMode=None --JsonRpc.Enabled=true --JsonRpc.Host=0.0.0.0 --JsonRpc.AdditionalRpcUrls \"http://localhost:8545|http;ws|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:8551|http;ws|net;eth;subscribe;engine;web3;client\"" +NETHERMIND_EXTRA_ARGS="--config kiln --Network.DiscoveryPort=30303 --Network.P2PPort=30303 --Merge.Enabled=true --Merge.TerminalTotalDifficulty=20000000000000 --Init.DiagnosticMode=None --JsonRpc.Enabled=true --JsonRpc.Host=0.0.0.0 --JsonRpc.AdditionalRpcUrls 
\"http://localhost:8545|http;ws|net;eth;subscribe;engine;web3;client|no-auth,http://localhost:8551|http;ws|net;eth;subscribe;engine;web3;client\"" GETH_EXTRA_ARGS="--http --http.api engine,net,eth --http.port 8545 --allow-insecure-unlock --http.addr 0.0.0.0 --authrpc.port=8551 --networkid 1337802" +ETHEREUMJS_EXTRA_ARGS="--saveReceipts --rpc --rpcport 8545 --ws --rpcEngine --rpcEnginePort=8551 --rpcDebug --loglevel=debug" + EXTRA_BOOTNODES="" diff --git a/kiln/devnets/setup.sh b/kiln/devnets/setup.sh index 8a89ef46a0fd..ed282bc8e811 100755 --- a/kiln/devnets/setup.sh +++ b/kiln/devnets/setup.sh @@ -12,9 +12,9 @@ configGitDir=$CONFIG_GIT_DIR gethImage=$GETH_IMAGE nethermindImage=$NETHERMIND_IMAGE -if [ ! -n "$dataDir" ] || [ ! -n "$devnetVars" ] || ([ "$elClient" != "geth" ] && [ "$elClient" != "nethermind" ]) +if [ ! -n "$dataDir" ] || [ ! -n "$devnetVars" ] || ([ "$elClient" != "geth" ] && [ "$elClient" != "nethermind" ] && [ "$elClient" != "ethereumjs" ]) then - echo "usage: ./setup.sh --dataDir --elClient --devetVars [--dockerWithSudo --withTerminal \"gnome-terminal --disable-factory --\"]" + echo "usage: ./setup.sh --dataDir --elClient --devetVars [--dockerWithSudo --withTerminal \"gnome-terminal --disable-factory --\"]" echo "example: ./setup.sh --dataDir kiln-data --elClient nethermind --devnetVars ./kiln.vars --dockerWithSudo --withTerminal \"gnome-terminal --disable-factory --\"" echo "Note: if running on macOS where gnome-terminal is not available, remove the gnome-terminal related flags." 
echo "example: ./setup.sh --dataDir kiln-data --elClient geth --devnetVars ./kiln.vars" @@ -22,7 +22,7 @@ then fi -mkdir $dataDir && mkdir $dataDir/lodestar && mkdir $dataDir/geth && mkdir $dataDir/nethermind && cd $dataDir && git init && git remote add -f origin $setupConfigUrl && git config core.sparseCheckout true && echo "$configGitDir/*" >> .git/info/sparse-checkout && git pull --depth=1 origin main && cd $currentDir +mkdir $dataDir && mkdir $dataDir/lodestar && mkdir $dataDir/geth && mkdir $dataDir/nethermind && mkdir $dataDir/ethereumjs && cd $dataDir && git init && git remote add -f origin $setupConfigUrl && git config core.sparseCheckout true && echo "$configGitDir/*" >> .git/info/sparse-checkout && git pull --depth=1 origin main && cd $currentDir if [ ! -n "$(ls -A $dataDir/$configGitDir)" ] || [ ! -n "$(ls -A $dataDir/$configGitDir/genesis.json)" ] || [ ! -n "$(ls -A $dataDir/$configGitDir/genesis.ssz)" ] || [ ! -n "$(ls -A $dataDir/$configGitDir/nethermind_genesis.json)" ] || [ ! -n "$(ls -A $dataDir/$configGitDir/el_bootnode.txt)" ] || [ ! 
-n "$(ls -A $dataDir/$configGitDir/bootstrap_nodes.txt)" ] then @@ -74,7 +74,9 @@ fi; platform=$(uname) bootNode=$(cat $dataDir/$configGitDir/el_bootnode.txt) bootNode=($bootNode) +bootNodeWithSpace=$(IFS=" " ; echo "${bootNode[*]}") bootNode=$(IFS=, ; echo "${bootNode[*]}") + if [ "$elClient" == "geth" ] then echo "gethImage: $GETH_IMAGE" @@ -105,6 +107,19 @@ then else elCmd="$dockerCmd --name $elName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $NETHERMIND_IMAGE --datadir /data/nethermind --Init.ChainSpecPath=/config/nethermind_genesis.json --JsonRpc.JwtSecretFile /data/jwtsecret $NETHERMIND_EXTRA_ARGS --Discovery.Bootnodes $EXTRA_BOOTNODES$bootNode" fi +elif [ "$elClient" == "ethereumjs" ] +then + echo "ethereumjsImage: $ETHEREUMJS_IMAGE" + $dockerExec pull $ETHEREUMJS_IMAGE + + elName="$DEVNET_NAME-ethereumjs" + + if [ $platform == 'Darwin' ] + then + elCmd="$dockerCmd --name $elName -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $ETHEREUMJS_IMAGE --datadir /data/ethereumjs --gethGenesis /config/genesis.json $ETHEREUMJS_EXTRA_ARGS --bootnodes=$bootNodeWithSpace --jwt-secret /data/jwtsecret" + else + elCmd="$dockerCmd --name $elName --network host -v $currentDir/$dataDir/$configGitDir:/config -v $currentDir/$dataDir:/data $ETHEREUMJS_IMAGE --datadir /data/ethereumjs --gethGenesis /config/genesis.json $ETHEREUMJS_EXTRA_ARGS --bootnodes=$bootNodeWithSpace --jwt-secret /data/jwtsecret" + fi fi echo "lodestarImage: $LODESTAR_IMAGE" diff --git a/packages/config/src/chainConfig/networks/kiln.ts b/packages/config/src/chainConfig/networks/kiln.ts index 7fcb5d34e96e..8db9b27d0c65 100644 --- a/packages/config/src/chainConfig/networks/kiln.ts +++ b/packages/config/src/chainConfig/networks/kiln.ts @@ -22,7 +22,7 @@ export const kilnChainConfig: IChainConfig = { // Bellatrix BELLATRIX_FORK_VERSION: b("0x70000071"), BELLATRIX_FORK_EPOCH: 150, - TERMINAL_TOTAL_DIFFICULTY: BigInt(1000000000000), + 
TERMINAL_TOTAL_DIFFICULTY: BigInt(20000000000000), // Sharding SHARDING_FORK_VERSION: b("0x03000000"), SHARDING_FORK_EPOCH: Infinity,