Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(blst): add new blst to all packages with bls #5493

Closed
wants to merge 31 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
0bcde15
feat(blst): add new blst to all packages with bls
matthewkeil May 15, 2023
3b85012
refactor(workflows): turn off workflows for now
matthewkeil May 15, 2023
4374fd6
Revert "feat(blst): add new blst to all packages with bls"
matthewkeil May 17, 2023
e067da4
chore(beacon-node): add blst-ts dep
matthewkeil May 17, 2023
1c1bf55
feat(beacon-node): make signatureFromBytesNoCheck async
matthewkeil May 17, 2023
76254a8
feat(beacon-node): make validateGossipBlobsSidecar async
matthewkeil May 17, 2023
a83f831
feat(beacon-node): make aggregate/getAggregate async
matthewkeil May 17, 2023
9c6e677
feat(beacon-node): make SyncCommitteeMessagePool.add async
matthewkeil May 17, 2023
0334706
feat(beacon-node): make AttestationPool.add async
matthewkeil May 17, 2023
e05f203
feat(beacon-node): make AggregatedAttestationPool.add async
matthewkeil May 17, 2023
0cf2a23
feat(beacon-node): make MatchingDataAttestationGroup.add async
matthewkeil May 17, 2023
fdd6502
feat(beacon-node): make aggregateInto async
matthewkeil May 17, 2023
c8ab8e7
feat(beacon-node): make async versions of verifySignatureSetsMaybeBatch
matthewkeil May 17, 2023
19f43a1
feat(beacon-node): make sync/async versions of getAggregatedPubkey
matthewkeil May 17, 2023
9da3f2e
Revert "refactor(workflows): turn off workflows for now"
matthewkeil May 17, 2023
7e0179d
Merge branch 'unstable' into mkeil/test-blst
matthewkeil May 17, 2023
fa38095
refactor(beacon-node): change package name to @chainsafe/blst-ts to a…
matthewkeil May 17, 2023
5ef5007
chore(beacon-node): add temp package for CI
matthewkeil May 17, 2023
0cdccb8
chore(beacon-node): add yarn.lock
matthewkeil May 17, 2023
b4baaf5
fix(beacon-node): update file imports for temp package name
matthewkeil May 17, 2023
212699e
Merge branch 'unstable' into mkeil/test-blst
matthewkeil May 17, 2023
123ad12
chore: update blst-ts-test version
matthewkeil May 18, 2023
0f3635f
chore(beacon-node): update blst-ts-test@0.0.3
matthewkeil May 18, 2023
e99882a
chore: fix lint
matthewkeil May 18, 2023
78eb276
test(perf): update bls to new bindings
matthewkeil May 18, 2023
3abacf9
refactor: remove commented import
matthewkeil May 18, 2023
05cbd67
fix(beacon-node): return promise from perf test
matthewkeil May 18, 2023
a94b02c
fix(beacon-node): return promise from perf test
matthewkeil May 18, 2023
29afcf7
test(perf): change tests to await not return promise
matthewkeil May 18, 2023
0daa885
test(perf): change bls test to await not return promise
matthewkeil May 18, 2023
c26a6e3
refactor(beacon-node): remove unnecessary async
matthewkeil May 18, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
1 change: 1 addition & 0 deletions packages/beacon-node/package.json
Expand Up @@ -98,6 +98,7 @@
"@chainsafe/as-chacha20poly1305": "^0.1.0",
"@chainsafe/as-sha256": "^0.3.1",
"@chainsafe/bls": "7.1.1",
"blst-ts-test": "0.0.3",
"@chainsafe/discv5": "^3.0.0",
"@chainsafe/libp2p-gossipsub": "^6.2.0",
"@chainsafe/libp2p-noise": "^11.0.4",
Expand Down
4 changes: 2 additions & 2 deletions packages/beacon-node/src/api/impl/beacon/pool/index.ts
Expand Up @@ -66,7 +66,7 @@ export function getBeaconPoolApi({
);

if (network.shouldAggregate(subnet, slot)) {
const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex);
const insertOutcome = await chain.attestationPool.add(attestation, attDataRootHex);
metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome});
}
const sentPeers = await network.publishBeaconAttestation(attestation, subnet);
Expand Down Expand Up @@ -184,7 +184,7 @@ export function getBeaconPoolApi({
// Sync committee subnet members are just sequential in the order they appear in SyncCommitteeIndexes array
const subnet = Math.floor(indexInCommittee / SYNC_COMMITTEE_SUBNET_SIZE);
const indexInSubcommittee = indexInCommittee % SYNC_COMMITTEE_SUBNET_SIZE;
chain.syncCommitteeMessagePool.add(subnet, signature, indexInSubcommittee);
await chain.syncCommitteeMessagePool.add(subnet, signature, indexInSubcommittee);

// Cheap de-duplication code to avoid using a Set. indexesInCommittee is always sorted
if (subnets.length === 0 || subnets[subnets.length - 1] !== subnet) {
Expand Down
2 changes: 1 addition & 1 deletion packages/beacon-node/src/api/impl/validator/index.ts
Expand Up @@ -538,7 +538,7 @@ export function getValidatorApi({
beaconBlockRoot
);

chain.aggregatedAttestationPool.add(
await chain.aggregatedAttestationPool.add(
signedAggregateAndProof.message.aggregate,
attDataRootHex,
indexedAttestation.attestingIndices.length,
Expand Down
40 changes: 38 additions & 2 deletions packages/beacon-node/src/chain/bls/maybeBatch.ts
@@ -1,5 +1,6 @@
import {CoordType, PublicKey} from "@chainsafe/bls/types";
import blstTs from "blst-ts-test";
import bls from "@chainsafe/bls";
import {CoordType, PublicKey} from "@chainsafe/bls/types";

const MIN_SET_COUNT_TO_BATCH = 2;

Expand All @@ -9,11 +10,17 @@ export type SignatureSetDeserialized = {
signature: Uint8Array;
};

/**
 * Signature set whose public key is already a deserialized blst-ts (napi) PublicKey,
 * while the message and signature remain raw bytes — the signature is deserialized
 * lazily at verification time.
 */
export type NapiSignatureSetDeserialized = {
  publicKey: blstTs.PublicKey;
  message: Uint8Array;
  signature: Uint8Array;
};

/**
* Verify signature sets with batch verification or regular core verify depending on the set count.
* Abstracted in a separate file to be consumed by the threaded pool and the main thread implementation.
*/
export function verifySignatureSetsMaybeBatch(sets: SignatureSetDeserialized[]): boolean {
export function verifySignatureSetsMaybeBatchSync(sets: SignatureSetDeserialized[]): boolean {
if (sets.length >= MIN_SET_COUNT_TO_BATCH) {
return bls.Signature.verifyMultipleSignatures(
sets.map((s) => ({
Expand All @@ -37,3 +44,32 @@ export function verifySignatureSetsMaybeBatch(sets: SignatureSetDeserialized[]):
return sig.verify(set.publicKey, set.message);
});
}

/**
 * Verify a group of signature sets, using batch verification when the set count
 * makes it worthwhile and per-set verification otherwise.
 *
 * Lives in its own file so both the threaded pool and the main-thread
 * implementation can share it.
 *
 * @throws if `sets` is empty — `.every` on an empty array would report success
 */
export async function verifySignatureSetsMaybeBatch(sets: NapiSignatureSetDeserialized[]): Promise<boolean> {
  // Guard first: an empty batch must never verify as valid
  if (sets.length === 0) {
    throw Error("Empty signature set");
  }

  if (sets.length >= MIN_SET_COUNT_TO_BATCH) {
    const batch = sets.map((s) => ({
      msg: s.message,
      publicKey: s.publicKey,
      signature: blstTs.Signature.deserialize(s.signature, blstTs.CoordType.affine),
    }));
    return blstTs.verifyMultipleAggregateSignatures(batch);
  }

  // Below the batching threshold: validate and verify each set concurrently
  const results = await Promise.all(
    sets.map((set) => {
      const signature = blstTs.Signature.deserialize(set.signature, blstTs.CoordType.affine);
      return signature.sigValidate().then(() => blstTs.verify(set.message, set.publicKey, signature));
    })
  );
  return results.every((ok) => ok);
}
10 changes: 5 additions & 5 deletions packages/beacon-node/src/chain/bls/multithread/index.ts
Expand Up @@ -13,8 +13,8 @@ import {ISignatureSet} from "@lodestar/state-transition";
import {QueueError, QueueErrorCode} from "../../../util/queue/index.js";
import {Metrics} from "../../../metrics/index.js";
import {IBlsVerifier, VerifySignatureOpts} from "../interface.js";
import {getAggregatedPubkey, getAggregatedPubkeysCount} from "../utils.js";
import {verifySignatureSetsMaybeBatch} from "../maybeBatch.js";
import {getAggregatedPubkeySync, getAggregatedPubkeysCount} from "../utils.js";
import {verifySignatureSetsMaybeBatchSync} from "../maybeBatch.js";
import {BlsWorkReq, BlsWorkResult, WorkerData, WorkResultCode} from "./types.js";
import {chunkifyMaximizeChunkSize} from "./utils.js";
import {defaultPoolSize} from "./poolSize.js";
Expand Down Expand Up @@ -155,9 +155,9 @@ export class BlsMultiThreadWorkerPool implements IBlsVerifier {
if (opts.verifyOnMainThread && !this.blsVerifyAllMultiThread) {
const timer = this.metrics?.blsThreadPool.mainThreadDurationInThreadPool.startTimer();
try {
return verifySignatureSetsMaybeBatch(
return verifySignatureSetsMaybeBatchSync(
sets.map((set) => ({
publicKey: getAggregatedPubkey(set),
publicKey: getAggregatedPubkeySync(set),
message: set.signingRoot.valueOf() as Uint8Array,
signature: set.signature,
}))
Expand All @@ -174,7 +174,7 @@ export class BlsMultiThreadWorkerPool implements IBlsVerifier {
this.queueBlsWork({
opts,
sets: setsWorker.map((s) => ({
publicKey: getAggregatedPubkey(s).toBytes(this.format),
publicKey: getAggregatedPubkeySync(s).toBytes(this.format),
message: s.signingRoot,
signature: s.signature,
})),
Expand Down
8 changes: 4 additions & 4 deletions packages/beacon-node/src/chain/bls/multithread/worker.ts
Expand Up @@ -3,7 +3,7 @@ import worker from "node:worker_threads";
import {expose} from "@chainsafe/threads/worker";
import bls from "@chainsafe/bls";
import {CoordType} from "@chainsafe/bls/types";
import {verifySignatureSetsMaybeBatch, SignatureSetDeserialized} from "../maybeBatch.js";
import {verifySignatureSetsMaybeBatchSync, SignatureSetDeserialized} from "../maybeBatch.js";
import {WorkerData, BlsWorkReq, WorkResult, WorkResultCode, SerializedSet, BlsWorkResult} from "./types.js";
import {chunkifyMaximizeChunkSize} from "./utils.js";

Expand Down Expand Up @@ -63,7 +63,7 @@ function verifyManySignatureSets(workReqArr: BlsWorkReq[]): BlsWorkResult {

try {
// Attempt to verify multiple sets at once
const isValid = verifySignatureSetsMaybeBatch(allSets);
const isValid = verifySignatureSetsMaybeBatchSync(allSets);

if (isValid) {
// The entire batch is valid, return success to all
Expand All @@ -78,7 +78,7 @@ function verifyManySignatureSets(workReqArr: BlsWorkReq[]): BlsWorkResult {
}
} catch (e) {
// TODO: Ignore this error expecting that the same error will happen when re-verifying the set individually
// It's not ideal but '@chainsafe/blst' may throw errors on some conditions
// It's not ideal but 'blst-ts-test' may throw errors on some conditions
batchRetries++;
// Re-verify all sigs
nonBatchableSets.push(...batchableChunk);
Expand All @@ -88,7 +88,7 @@ function verifyManySignatureSets(workReqArr: BlsWorkReq[]): BlsWorkResult {

for (const {idx, sets} of nonBatchableSets) {
try {
const isValid = verifySignatureSetsMaybeBatch(sets);
const isValid = verifySignatureSetsMaybeBatchSync(sets);
results[idx] = {code: WorkResultCode.success, result: isValid};
} catch (e) {
results[idx] = {code: WorkResultCode.error, error: e as Error};
Expand Down
7 changes: 4 additions & 3 deletions packages/beacon-node/src/chain/bls/singleThread.ts
Expand Up @@ -14,16 +14,17 @@ export class BlsSingleThreadVerifier implements IBlsVerifier {
async verifySignatureSets(sets: ISignatureSet[]): Promise<boolean> {
this.metrics?.bls.aggregatedPubkeys.inc(getAggregatedPubkeysCount(sets));

const setsAggregated = sets.map((set) => ({
publicKey: getAggregatedPubkey(set),
const keys = await Promise.all(sets.map((set) => getAggregatedPubkey(set)));
const setsAggregated = sets.map((set, i) => ({
publicKey: keys[i],
message: set.signingRoot,
signature: set.signature,
}));

// Count time after aggregating
const startNs = process.hrtime.bigint();

const isValid = verifySignatureSetsMaybeBatch(setsAggregated);
const isValid = await verifySignatureSetsMaybeBatch(setsAggregated);

// Don't use a try/catch, only count run without exceptions
const endNs = process.hrtime.bigint();
Expand Down
16 changes: 15 additions & 1 deletion packages/beacon-node/src/chain/bls/utils.ts
@@ -1,8 +1,9 @@
import blstTs from "blst-ts-test";
import type {PublicKey} from "@chainsafe/bls/types";
import bls from "@chainsafe/bls";
import {ISignatureSet, SignatureSetType} from "@lodestar/state-transition";

export function getAggregatedPubkey(signatureSet: ISignatureSet): PublicKey {
export function getAggregatedPubkeySync(signatureSet: ISignatureSet): PublicKey {
switch (signatureSet.type) {
case SignatureSetType.single:
return signatureSet.pubkey;
Expand All @@ -15,6 +16,19 @@ export function getAggregatedPubkey(signatureSet: ISignatureSet): PublicKey {
}
}

/**
 * Resolve a signature set to a single blst-ts (napi) public key: a single set's
 * key is deserialized directly, an aggregate set's keys are aggregated first.
 *
 * @throws on an unrecognized signature set type
 */
export async function getAggregatedPubkey(signatureSet: ISignatureSet): Promise<blstTs.PublicKey> {
  if (signatureSet.type === SignatureSetType.single) {
    return blstTs.PublicKey.deserialize(signatureSet.pubkey.toBytes());
  }

  if (signatureSet.type === SignatureSetType.aggregate) {
    return blstTs.aggregatePublicKeys(signatureSet.pubkeys.map((pk) => pk.toBytes()));
  }

  throw Error("Unknown signature set type");
}

export function getAggregatedPubkeysCount(signatureSets: ISignatureSet[]): number {
let pubkeysConut = 0;
for (const set of signatureSets) {
Expand Down
@@ -1,4 +1,4 @@
import bls from "@chainsafe/bls";
import bls from "blst-ts-test";
import {
ForkName,
MAX_ATTESTATIONS,
Expand Down Expand Up @@ -70,12 +70,12 @@ export class AggregatedAttestationPool {
return {attestationCount, attestationDataCount};
}

add(
async add(
attestation: phase0.Attestation,
dataRootHex: RootHex,
attestingIndicesCount: number,
committee: ValidatorIndex[]
): InsertOutcome {
): Promise<InsertOutcome> {
const slot = attestation.data.slot;
const lowestPermissibleSlot = this.lowestPermissibleSlot;

Expand Down Expand Up @@ -229,7 +229,7 @@ export class MatchingDataAttestationGroup {
* If it's a subset of an existing attestation, it's not necessary to add to our pool.
* If it's a superset of an existing attestation, remove the existing attestation and add new.
*/
add(attestation: AttestationWithIndex): InsertOutcome {
async add(attestation: AttestationWithIndex): Promise<InsertOutcome> {
const newBits = attestation.attestation.aggregationBits;

const indicesToRemove = [];
Expand All @@ -245,7 +245,7 @@ export class MatchingDataAttestationGroup {

case IntersectResult.Exclusive:
// no intersection
aggregateInto(prevAttestation, attestation);
await aggregateInto(prevAttestation, attestation);
return InsertOutcome.Aggregated;

case IntersectResult.Superset:
Expand Down Expand Up @@ -313,13 +313,16 @@ export class MatchingDataAttestationGroup {
}
}

export function aggregateInto(attestation1: AttestationWithIndex, attestation2: AttestationWithIndex): void {
export async function aggregateInto(
attestation1: AttestationWithIndex,
attestation2: AttestationWithIndex
): Promise<void> {
// Merge bits of attestation2 into attestation1
attestation1.attestation.aggregationBits.mergeOrWith(attestation2.attestation.aggregationBits);

const signature1 = signatureFromBytesNoCheck(attestation1.attestation.signature);
const signature2 = signatureFromBytesNoCheck(attestation2.attestation.signature);
attestation1.attestation.signature = bls.Signature.aggregate([signature1, signature2]).toBytes();
const signature1 = await signatureFromBytesNoCheck(attestation1.attestation.signature);
const signature2 = await signatureFromBytesNoCheck(attestation2.attestation.signature);
attestation1.attestation.signature = (await bls.aggregateSignatures([signature1, signature2])).serialize();
}

/**
Expand Down
24 changes: 13 additions & 11 deletions packages/beacon-node/src/chain/opPools/attestationPool.ts
@@ -1,6 +1,5 @@
import bls from "blst-ts-test";
import {phase0, Slot, Root, RootHex} from "@lodestar/types";
import {PointFormat, Signature} from "@chainsafe/bls/types";
import bls from "@chainsafe/bls";
import {BitArray, toHexString} from "@chainsafe/ssz";
import {MapDef} from "@lodestar/utils";
import {IClock} from "../../util/clock.js";
Expand All @@ -26,7 +25,7 @@ const MAX_ATTESTATIONS_PER_SLOT = 16_384;
type AggregateFast = {
data: phase0.Attestation["data"];
aggregationBits: BitArray;
signature: Signature;
signature: bls.Signature;
};

/** Hex string of DataRoot `TODO` */
Expand Down Expand Up @@ -93,7 +92,7 @@ export class AttestationPool {
* - Valid committeeIndex
* - Valid data
*/
add(attestation: phase0.Attestation, attDataRootHex: RootHex): InsertOutcome {
async add(attestation: phase0.Attestation, attDataRootHex: RootHex): Promise<InsertOutcome> {
const slot = attestation.data.slot;
const lowestPermissibleSlot = this.lowestPermissibleSlot;

Expand All @@ -120,7 +119,7 @@ export class AttestationPool {
return aggregateAttestationInto(aggregate, attestation);
} else {
// Create new aggregate
aggregateByRoot.set(attDataRootHex, attestationToAggregate(attestation));
aggregateByRoot.set(attDataRootHex, await attestationToAggregate(attestation));
return InsertOutcome.NewData;
}
}
Expand Down Expand Up @@ -179,7 +178,10 @@ export class AttestationPool {
/**
* Aggregate a new contribution into `aggregate` mutating it
*/
function aggregateAttestationInto(aggregate: AggregateFast, attestation: phase0.Attestation): InsertOutcome {
async function aggregateAttestationInto(
aggregate: AggregateFast,
attestation: phase0.Attestation
): Promise<InsertOutcome> {
const bitIndex = attestation.aggregationBits.getSingleTrueBit();

// Should never happen, attestations are verified against this exact condition before
Expand All @@ -192,22 +194,22 @@ function aggregateAttestationInto(aggregate: AggregateFast, attestation: phase0.
}

aggregate.aggregationBits.set(bitIndex, true);
aggregate.signature = bls.Signature.aggregate([
aggregate.signature = await bls.aggregateSignatures([
aggregate.signature,
signatureFromBytesNoCheck(attestation.signature),
await signatureFromBytesNoCheck(attestation.signature),
]);
return InsertOutcome.Aggregated;
}

/**
* Format `contribution` into an efficient `aggregate` to add more contributions in with aggregateContributionInto()
*/
function attestationToAggregate(attestation: phase0.Attestation): AggregateFast {
async function attestationToAggregate(attestation: phase0.Attestation): Promise<AggregateFast> {
return {
data: attestation.data,
// clone because it will be mutated
aggregationBits: attestation.aggregationBits.clone(),
signature: signatureFromBytesNoCheck(attestation.signature),
signature: await signatureFromBytesNoCheck(attestation.signature),
};
}

Expand All @@ -218,6 +220,6 @@ function fastToAttestation(aggFast: AggregateFast): phase0.Attestation {
return {
data: aggFast.data,
aggregationBits: aggFast.aggregationBits,
signature: aggFast.signature.toBytes(PointFormat.compressed),
signature: aggFast.signature.serialize(true),
};
}