From fcab224a9ec79799ec7067e94a5e567ff91b83d4 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 24 Nov 2022 13:43:51 +0100 Subject: [PATCH 01/28] initial work on simple threshold decryption --- tpke-wasm/benches/benchmarks.rs | 8 ++++++-- tpke-wasm/src/lib.rs | 2 +- tpke/src/combine.rs | 36 +++++++++++++++++++++++++++++++++ tpke/src/decryption.rs | 3 +++ 4 files changed, 46 insertions(+), 3 deletions(-) diff --git a/tpke-wasm/benches/benchmarks.rs b/tpke-wasm/benches/benchmarks.rs index 470db5d6..8054ade7 100644 --- a/tpke-wasm/benches/benchmarks.rs +++ b/tpke-wasm/benches/benchmarks.rs @@ -3,7 +3,9 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; pub fn bench_encrypt_combine(c: &mut Criterion) { use tpke_wasm::*; - fn bench_encrypt(num_shares: usize, threshold: usize) -> impl Fn() { + fn bench_encrypt( + num_shares: usize, + threshold: usize) -> impl Fn() { let message = "my-secret-message".as_bytes().to_vec(); let aad = "my-aad".as_bytes().to_vec(); let setup = Setup::new(threshold, num_shares); @@ -14,7 +16,9 @@ pub fn bench_encrypt_combine(c: &mut Criterion) { } } - fn bench_combine(num_shares: usize, threshold: usize) -> impl Fn() { + fn bench_combine( + num_shares: usize, + threshold: usize) -> impl Fn() { let message = "my-secret-message".as_bytes().to_vec(); let aad = "my-aad".as_bytes().to_vec(); let setup = Setup::new(threshold, num_shares); diff --git a/tpke-wasm/src/lib.rs b/tpke-wasm/src/lib.rs index 9f926ffe..bc187327 100644 --- a/tpke-wasm/src/lib.rs +++ b/tpke-wasm/src/lib.rs @@ -170,7 +170,7 @@ impl Setup { let mut rng = rand::thread_rng(); let (public_key, private_key, contexts) = - tpke::setup_fast::(threshold, shares_num, &mut rng); + tpke::setup_fast::(threshold, shares_num, &mut rng); let private_contexts = contexts .clone() .into_iter() diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index bc0b655c..e8ea970b 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -71,6 +71,42 @@ pub fn lagrange_basis_at( lagrange_coeffs } +pub fn prepare_combine_simple( + public_contexts: &[PublicDecryptionContext], + private_contexts: &[PrivateDecryptionContextSimple], +) -> Vec { + let mut lagrange_coeffs = vec![]; + + let shares = private_contexts + .iter() + .map(|priv_ctxt| { + let pub_ctxt = + &priv_ctxt.public_decryption_contexts[priv_ctxt.index]; + let x = pub_ctxt.domain[0]; // there's just one + // let y = context.private_key_share.private_key_shares[0]; // there's just one + // y = private_key_shares * b_inv + // why use b_inv here and not h^{-1}? 
revise this + // let y = pub_ctxt.blinded_key_shares.blinded_key_shares[0] + // .mul(priv_ctxt.b_inv); + // TODO: No idea why this works + let y = E::Fr::one(); + (x, y) + }) + .collect::>(); + + for (x_j, _) in shares.clone() { + let mut prod = E::Fr::one(); + for (x_m, _) in shares.clone() { + if x_j != x_m { + // x_i = 0 + prod *= (x_m) / (x_m - x_j); + } + } + lagrange_coeffs.push(prod); + } + lagrange_coeffs +} + pub fn share_combine_fast( shares: &[DecryptionShareFast], prepared_key_shares: &[E::G2Prepared], diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs index b00f7379..06a2ce32 100644 --- a/tpke/src/decryption.rs +++ b/tpke/src/decryption.rs @@ -3,6 +3,9 @@ use crate::*; +use ark_ec::ProjectiveCurve; + + #[derive(Debug, Clone)] pub struct DecryptionShareFast { pub decrypter_index: usize, From bd5d74385017b80324c2c9d882f2f727ece3bef5 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 21 Dec 2022 11:05:42 +0100 Subject: [PATCH 02/28] calculate lagrange using private context --- tpke/src/combine.rs | 24 +++++++----------------- 1 file changed, 7 insertions(+), 17 deletions(-) diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index e8ea970b..81347a24 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -72,31 +72,21 @@ pub fn lagrange_basis_at( } pub fn prepare_combine_simple( - public_contexts: &[PublicDecryptionContext], private_contexts: &[PrivateDecryptionContextSimple], ) -> Vec { let mut lagrange_coeffs = vec![]; - let shares = private_contexts + let shares_x = private_contexts .iter() - .map(|priv_ctxt| { - let pub_ctxt = - &priv_ctxt.public_decryption_contexts[priv_ctxt.index]; - let x = pub_ctxt.domain[0]; // there's just one - // let y = context.private_key_share.private_key_shares[0]; // there's just one - // y = private_key_shares * b_inv - // why use b_inv here and not h^{-1}? 
revise this - // let y = pub_ctxt.blinded_key_shares.blinded_key_shares[0] - // .mul(priv_ctxt.b_inv); - // TODO: No idea why this works - let y = E::Fr::one(); - (x, y) - }) + .map(|ctxt| + // There's just one x in the domain: + ctxt.public_decryption_contexts[ctxt.index].domain[0] + ) .collect::>(); - for (x_j, _) in shares.clone() { + for x_j in shares_x.clone() { let mut prod = E::Fr::one(); - for (x_m, _) in shares.clone() { + for x_m in shares_x.clone() { if x_j != x_m { // x_i = 0 prod *= (x_m) / (x_m - x_j); From 0c6b9c560d934c3b4edab8866a4234461bc70274 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 21 Dec 2022 11:57:16 +0100 Subject: [PATCH 03/28] calculate lagrange using public context --- tpke/src/combine.rs | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index 81347a24..a8fd7875 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -72,24 +72,17 @@ pub fn lagrange_basis_at( } pub fn prepare_combine_simple( - private_contexts: &[PrivateDecryptionContextSimple], + context: &[PublicDecryptionContextSimple], ) -> Vec { + let shares_x = &context.iter().map(|ctxt| ctxt.domain).collect::>(); + // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm let mut lagrange_coeffs = vec![]; - - let shares_x = private_contexts - .iter() - .map(|ctxt| - // There's just one x in the domain: - ctxt.public_decryption_contexts[ctxt.index].domain[0] - ) - .collect::>(); - - for x_j in shares_x.clone() { + for x_j in shares_x { let mut prod = E::Fr::one(); - for x_m in shares_x.clone() { + for x_m in shares_x { if x_j != x_m { - // x_i = 0 - prod *= (x_m) / (x_m - x_j); + // In this formula x_i = 0, hence numerator is x_m + prod *= (*x_m) / (*x_m - *x_j); } } lagrange_coeffs.push(prod); From 1b260cc97fabf263f88b2f0db1e0ff8cded3928d Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Tue, 27 Dec 2022 17:05:40 +0100 Subject: [PATCH 04/28] wip --- ferveo/src/dkg/pv.rs | 2 +- ferveo/src/lib.rs | 121 +++++++++++++++++++++++++++++++++++++++++ ferveo/src/vss/pvss.rs | 34 ++++++++++-- 3 files changed, 151 insertions(+), 6 deletions(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 34ed7565..a8208150 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -272,7 +272,7 @@ pub(crate) mod test_common { ValidatorSet::new( (0..4) .map(|i| TendermintValidator { - power: i, + power: i, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works. 
address: format!("validator_{}", i), public_key: keypairs[i as usize].public(), }) diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 6b9f509f..b3c35c16 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -4,6 +4,7 @@ pub mod msg; pub mod vss; pub mod primitives; +use itertools::izip; pub use primitives::*; use ferveo_common::Rng; @@ -30,3 +31,123 @@ use ark_ec::PairingEngine; use ark_ff::PrimeField; use measure_time::print_time; + +pub fn share_combine_simple( + shares: &[E::Fqk], + lagrange: &[E::Fr], + // prepared_key_shares: &[E::G2Affine], +) -> E::Fqk { + let mut product_of_shares = E::Fqk::one(); + + // Sum of C_i^{L_i} + for (c_i, alpha_i) in izip!(shares.iter(), lagrange.iter()) { + // Exponentiation by alpha_i + let ss = c_i.pow(alpha_i.into_repr()); + product_of_shares *= ss; + } + + product_of_shares +} + +#[cfg(test)] +mod test_dkg_full { + use super::*; + + use crate::dkg::pv::test_common::*; + use ark_bls12_381::Bls12_381 as EllipticCurve; + use ark_ff::UniformRand; + use ferveo_common::{TendermintValidator, ValidatorSet}; + use group_threshold_cryptography as tpke; + + type E = ark_bls12_381::Bls12_381; + + /// Test happy flow for a full DKG with simple threshold decryption variant + #[test] + fn test_dkg_simple_decryption_variant() { + // + // The following is copied from other tests + // + + let rng = &mut ark_std::test_rng(); + let dkg = setup_dealt_dkg(); + let aggregate = aggregate(&dkg); + // check that a polynomial of the correct degree was created + assert_eq!(aggregate.coeffs.len(), 5); + // check that the correct number of shares were created + assert_eq!(aggregate.shares.len(), 4); + // check that the optimistic verify returns true + assert!(aggregate.verify_optimistic()); + // check that the full verify returns true + assert!(aggregate.verify_full(&dkg, rng)); + // check that the verification of aggregation passes + assert_eq!( + aggregate + .verify_aggregation(&dkg, rng) + .expect("Test failed"), + 6 + ); + + // + // Now, we start the actual test + // + + // At this point, we have a DKG that has been dealt and aggregated + // We now want to test the decryption of a message + + // First, we encrypt a message using a DKG public key + + let msg: &[u8] = "abc".as_bytes(); + let aad: &[u8] = "my-aad".as_bytes(); + let public_key = dkg.final_key(); + let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); + + // TODO: Update test utils so that we can easily get a validator keypair for each validator + let validator_keypairs = gen_keypairs(); + + // TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security + + // Each validator computes a decryption share + let decryption_shares = validator_keypairs.iter().map(|keypair| { + // let decryption_shares = aggregate + let decryption_shares = aggregate + .shares[0] + .iter() + .map(|share| { + // TODO: In simple decryption variant, we only have one share per validator + // assert_eq!(z_i.len(), 1); + let z_i = share.mul(keypair.decryption_key); + + // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let u = ciphertext.commitment; + let c_i = E::pairing(u, z_i); + c_i + }) + .collect::>(); + + // TODO: In simple decryption variant, we only have one share per validator + // assert_eq!(decryption_shares.len(), 1); + // decryption_shares[0] + decryption_shares + }); + + + // let s = share_combine_simple::(&aggregate.shares, &aggregate.coeffs); + + /* + TODO: 
This variant seems to be outdated/unused in simple threshold decryption variant + + // Following section 4.4.8 of the paper, we need to compute the following: + let decryption_shares = validator_keypairs.iter().map(|validator| { + // TODO: Check the validity of (U, W) + + // Compute the decryption share D_{i,j} = [dk_j^{-1}]*U_i + // We only have one U in this case + let u = ciphertext.commitment; + let dk_j = validator.decryption_key; + let dk_j_inv = dk_j.inverse().unwrap(); + let d_ij = u.mul(dk_j_inv); + d_ij + }); + */ + } +} diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 451caae3..9c8a79b3 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -42,7 +42,7 @@ pub struct PubliclyVerifiableParams { /// 2/3 the total), this will be aggregated into a final key #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] pub struct PubliclyVerifiableSS { - /// Feldman commitment to the VSS polynomial, F = g^{\phi} + /// Used in Feldman commitment to the VSS polynomial, F = g^{\phi} pub coeffs: Vec, /// The shares to be dealt to each validator @@ -66,11 +66,13 @@ impl PubliclyVerifiableSS { dkg: &PubliclyVerifiableDkg, rng: &mut R, ) -> Result { + // Our random polynomial, \phi(x) = s + \sum_{i=1}^{t-1} a_i x^i let mut phi = DensePolynomial::::rand( (dkg.params.total_weight - dkg.params.security_threshold) as usize, rng, ); - phi.coeffs[0] = *s; + phi.coeffs[0] = *s; // setting the first coefficient to secret value + // Evaluations of the polynomial over the domain let evals = phi.evaluate_over_domain_by_ref(dkg.domain); // commitment to coeffs, F_i let coeffs = fast_multiexp(&phi.coeffs, dkg.pvss_params.g); @@ -92,7 +94,10 @@ impl PubliclyVerifiableSS { )); } //phi.zeroize(); // TODO zeroize? + // TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript + // Sigma is a proof of knowledge of the secret, sigma = h^s let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve + // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge let vss = Self { coeffs, shares, @@ -106,10 +111,15 @@ impl PubliclyVerifiableSS { /// i.e. we optimistically do not check the commitment. This is deferred /// until the aggregation step pub fn verify_optimistic(&self) -> bool { + // We're only checking the proof of knowledge here, sigma ?= h^s + // "Does the first coefficient of the secret polynomial match the proof of knowledge?" E::pairing( - self.coeffs[0].into_projective(), - E::G2Affine::prime_subgroup_generator(), - ) == E::pairing(E::G1Affine::prime_subgroup_generator(), self.sigma) + self.coeffs[0].into_projective(), // F_0 = g^s + E::G2Affine::prime_subgroup_generator(), // h + ) == E::pairing( + E::G1Affine::prime_subgroup_generator(), // g + self.sigma // h^s + ) } /// Part of checking the validity of an aggregated PVSS transcript @@ -127,8 +137,11 @@ impl PubliclyVerifiableSS { print_time!("commitment fft"); dkg.domain.fft_in_place(&mut commitment); + // Each validator checks that their share is correct dkg.validators.iter().zip(self.shares.iter()).all( |(validator, shares)| { + // ek is the public key of the validator + // TODO: Is that the ek = [dk]H key? 
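// Sketch of the relation verified below, assuming ek_i = [dk_i]*H as asked in the TODO above
// (that identity is an assumption here, not something stated in this patch): the dealer published
// the commitment A_i = [phi(omega_i)]*G (the FFT of the coefficient commitments) and the encrypted
// share Y_i = [phi(omega_i)]*ek_i, so bilinearity gives
// e(G, Y_i) = e(G, ek_i)^{phi(omega_i)} = e(A_i, ek_i), which is the pairing equality at the end
// of this closure.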
let ek = validator .validator .public_key @@ -138,14 +151,19 @@ impl PubliclyVerifiableSS { let mut powers_of_alpha = alpha; let mut y = E::G2Projective::zero(); let mut a = E::G1Projective::zero(); + // Validator checks checks aggregated shares against commitment for (y_i, a_i) in shares.iter().zip_eq( commitment[validator.share_start..validator.share_end] .iter(), ) { + // We iterate over shares (y_i) and commitment (a_i) + // TODO: Check #3 is missing + // See #3 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf y += y_i.mul(powers_of_alpha.into_repr()); a += a_i.mul(powers_of_alpha.into_repr()); powers_of_alpha *= alpha; } + // See #4 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf // Y = \sum_i y_i \alpha^i // A = \sum_i a_i \alpha^i // e(G,Y) = e(A, ek) @@ -168,6 +186,8 @@ impl PubliclyVerifiableSS { ) -> Result { print_time!("PVSS verify_aggregation"); self.verify_full(dkg, rng); + // Now, we verify that the aggregated PVSS transcript is a valid aggregation + // If it is, we return the total weights of the PVSS transcripts let mut y = E::G1Projective::zero(); let mut weight = 0u32; for (dealer, pvss) in dkg.vss.iter() { @@ -200,6 +220,10 @@ pub fn aggregate( .iter() .map(|a| batch_to_projective(a)) .collect::>(); + + // So now we're iterating over the PVSS instances, and adding their coefficients and shares, and their sigma + // sigma is the sum of all the sigma_i, which is the proof of knowledge of the secret polynomial + // Aggregating is just adding the corresponding values in pvss instances, so pvss = pvss + pvss_j for (_, next) in pvss_iter { sigma = sigma.add(next.sigma); coeffs From efa6150f3aa07e262290392f41dfa37c83a7a4a4 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 28 Dec 2022 17:15:00 +0100 Subject: [PATCH 05/28] incorrect length of decrypted shares after pvss combination --- ferveo/src/dkg/pv.rs | 12 ++- ferveo/src/lib.rs | 125 ++++++++++++++++++++++++-------- ferveo/src/vss/pvss.rs | 65 +++++++++++++++-- tpke-wasm/benches/benchmarks.rs | 8 +- tpke-wasm/src/lib.rs | 2 +- tpke/src/decryption.rs | 1 - 6 files changed, 168 insertions(+), 45 deletions(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index a8208150..58c01817 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -305,10 +305,12 @@ pub(crate) mod test_common { /// /// The correctness of this function is tested in the module [`test_dealing`] pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg { + let n = 4; let rng = &mut ark_std::test_rng(); // gather everyone's transcripts let mut transcripts = vec![]; - for i in 0..4 { + for i in 0..n { + // All of the dkg instances have the same validators let mut dkg = setup_dkg(i); transcripts.push(dkg.share(rng).expect("Test failed")); } @@ -317,11 +319,17 @@ pub(crate) mod test_common { // iterate over transcripts from lowest weight to highest for (sender, pvss) in transcripts.into_iter().rev().enumerate() { dkg.apply_message( - dkg.validators[3 - sender].validator.clone(), + dkg.validators[n - 1 - sender].validator.clone(), pvss, ) .expect("Setup failed"); } + // At this point, the dkg should contain n transcripts, each containing n shares + // TODO: Remove this check + assert_eq!(dkg.vss.len(), n); + for i in 0..n { + assert_eq!(dkg.vss[&(i as u32)].shares.len(), n); + } dkg } } diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index b3c35c16..953eeab2 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -1,10 +1,12 @@ #![allow(unused_imports)] + pub mod dkg; pub mod msg; pub mod vss; pub mod primitives; -use 
itertools::izip; + +use itertools::{izip, zip_eq}; pub use primitives::*; use ferveo_common::Rng; @@ -32,15 +34,33 @@ use ark_ff::PrimeField; use measure_time::print_time; +pub fn prepare_combine_simple( + shares_x: &[E::Fr], +) -> Vec { + // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm + let mut lagrange_coeffs = vec![]; + for x_j in shares_x { + let mut prod = E::Fr::one(); + for x_m in shares_x { + if x_j != x_m { + // In this formula x_i = 0, hence numerator is x_m + prod *= (*x_m) / (*x_m - *x_j); + } + } + lagrange_coeffs.push(prod); + } + lagrange_coeffs +} + pub fn share_combine_simple( - shares: &[E::Fqk], - lagrange: &[E::Fr], + shares: &Vec, + lagrange_coeffs: &Vec, // prepared_key_shares: &[E::G2Affine], ) -> E::Fqk { let mut product_of_shares = E::Fqk::one(); - // Sum of C_i^{L_i} - for (c_i, alpha_i) in izip!(shares.iter(), lagrange.iter()) { + // Sum of C_i^{L_i}z + for (c_i, alpha_i) in zip_eq(shares.iter(), lagrange_coeffs.iter()) { // Exponentiation by alpha_i let ss = c_i.pow(alpha_i.into_repr()); product_of_shares *= ss; @@ -58,6 +78,7 @@ mod test_dkg_full { use ark_ff::UniformRand; use ferveo_common::{TendermintValidator, ValidatorSet}; use group_threshold_cryptography as tpke; + use itertools::{zip_eq, Itertools}; type E = ark_bls12_381::Bls12_381; @@ -106,32 +127,76 @@ mod test_dkg_full { // TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security - // Each validator computes a decryption share - let decryption_shares = validator_keypairs.iter().map(|keypair| { - // let decryption_shares = aggregate - let decryption_shares = aggregate - .shares[0] - .iter() - .map(|share| { - // TODO: In simple decryption variant, we only have one share per validator - // assert_eq!(z_i.len(), 1); - let z_i = share.mul(keypair.decryption_key); - - // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares - let u = ciphertext.commitment; - let c_i = E::pairing(u, z_i); - c_i - }) - .collect::>(); - - // TODO: In simple decryption variant, we only have one share per validator - // assert_eq!(decryption_shares.len(), 1); - // decryption_shares[0] - decryption_shares - }); - + // - // let s = share_combine_simple::(&aggregate.shares, &aggregate.coeffs); + // Each validator attempts to aggregate and decrypt the secret shares + // let decryption_shares = validator_keypairs + validator_keypairs + .iter() + .enumerate() + // Assuming that the ordering of the validator keypairs is the same as the ordering of the validators in the validator set + // TODO: Check this assumption + .for_each(|(validator_i, keypair)| { + let decrypted_shares: Vec> = + shares_for_validator(validator_i, &dkg) + .iter() + // Each "share" the validator has is actually a vector of shares + // This because of domain partitioning - the amount of shares is the same as the validator's "power" + .map(|share| { + // Decrypt the share by decrypting each of the G2 elements within ShareEncryptions + share + .iter() + .map(|s| s.mul(keypair.decryption_key)) + .collect() + }) + .collect(); + + let combined_shares = decrypted_shares.iter().fold( + vec![ + ark_bls12_381::G2Projective::zero(); + decrypted_shares[0].len() + ], + |acc, share| { + zip_eq(acc, share).map(|(a, b)| a + b).collect() + }, + ); + + let decryption_shares = combined_shares + .iter() + .map(|z_i| { + // Validator decryption of private key 
shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let u = ciphertext.commitment; + let c_i = E::pairing(u, *z_i); + c_i + }) + .collect::>(); + + let shares_x = &dkg.domain.elements().collect::>(); + let lagrange_coeffs = prepare_combine_simple::(&shares_x); + + let s = + share_combine_simple::(&decryption_shares, &lagrange_coeffs); + + let plaintext = + tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); + assert_eq!(plaintext, msg); + }); + + // TODO: Perform decryption here! + + // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts + // .flatten() + // .collect(); + + // let shares_x = &dkg.domain.elements().collect::>(); + // let lagrange_coeffs = prepare_combine_simple::(&shares_x); + // + // let s = + // share_combine_simple::(&decryption_shares, &lagrange_coeffs); + // + // let plaintext = + // tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); + // assert_eq!(plaintext, msg); /* TODO: This variant seems to be outdated/unused in simple threshold decryption variant diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 9c8a79b3..fb8f552c 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -8,20 +8,23 @@ use ark_ec::PairingEngine; use ark_ff::UniformRand; use ark_serialize::*; use ferveo_common::PublicKey; -use itertools::Itertools; +use itertools::{zip_eq, Itertools}; use subproductdomain::fast_multiexp; /// These are the blinded evaluations of weight shares of a single random polynomial pub type ShareEncryptions = Vec<::G2Affine>; + /// Marker struct for unaggregated PVSS transcripts #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] pub struct Unaggregated; + /// Marker struct for aggregated PVSS transcripts #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] pub struct Aggregated; /// Trait gate used to add extra methods to aggregated PVSS transcripts pub trait Aggregate {} + /// Apply trait gate to Aggregated marker struct impl Aggregate for Aggregated {} @@ -72,7 +75,7 @@ impl PubliclyVerifiableSS { rng, ); phi.coeffs[0] = *s; // setting the first coefficient to secret value - // Evaluations of the polynomial over the domain + // Evaluations of the polynomial over the domain let evals = phi.evaluate_over_domain_by_ref(dkg.domain); // commitment to coeffs, F_i let coeffs = fast_multiexp(&phi.coeffs, dkg.pvss_params.g); @@ -97,7 +100,7 @@ impl PubliclyVerifiableSS { // TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript // Sigma is a proof of knowledge of the secret, sigma = h^s let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve - // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge + // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge let vss = Self { coeffs, shares, @@ -117,8 +120,8 @@ impl PubliclyVerifiableSS { self.coeffs[0].into_projective(), // F_0 = g^s E::G2Affine::prime_subgroup_generator(), // h ) == E::pairing( - E::G1Affine::prime_subgroup_generator(), // g - self.sigma // h^s + E::G1Affine::prime_subgroup_generator(), // g + self.sigma, // h^s ) } @@ -252,6 +255,57 @@ pub fn aggregate( } } +// pub fn aggregate_for_decryption( +// dkg: &PubliclyVerifiableDkg, +// ) -> ShareEncryptions { +// let aggregate = dkg +// .vss +// .iter() +// .map(|(_, pvss)| { 
+// assert_eq!(dkg.validators.len(), pvss.shares.len()); +// +// let shares = pvss +// .shares +// .iter() +// .map(|a| batch_to_projective(a)) +// .collect::>(); +// +// // Combine PVSS transcripts into a share aggregate +// let mut share_iter = shares.iter(); +// let first_share = share_iter.next().unwrap(); +// share_iter +// .fold(first_share, |acc, share| { +// &zip_eq(acc, share) +// .map(|(a, b)| *a + *b) +// .collect::>() +// }) +// .iter() +// .map(|a| a.into_affine()) +// .collect::>() +// }) +// .collect::>>(); +// +// E::G2Projective::batch_normalization_into_affine(&aggregate) +// } + +/// Returns ShareEncryptions from DKG PVSS transcripts for a selected validator +pub fn shares_for_validator( + validator: usize, + dkg: &PubliclyVerifiableDkg, +) -> Vec> { + // DKG contains multiple PVSS transcripts, one for each dealer + dkg.vss + .iter() + .map(|(_, pvss)| { + // Each PVSS transcript contains multiple shares, one for each validator + assert_eq!(dkg.validators.len(), pvss.shares.len()); + pvss.shares[validator].clone() + }) + // Each validator has a share from each PVSS transcript + // One share is represented by ShareEncryptions, which is a vector of G2 points + .collect::>>() +} + #[cfg(test)] mod test_pvss { use super::*; @@ -260,6 +314,7 @@ mod test_pvss { use ark_bls12_381::Bls12_381 as EllipticCurve; use ark_ff::UniformRand; use ferveo_common::{TendermintValidator, ValidatorSet}; + type Fr = ::Fr; type G1 = ::G1Affine; type G2 = ::G2Affine; diff --git a/tpke-wasm/benches/benchmarks.rs b/tpke-wasm/benches/benchmarks.rs index 8054ade7..470db5d6 100644 --- a/tpke-wasm/benches/benchmarks.rs +++ b/tpke-wasm/benches/benchmarks.rs @@ -3,9 +3,7 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; pub fn bench_encrypt_combine(c: &mut Criterion) { use tpke_wasm::*; - fn bench_encrypt( - num_shares: usize, - threshold: usize) -> impl Fn() { + fn bench_encrypt(num_shares: usize, threshold: usize) -> impl Fn() { let message = "my-secret-message".as_bytes().to_vec(); let aad = "my-aad".as_bytes().to_vec(); let setup = Setup::new(threshold, num_shares); @@ -16,9 +14,7 @@ pub fn bench_encrypt_combine(c: &mut Criterion) { } } - fn bench_combine( - num_shares: usize, - threshold: usize) -> impl Fn() { + fn bench_combine(num_shares: usize, threshold: usize) -> impl Fn() { let message = "my-secret-message".as_bytes().to_vec(); let aad = "my-aad".as_bytes().to_vec(); let setup = Setup::new(threshold, num_shares); diff --git a/tpke-wasm/src/lib.rs b/tpke-wasm/src/lib.rs index bc187327..9f926ffe 100644 --- a/tpke-wasm/src/lib.rs +++ b/tpke-wasm/src/lib.rs @@ -170,7 +170,7 @@ impl Setup { let mut rng = rand::thread_rng(); let (public_key, private_key, contexts) = - tpke::setup_fast::(threshold, shares_num, &mut rng); + tpke::setup_fast::(threshold, shares_num, &mut rng); let private_contexts = contexts .clone() .into_iter() diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs index 06a2ce32..910ff2d8 100644 --- a/tpke/src/decryption.rs +++ b/tpke/src/decryption.rs @@ -5,7 +5,6 @@ use crate::*; use ark_ec::ProjectiveCurve; - #[derive(Debug, Clone)] pub struct DecryptionShareFast { pub decrypter_index: usize, From ab2857d7d30627753ca2ae2a3550284d73d56fec Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 29 Dec 2022 12:29:27 +0100 Subject: [PATCH 06/28] initial removal of share partitioning --- ferveo-common/src/lib.rs | 17 +--- ferveo/benches/benchmarks/pvdkg.rs | 2 +- ferveo/examples/pvdkg.rs | 2 +- ferveo/src/dkg.rs | 20 ++--- ferveo/src/dkg/common.rs | 62 
+++----------- ferveo/src/dkg/pv.rs | 128 +++++++++-------------------- ferveo/src/lib.rs | 3 +- ferveo/src/vss/pvss.rs | 28 ++++--- 8 files changed, 79 insertions(+), 183 deletions(-) diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs index b4c32651..e443e876 100644 --- a/ferveo-common/src/lib.rs +++ b/ferveo-common/src/lib.rs @@ -64,24 +64,13 @@ impl ValidatorSet { #[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct Validator { pub validator: TendermintValidator, - pub weight: u32, - pub share_start: usize, - pub share_end: usize, + pub share_index: usize, } impl PartialEq for Validator { fn eq(&self, other: &Self) -> bool { - ( - &self.validator, - self.weight, - self.share_start, - self.share_end, - ) == ( - &other.validator, - other.weight, - other.share_start, - other.share_end, - ) + (&self.validator, self.share_index) + == (&other.validator, other.share_index) } } diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index 02df0341..67803331 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -51,7 +51,7 @@ pub fn gen_validators( ValidatorSet::new( (0..keypairs.len()) .map(|i| TendermintValidator { - power: i as u64, + power: 1,// TODO: Remove it. //i as u64, address: format!("validator_{}", i), public_key: keypairs[i].public(), }) diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs index c1dcf071..b741e622 100644 --- a/ferveo/examples/pvdkg.rs +++ b/ferveo/examples/pvdkg.rs @@ -47,7 +47,7 @@ pub fn setup_dkg( Params { tau: 0, security_threshold: shares / 3, - total_weight: shares, + shares_num: shares, retry_after: 1, }, me, diff --git a/ferveo/src/dkg.rs b/ferveo/src/dkg.rs index daba9172..c395ff66 100644 --- a/ferveo/src/dkg.rs +++ b/ferveo/src/dkg.rs @@ -23,9 +23,9 @@ pub use pv::*; #[derive(Copy, Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct Params { pub tau: u64, - pub security_threshold: u32, // threshold - pub total_weight: u32, // total weight - pub retry_after: u32, + pub security_threshold: u32, + pub shares_num: u32, + pub retry_after: u32, // TODO: Remove. Not relevant in our scheme. 
} #[derive(Clone, Debug, Eq, PartialEq)] @@ -36,7 +36,7 @@ pub enum PvssScheduler { #[derive(Debug, Clone)] pub enum DkgState { - Sharing { accumulated_weight: u32, block: u32 }, + Sharing { accumulated_shares: u32, block: u32 }, Dealt, Success { final_key: E::G1Affine }, Invalid, @@ -50,12 +50,12 @@ impl CanonicalSerialize for DkgState { ) -> Result<(), SerializationError> { match self { Self::Sharing { - accumulated_weight, + accumulated_shares, block, } => { CanonicalSerialize::serialize(&0u8, &mut writer)?; CanonicalSerialize::serialize( - &(*accumulated_weight, *block), + &(*accumulated_shares, *block), &mut writer, ) } @@ -72,11 +72,11 @@ impl CanonicalSerialize for DkgState { fn serialized_size(&self) -> usize { match self { Self::Sharing { - accumulated_weight, + accumulated_shares, block, } => { 0u8.serialized_size() - + (*accumulated_weight, *block).serialized_size() + + (*accumulated_shares, *block).serialized_size() } Self::Dealt => 1u8.serialized_size(), Self::Success { final_key } => { @@ -93,12 +93,12 @@ impl CanonicalDeserialize for DkgState { let variant = ::deserialize(&mut reader)?; match variant { 0 => { - let (accumulated_weight, block) = + let (accumulated_shares, block) = <(u32, u32) as CanonicalDeserialize>::deserialize( &mut reader, )?; Ok(Self::Sharing { - accumulated_weight, + accumulated_shares, block, }) } diff --git a/ferveo/src/dkg/common.rs b/ferveo/src/dkg/common.rs index b69c314f..55d65abb 100644 --- a/ferveo/src/dkg/common.rs +++ b/ferveo/src/dkg/common.rs @@ -2,58 +2,16 @@ use crate::*; use ferveo_common::ValidatorSet; use itertools::izip; -/// partition_domain takes as input a vector of validators from -/// participants in the DKG, containing their total stake amounts -/// and public address (as Bech32m string) -/// -/// The validators are *assumed to be* stable-sorted by staking weight -/// (so highest weight participants come first), then by address -/// and the DKG share domain is partitioned into continuous segments roughly -/// the same relative size as the staked weight. -/// -/// partition_domain returns a vector of DKG participants -pub fn partition_domain( - params: &Params, - mut validator_set: ValidatorSet, -) -> Result>> { - // Sort participants from greatest to least stake - - // Compute the total amount staked - let total_voting_power = - params.total_weight as f64 / validator_set.total_voting_power() as f64; - - // Compute the weight of each participant rounded down - let mut weights = validator_set +pub fn make_validators( + validator_set: ValidatorSet, +) -> Vec> { + validator_set .validators .iter() - .map(|p| (p.power as f64 * total_voting_power).floor() as u32) - .collect::>(); - - // Add any excess weight to the largest weight participants - let adjust_weight = params - .total_weight - .checked_sub(weights.iter().sum()) - .ok_or_else(|| anyhow!("adjusted weight negative"))? 
- as usize; - for i in &mut weights[0..adjust_weight] { - *i += 1; - } - - let mut allocated_weight = 0usize; - let mut participants = vec![]; - // note that the order of `participants` corresponds to the same - // order as `validator_set` - for (ix, validator) in validator_set.validators.drain(0..).enumerate() { - participants.push(ferveo_common::Validator:: { - validator, - weight: weights[ix], - share_start: allocated_weight, - share_end: allocated_weight + weights[ix] as usize, - }); - allocated_weight = - allocated_weight - .checked_add(weights[ix] as usize) - .ok_or_else(|| anyhow!("allocated weight overflow"))?; - } - Ok(participants) + .enumerate() + .map(|(index, validator)| ferveo_common::Validator:: { + validator: validator.clone(), + share_index: index, + }) + .collect() } diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 58c01817..00550130 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -37,7 +37,7 @@ impl PubliclyVerifiableDkg { ) -> Result { use ark_std::UniformRand; let domain = ark_poly::Radix2EvaluationDomain::::new( - params.total_weight as usize, + params.shares_num as usize, ) .ok_or_else(|| anyhow!("unable to construct domain"))?; @@ -47,8 +47,9 @@ impl PubliclyVerifiableDkg { .binary_search_by(|probe| me.cmp(probe)) .map_err(|_| anyhow!("could not find this validator in the provided validator set"))?; - // partition out weight shares of validators based on their voting power - let validators = partition_domain(¶ms, validator_set)?; + // partition out shares shares of validators based on their voting power + let validators = make_validators(validator_set); + // we further partition out valdiators into partitions to submit pvss transcripts // so as to minimize network load and enable retrying let my_partition = @@ -63,7 +64,7 @@ impl PubliclyVerifiableDkg { vss: BTreeMap::new(), domain, state: DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, }, me, @@ -153,7 +154,7 @@ impl PubliclyVerifiableDkg { match payload { Message::Deal(pvss) if matches!(self.state, DkgState::Sharing{..} | DkgState::Dealt) => { // TODO: If this is two slow, we can convert self.validators to - // an address keyed hashmap after partitioning the weight shares + // an address keyed hashmap after partitioning the shares shares // in the [`new`] method let sender = self.validators .binary_search_by(|probe| sender.cmp(&probe.validator)) @@ -167,13 +168,13 @@ impl PubliclyVerifiableDkg { } } Message::Aggregate(Aggregation{vss, final_key}) if matches!(self.state, DkgState::Dealt) => { - let minimum_weight = self.params.total_weight + let minimum_shares = self.params.shares_num - self.params.security_threshold; - let verified_weight = vss.verify_aggregation(self, rng)?; + let verified_shares = vss.verify_aggregation(self, rng)?; // we reject aggregations that fail to meet the security threshold - if verified_weight < minimum_weight { + if verified_shares < minimum_shares { Err( - anyhow!("Aggregation failed because the verified weight was insufficient") + anyhow!("Aggregation failed because the verified shares was insufficient") ) } else if &self.final_key() == final_key { Ok(()) @@ -203,12 +204,11 @@ impl PubliclyVerifiableDkg { .map_err(|_| anyhow!("dkg received unknown dealer"))?; self.vss.insert(sender as u32, pvss); - // we keep track of the amount of weight seen until the security + // we keep track of the amount of shares seen until the security // threshold is met. 
Then we may change the state of the DKG - if let DkgState::Sharing{ref mut accumulated_weight, ..} = &mut self.state { - *accumulated_weight += self.validators[sender].weight; - if *accumulated_weight - >= self.params.total_weight - self.params.security_threshold { + if let DkgState::Sharing{ref mut accumulated_shares, ..} = &mut self.state { + *accumulated_shares += 1; + if *accumulated_shares >= self.params.shares_num - self.params.security_threshold { self.state = DkgState::Dealt; } } @@ -272,7 +272,7 @@ pub(crate) mod test_common { ValidatorSet::new( (0..4) .map(|i| TendermintValidator { - power: i, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works. + power: 1, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works. address: format!("validator_{}", i), public_key: keypairs[i as usize].public(), }) @@ -292,7 +292,7 @@ pub(crate) mod test_common { Params { tau: 0, security_threshold: 2, - total_weight: 6, + shares_num: 6, retry_after: 2, }, me, @@ -316,7 +316,7 @@ pub(crate) mod test_common { } // our test dkg let mut dkg = setup_dkg(0); - // iterate over transcripts from lowest weight to highest + // iterate over transcripts from lowest shares to highest for (sender, pvss) in transcripts.into_iter().rev().enumerate() { dkg.apply_message( dkg.validators[n - 1 - sender].validator.clone(), @@ -338,60 +338,6 @@ pub(crate) mod test_common { #[cfg(test)] mod test_dkg_init { use super::test_common::*; - - /// Test that validators are correctly sorted - #[test] - fn test_validator_set() { - let rng = &mut ark_std::test_rng(); - let validators = vec![ - TendermintValidator:: { - power: 0, - address: "validator_0".into(), - public_key: ferveo_common::Keypair::::new(rng) - .public(), - }, - TendermintValidator:: { - power: 2, - address: "validator_1".into(), - public_key: ferveo_common::Keypair::::new(rng) - .public(), - }, - TendermintValidator:: { - power: 2, - address: "validator_2".into(), - public_key: ferveo_common::Keypair::::new(rng) - .public(), - }, - TendermintValidator:: { - power: 1, - address: "validator_3".into(), - public_key: ferveo_common::Keypair::::new(rng) - .public(), - }, - ]; - let expected = vec![ - validators[2].clone(), - validators[1].clone(), - validators[3].clone(), - validators[0].clone(), - ]; - let validator_set = ValidatorSet::new(validators); - assert_eq!(validator_set.validators, expected); - let params = Params { - tau: 0, - security_threshold: 2, - total_weight: 6, - retry_after: 2, - }; - let validator_set: Vec> = - partition_domain(¶ms, validator_set) - .expect("Test failed") - .iter() - .map(|v| v.validator.clone()) - .collect(); - assert_eq!(validator_set, expected); - } - /// Test that dkg fails to start if the `me` input /// is not in the validator set #[test] @@ -404,7 +350,7 @@ mod test_dkg_init { Params { tau: 0, security_threshold: 4, - total_weight: 6, + shares_num: 6, retry_after: 2, }, TendermintValidator:: { @@ -452,7 +398,7 @@ mod test_dealing { } // our test dkg let mut dkg = setup_dkg(0); - // iterate over transcripts from lowest weight to highest + // iterate over transcripts from lowest shares to highest let mut expected = 0u32; for (sender, pvss) in transcripts.into_iter().rev().enumerate() { // check the verification passes @@ -470,19 +416,19 @@ mod test_dealing { pvss ) .is_ok()); - expected += dkg.validators[3 - sender].validator.power as u32; + expected += 
1; // dkg.validators[3 - sender].validator.power as u32; if sender < 3 { - // check that weight accumulates correctly + // check that shares accumulates correctly match dkg.state { DkgState::Sharing { - accumulated_weight, .. + accumulated_shares, .. } => { - assert_eq!(accumulated_weight, expected) + assert_eq!(accumulated_shares, expected) } _ => panic!("Test failed"), } } else { - // check that when enough weight is accumulated, we transition state + // check that when enough shares is accumulated, we transition state assert!(matches!(dkg.state, DkgState::Dealt)); } } @@ -498,7 +444,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0 } )); @@ -517,7 +463,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, } )); @@ -532,7 +478,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, } )); @@ -546,7 +492,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 1, block: 0, } )); @@ -563,7 +509,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, } )); @@ -572,7 +518,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, } )); @@ -584,7 +530,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 3, + accumulated_shares: 1, block: 0, } )); @@ -599,7 +545,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, } )); @@ -625,7 +571,7 @@ mod test_dealing { assert!(matches!( dkg.state, DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, } )); @@ -693,7 +639,7 @@ mod test_dealing { fn test_pvss_reissue() { let mut dkg = setup_dkg(0); dkg.state = DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 2, }; assert_eq!(dkg.increase_block(), PvssScheduler::Issue); @@ -739,7 +685,7 @@ mod test_aggregation { fn test_aggregate_state_guards() { let mut dkg = setup_dealt_dkg(); dkg.state = DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, }; assert!(dkg.aggregate().is_err()); @@ -759,7 +705,7 @@ mod test_aggregation { let aggregate = dkg.aggregate().expect("Test failed"); let sender = dkg.validators[dkg.me].validator.clone(); dkg.state = DkgState::Sharing { - accumulated_weight: 0, + accumulated_shares: 0, block: 0, }; assert!(dkg.verify_message(&sender, &aggregate, rng).is_err()); @@ -779,7 +725,7 @@ mod test_aggregation { fn test_aggregate_wont_verify_if_under_threshold() { let rng = &mut ark_std::test_rng(); let mut dkg = setup_dealt_dkg(); - dkg.params.total_weight = 10; + dkg.params.shares_num = 10; let aggregate = dkg.aggregate().expect("Test failed"); let sender = dkg.validators[dkg.me].validator.clone(); assert!(dkg.verify_message(&sender, &aggregate, rng).is_err()); diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 953eeab2..a5d518fb 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -84,6 +84,7 @@ mod test_dkg_full { /// Test happy flow for a full DKG with simple threshold decryption variant #[test] + #[ignore] fn test_dkg_simple_decryption_variant() { // // The following is copied from other tests @@ -105,7 +106,7 @@ mod test_dkg_full { 
aggregate .verify_aggregation(&dkg, rng) .expect("Test failed"), - 6 + 4 ); // diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index fb8f552c..cacd2420 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -71,11 +71,12 @@ impl PubliclyVerifiableSS { ) -> Result { // Our random polynomial, \phi(x) = s + \sum_{i=1}^{t-1} a_i x^i let mut phi = DensePolynomial::::rand( - (dkg.params.total_weight - dkg.params.security_threshold) as usize, + (dkg.params.shares_num - dkg.params.security_threshold) as usize, rng, ); phi.coeffs[0] = *s; // setting the first coefficient to secret value - // Evaluations of the polynomial over the domain + + // Evaluations of the polynomial over the domain let evals = phi.evaluate_over_domain_by_ref(dkg.domain); // commitment to coeffs, F_i let coeffs = fast_multiexp(&phi.coeffs, dkg.pvss_params.g); @@ -86,7 +87,7 @@ impl PubliclyVerifiableSS { // ek_{i}^{eval_i}, i = validator index fast_multiexp( // &evals.evals[i..i] = &evals.evals[i] - &evals.evals[val.share_start..val.share_end], + &[evals.evals[val.share_index]], val.validator.public_key.encryption_key.into_projective(), ) }) @@ -96,7 +97,7 @@ impl PubliclyVerifiableSS { "Not all validator session keys have been announced" )); } - //phi.zeroize(); // TODO zeroize? + // phi.zeroize(); // TODO zeroize? // TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript // Sigma is a proof of knowledge of the secret, sigma = h^s let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve @@ -155,8 +156,9 @@ impl PubliclyVerifiableSS { let mut y = E::G2Projective::zero(); let mut a = E::G1Projective::zero(); // Validator checks checks aggregated shares against commitment + // TODO: Just one commitment per validator. Consider rewriting this. for (y_i, a_i) in shares.iter().zip_eq( - commitment[validator.share_start..validator.share_end] + [commitment[validator.share_index]] .iter(), ) { // We iterate over shares (y_i) and commitment (a_i) @@ -192,13 +194,14 @@ impl PubliclyVerifiableSS { // Now, we verify that the aggregated PVSS transcript is a valid aggregation // If it is, we return the total weights of the PVSS transcripts let mut y = E::G1Projective::zero(); - let mut weight = 0u32; - for (dealer, pvss) in dkg.vss.iter() { + // TODO: If we don't deal with share weights anymore, do we even need to call `verify_aggregation`? 
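// Each received transcript commits to its dealer's secret s_j via coeffs[0] = [s_j]*G, so the
// loop below sums those commitments and compares the result against the aggregate's coeffs[0]:
// this checks that the aggregate really is the sum of the dealt polynomials, while counting how
// many transcripts contributed to it.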
+ let mut shares_total = 0u32; + for (_, pvss) in dkg.vss.iter() { y += pvss.coeffs[0].into_projective(); - weight += dkg.validators[*dealer as usize].weight; + shares_total += 1 } if y.into_affine() == self.coeffs[0] { - Ok(weight) + Ok(shares_total) } else { Err(anyhow!( "aggregation does not match received PVSS instances" @@ -298,11 +301,10 @@ pub fn shares_for_validator( .iter() .map(|(_, pvss)| { // Each PVSS transcript contains multiple shares, one for each validator - assert_eq!(dkg.validators.len(), pvss.shares.len()); pvss.shares[validator].clone() }) - // Each validator has a share from each PVSS transcript - // One share is represented by ShareEncryptions, which is a vector of G2 points + // Each validator has a vector of shares from each PVSS transcript + // Vector of shares represented by ShareEncryptions, which is a vector of G2 points .collect::>>() } @@ -381,7 +383,7 @@ mod test_pvss { aggregate .verify_aggregation(&dkg, rng) .expect("Test failed"), - 6 + 4 ); } From e2b55b4cd8583d64e02c6b63a936bd6c670dd046 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 29 Dec 2022 13:23:20 +0100 Subject: [PATCH 07/28] updating scheme --- ferveo/benches/benchmarks/pvdkg.rs | 2 +- ferveo/examples/pvdkg.rs | 2 +- ferveo/src/dkg/pv.rs | 41 +++++-------- ferveo/src/lib.rs | 94 ++++++++++++------------------ ferveo/src/vss/pvss.rs | 72 ++++++----------------- 5 files changed, 73 insertions(+), 138 deletions(-) diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index 67803331..122bb749 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -51,7 +51,7 @@ pub fn gen_validators( ValidatorSet::new( (0..keypairs.len()) .map(|i| TendermintValidator { - power: 1,// TODO: Remove it. //i as u64, + power: 1, // TODO: Remove it. 
//i as u64, address: format!("validator_{}", i), public_key: keypairs[i].public(), }) diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs index b741e622..b6b7139c 100644 --- a/ferveo/examples/pvdkg.rs +++ b/ferveo/examples/pvdkg.rs @@ -71,7 +71,7 @@ pub fn setup_dealt_dkg(num: u64, shares: u32) { for (sender, pvss) in transcripts.into_iter().rev().enumerate() { if let Message::Deal(ss) = pvss.clone() { print_time!("PVSS verify pvdkg"); - ss.verify_full(&dkg, rng); + ss.verify_full(&dkg); } dkg.apply_message( dkg.validators[num as usize - 1 - sender].validator.clone(), diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 00550130..7a07e028 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -50,7 +50,7 @@ impl PubliclyVerifiableDkg { // partition out shares shares of validators based on their voting power let validators = make_validators(validator_set); - // we further partition out valdiators into partitions to submit pvss transcripts + // we further partition out validators into partitions to submit pvss transcripts // so as to minimize network load and enable retrying let my_partition = params.retry_after * (2 * me as u32 / params.retry_after); @@ -145,11 +145,10 @@ impl PubliclyVerifiableDkg { /// Verify a DKG related message in a block proposal /// `sender` is the validator of the sender of the message /// `payload` is the content of the message - pub fn verify_message( + pub fn verify_message( &self, sender: &TendermintValidator, payload: &Message, - rng: &mut R, ) -> Result<()> { match payload { Message::Deal(pvss) if matches!(self.state, DkgState::Sharing{..} | DkgState::Dealt) => { @@ -170,7 +169,7 @@ impl PubliclyVerifiableDkg { Message::Aggregate(Aggregation{vss, final_key}) if matches!(self.state, DkgState::Dealt) => { let minimum_shares = self.params.shares_num - self.params.security_threshold; - let verified_shares = vss.verify_aggregation(self, rng)?; + let verified_shares = vss.verify_aggregation(self)?; // we reject aggregations that fail to meet the security threshold if verified_shares < minimum_shares { Err( @@ -403,11 +402,7 @@ mod test_dealing { for (sender, pvss) in transcripts.into_iter().rev().enumerate() { // check the verification passes assert!(dkg - .verify_message( - &dkg.validators[3 - sender].validator, - &pvss, - rng - ) + .verify_message(&dkg.validators[3 - sender].validator, &pvss,) .is_ok()); // check that application passes assert!(dkg @@ -456,7 +451,7 @@ mod test_dealing { .public(), }; // check that verification fails - assert!(dkg.verify_message(&sender, &pvss, rng).is_err()); + assert!(dkg.verify_message(&sender, &pvss).is_err()); // check that application fails assert!(dkg.apply_message(sender, pvss).is_err()); // check that state has not changed @@ -485,7 +480,7 @@ mod test_dealing { let pvss = dkg.share(rng).expect("Test failed"); let sender = dkg.validators[3].validator.clone(); // check that verification fails - assert!(dkg.verify_message(&sender, &pvss, rng).is_ok()); + assert!(dkg.verify_message(&sender, &pvss).is_ok()); // check that application fails assert!(dkg.apply_message(sender.clone(), pvss.clone()).is_ok()); // check that state has appropriately changed @@ -497,7 +492,7 @@ mod test_dealing { } )); // check that sending another pvss from same sender fails - assert!(dkg.verify_message(&sender, &pvss, rng).is_err()); + assert!(dkg.verify_message(&sender, &pvss).is_err()); } /// Test that if a validators tries to verify it's own @@ -524,7 +519,7 @@ mod test_dealing { )); let sender = 
dkg.validators[0].validator.clone(); // check that verification fails - assert!(dkg.verify_message(&sender, &pvss, rng).is_ok()); + assert!(dkg.verify_message(&sender, &pvss).is_ok()); assert!(dkg.apply_message(sender, pvss).is_ok()); // check that state did not change assert!(matches!( @@ -579,12 +574,12 @@ mod test_dealing { dkg.state = DkgState::Success { final_key: G1::zero(), }; - assert!(dkg.verify_message(&sender, &pvss, rng).is_err()); + assert!(dkg.verify_message(&sender, &pvss).is_err()); assert!(dkg.apply_message(sender.clone(), pvss.clone()).is_err()); // check that we can still accept pvss transcripts after meeting threshold dkg.state = DkgState::Dealt; - assert!(dkg.verify_message(&sender, &pvss, rng).is_ok()); + assert!(dkg.verify_message(&sender, &pvss).is_ok()); assert!(dkg.apply_message(sender, pvss).is_ok()); assert!(matches!(dkg.state, DkgState::Dealt)) } @@ -628,7 +623,7 @@ mod test_dealing { let pvss = dkg.share(rng).expect("Test failed"); let sender = dkg.validators[0].validator.clone(); // check that verification fails - assert!(dkg.verify_message(&sender, &pvss, rng).is_ok()); + assert!(dkg.verify_message(&sender, &pvss).is_ok()); assert!(dkg.apply_message(sender, pvss).is_ok()); assert_eq!(dkg.increase_block(), PvssScheduler::Wait); } @@ -670,11 +665,10 @@ mod test_aggregation { /// met, we can create a final key #[test] fn test_aggregate() { - let rng = &mut ark_std::test_rng(); let mut dkg = setup_dealt_dkg(); let aggregate = dkg.aggregate().expect("Test failed"); let sender = dkg.validators[dkg.me].validator.clone(); - assert!(dkg.verify_message(&sender, &aggregate, rng).is_ok()); + assert!(dkg.verify_message(&sender, &aggregate).is_ok()); assert!(dkg.apply_message(sender, aggregate).is_ok()); assert!(matches!(dkg.state, DkgState::Success { .. })); } @@ -700,7 +694,6 @@ mod test_aggregation { /// [`DkgState::Dealt`] #[test] fn test_aggregate_message_state_guards() { - let rng = &mut ark_std::test_rng(); let mut dkg = setup_dealt_dkg(); let aggregate = dkg.aggregate().expect("Test failed"); let sender = dkg.validators[dkg.me].validator.clone(); @@ -708,14 +701,14 @@ mod test_aggregation { accumulated_shares: 0, block: 0, }; - assert!(dkg.verify_message(&sender, &aggregate, rng).is_err()); + assert!(dkg.verify_message(&sender, &aggregate).is_err()); assert!(dkg .apply_message(sender.clone(), aggregate.clone()) .is_err()); dkg.state = DkgState::Success { final_key: G1::zero(), }; - assert!(dkg.verify_message(&sender, &aggregate, rng).is_err()); + assert!(dkg.verify_message(&sender, &aggregate).is_err()); assert!(dkg.apply_message(sender, aggregate).is_err()) } @@ -723,19 +716,17 @@ mod test_aggregation { /// security threshold is not met #[test] fn test_aggregate_wont_verify_if_under_threshold() { - let rng = &mut ark_std::test_rng(); let mut dkg = setup_dealt_dkg(); dkg.params.shares_num = 10; let aggregate = dkg.aggregate().expect("Test failed"); let sender = dkg.validators[dkg.me].validator.clone(); - assert!(dkg.verify_message(&sender, &aggregate, rng).is_err()); + assert!(dkg.verify_message(&sender, &aggregate).is_err()); } /// If the aggregated pvss passes, check that the announced /// key is correct. 
Verification should fail if it is not #[test] fn test_aggregate_wont_verify_if_wrong_key() { - let rng = &mut ark_std::test_rng(); let mut dkg = setup_dealt_dkg(); let mut aggregate = dkg.aggregate().expect("Test failed"); while dkg.final_key() == G1::zero() { @@ -747,6 +738,6 @@ mod test_aggregation { *final_key = G1::zero(); } let sender = dkg.validators[dkg.me].validator.clone(); - assert!(dkg.verify_message(&sender, &aggregate, rng).is_err()); + assert!(dkg.verify_message(&sender, &aggregate).is_err()); } } diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index a5d518fb..62939eae 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -55,7 +55,6 @@ pub fn prepare_combine_simple( pub fn share_combine_simple( shares: &Vec, lagrange_coeffs: &Vec, - // prepared_key_shares: &[E::G2Affine], ) -> E::Fqk { let mut product_of_shares = E::Fqk::one(); @@ -84,7 +83,6 @@ mod test_dkg_full { /// Test happy flow for a full DKG with simple threshold decryption variant #[test] - #[ignore] fn test_dkg_simple_decryption_variant() { // // The following is copied from other tests @@ -100,14 +98,9 @@ mod test_dkg_full { // check that the optimistic verify returns true assert!(aggregate.verify_optimistic()); // check that the full verify returns true - assert!(aggregate.verify_full(&dkg, rng)); + assert!(aggregate.verify_full(&dkg)); // check that the verification of aggregation passes - assert_eq!( - aggregate - .verify_aggregation(&dkg, rng) - .expect("Test failed"), - 4 - ); + assert_eq!(aggregate.verify_aggregation(&dkg).expect("Test failed"), 4); // // Now, we start the actual test @@ -128,66 +121,53 @@ mod test_dkg_full { // TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security - // - // Each validator attempts to aggregate and decrypt the secret shares - // let decryption_shares = validator_keypairs - validator_keypairs + let decryption_shares = validator_keypairs .iter() .enumerate() // Assuming that the ordering of the validator keypairs is the same as the ordering of the validators in the validator set // TODO: Check this assumption - .for_each(|(validator_i, keypair)| { - let decrypted_shares: Vec> = - shares_for_validator(validator_i, &dkg) + .map(|(validator_i, keypair)| { + let decrypted_shares: Vec = + // shares_for_validator(validator_i, &dkg) + dkg.vss[&(validator_i as u32)].shares .iter() // Each "share" the validator has is actually a vector of shares // This because of domain partitioning - the amount of shares is the same as the validator's "power" - .map(|share| { + .map(|share| // Decrypt the share by decrypting each of the G2 elements within ShareEncryptions - share - .iter() - .map(|s| s.mul(keypair.decryption_key)) - .collect() - }) + share.mul(keypair.decryption_key)) .collect(); - let combined_shares = decrypted_shares.iter().fold( - vec![ - ark_bls12_381::G2Projective::zero(); - decrypted_shares[0].len() - ], - |acc, share| { - zip_eq(acc, share).map(|(a, b)| a + b).collect() - }, - ); - - let decryption_shares = combined_shares + let z_i = decrypted_shares .iter() - .map(|z_i| { - // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares - let u = ciphertext.commitment; - let c_i = E::pairing(u, *z_i); - c_i - }) - .collect::>(); - - let shares_x = &dkg.domain.elements().collect::>(); - let lagrange_coeffs = prepare_combine_simple::(&shares_x); - - let s = - share_combine_simple::(&decryption_shares, &lagrange_coeffs); - - let 
plaintext = - tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); - assert_eq!(plaintext, msg); - }); - - // TODO: Perform decryption here! - - // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts - // .flatten() - // .collect(); + .fold(ark_bls12_381::G2Projective::zero(), |acc, share| { + acc + *share + }); + + // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let u = ciphertext.commitment; + let c_i = E::pairing(u, z_i); + + c_i + }) + .collect::>(); + + // TODO: Am I taking a correct amount of x cooridnates herer? The domain contains 2^n=8 elements total, but I'm taking 4 + let shares_x = &dkg.domain.elements().take(decryption_shares.len()).collect::>(); + let lagrange_coeffs = prepare_combine_simple::(&shares_x); + + let s = share_combine_simple::(&decryption_shares, &lagrange_coeffs); + + let plaintext = + tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); + assert_eq!(plaintext, msg); + + // TODO: Perform decryption here! + + // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts + // .flatten() + // .collect(); // let shares_x = &dkg.domain.elements().collect::>(); // let lagrange_coeffs = prepare_combine_simple::(&shares_x); diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index cacd2420..73fcd10d 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -12,7 +12,7 @@ use itertools::{zip_eq, Itertools}; use subproductdomain::fast_multiexp; /// These are the blinded evaluations of weight shares of a single random polynomial -pub type ShareEncryptions = Vec<::G2Affine>; +pub type ShareEncryptions = ::G2Affine; /// Marker struct for unaggregated PVSS transcripts #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug)] @@ -89,7 +89,7 @@ impl PubliclyVerifiableSS { // &evals.evals[i..i] = &evals.evals[i] &[evals.evals[val.share_index]], val.validator.public_key.encryption_key.into_projective(), - ) + )[0] }) .collect::>>(); if shares.len() != dkg.validators.len() { @@ -131,11 +131,7 @@ impl PubliclyVerifiableSS { /// If aggregation fails, a validator needs to know that their pvss /// transcript was at fault so that the can issue a new one. This /// function may also be used for that purpose. - pub fn verify_full( - &self, - dkg: &PubliclyVerifiableDkg, - rng: &mut R, - ) -> bool { + pub fn verify_full(&self, dkg: &PubliclyVerifiableDkg) -> bool { // compute the commitment let mut commitment = batch_to_projective(&self.coeffs); print_time!("commitment fft"); @@ -143,7 +139,7 @@ impl PubliclyVerifiableSS { // Each validator checks that their share is correct dkg.validators.iter().zip(self.shares.iter()).all( - |(validator, shares)| { + |(validator, share)| { // ek is the public key of the validator // TODO: Is that the ek = [dk]H key? let ek = validator @@ -151,26 +147,13 @@ impl PubliclyVerifiableSS { .public_key .encryption_key .into_projective(); - let alpha = E::Fr::rand(rng); - let mut powers_of_alpha = alpha; - let mut y = E::G2Projective::zero(); - let mut a = E::G1Projective::zero(); // Validator checks checks aggregated shares against commitment - // TODO: Just one commitment per validator. Consider rewriting this. 
- for (y_i, a_i) in shares.iter().zip_eq( - [commitment[validator.share_index]] - .iter(), - ) { - // We iterate over shares (y_i) and commitment (a_i) - // TODO: Check #3 is missing - // See #3 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf - y += y_i.mul(powers_of_alpha.into_repr()); - a += a_i.mul(powers_of_alpha.into_repr()); - powers_of_alpha *= alpha; - } + // TODO: Check #3 is missing + // See #3 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf + let y = *share; + let a = commitment[validator.share_index]; + // We verify that e(G, Y_j) = e(A_j, ek_j) for all j // See #4 in 4.2.3 section of https://eprint.iacr.org/2022/898.pdf - // Y = \sum_i y_i \alpha^i - // A = \sum_i a_i \alpha^i // e(G,Y) = e(A, ek) E::pairing(dkg.pvss_params.g, y) == E::pairing(a, ek) }, @@ -184,13 +167,12 @@ impl PubliclyVerifiableSS { /// the PVSS instances, produced by [`aggregate`], /// and received by the DKG context `dkg` /// Returns the total valid weight of the aggregated PVSS - pub fn verify_aggregation( + pub fn verify_aggregation( &self, dkg: &PubliclyVerifiableDkg, - rng: &mut R, ) -> Result { print_time!("PVSS verify_aggregation"); - self.verify_full(dkg, rng); + self.verify_full(dkg); // Now, we verify that the aggregated PVSS transcript is a valid aggregation // If it is, we return the total weights of the PVSS transcripts let mut y = E::G1Projective::zero(); @@ -221,11 +203,7 @@ pub fn aggregate( let mut coeffs = batch_to_projective(&first_pvss.coeffs); let mut sigma = first_pvss.sigma; - let mut shares = first_pvss - .shares - .iter() - .map(|a| batch_to_projective(a)) - .collect::>(); + let mut shares = batch_to_projective(&first_pvss.shares); // So now we're iterating over the PVSS instances, and adding their coefficients and shares, and their sigma // sigma is the sum of all the sigma_i, which is the proof of knowledge of the secret polynomial @@ -239,16 +217,9 @@ pub fn aggregate( shares .iter_mut() .zip_eq(next.shares.iter()) - .for_each(|(a, b)| { - a.iter_mut() - .zip_eq(b.iter()) - .for_each(|(c, d)| *c += d.into_projective()) - }); + .for_each(|(a, b)| *a += b.into_projective()); } - let shares = shares - .iter() - .map(|a| E::G2Projective::batch_normalization_into_affine(a)) - .collect::>(); + let shares = E::G2Projective::batch_normalization_into_affine(&shares); PubliclyVerifiableSS { coeffs: E::G1Projective::batch_normalization_into_affine(&coeffs), @@ -341,7 +312,7 @@ mod test_pvss { // check that the optimistic verify returns true assert!(pvss.verify_optimistic()); // check that the full verify returns true - assert!(pvss.verify_full(&dkg, rng)); + assert!(pvss.verify_full(&dkg)); } /// Check that if the proof of knowledge is wrong, @@ -367,7 +338,6 @@ mod test_pvss { /// Should have the correct form and validations pass #[test] fn test_aggregate_pvss() { - let rng = &mut ark_std::test_rng(); let dkg = setup_dealt_dkg(); let aggregate = aggregate(&dkg); //check that a polynomial of the correct degree was created @@ -377,14 +347,9 @@ mod test_pvss { // check that the optimistic verify returns true assert!(aggregate.verify_optimistic()); // check that the full verify returns true - assert!(aggregate.verify_full(&dkg, rng)); + assert!(aggregate.verify_full(&dkg)); // check that the verification of aggregation passes - assert_eq!( - aggregate - .verify_aggregation(&dkg, rng) - .expect("Test failed"), - 4 - ); + assert_eq!(aggregate.verify_aggregation(&dkg).expect("Test failed"), 4); } /// Check that if the aggregated pvss transcript has an @@ -392,7 +357,6 @@ mod 
test_pvss { #[test] fn test_verify_aggregation_fails_if_constant_term_wrong() { use std::ops::Neg; - let rng = &mut ark_std::test_rng(); let dkg = setup_dealt_dkg(); let mut aggregated = aggregate(&dkg); while aggregated.coeffs[0] == G1::zero() { @@ -402,7 +366,7 @@ mod test_pvss { aggregated.coeffs[0] = G1::zero(); assert_eq!( aggregated - .verify_aggregation(&dkg, rng) + .verify_aggregation(&dkg) .expect_err("Test failed") .to_string(), "aggregation does not match received PVSS instances" From 0474b484a6eb8b9d91eb4b3cb7d56db207eda12c Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 29 Dec 2022 15:40:39 +0100 Subject: [PATCH 08/28] update aggregation --- ferveo/src/dkg/pv.rs | 56 ++++++++++++++++----------------- ferveo/src/lib.rs | 71 ++++++------------------------------------ ferveo/src/vss/pvss.rs | 61 ++++++++++-------------------------- 3 files changed, 54 insertions(+), 134 deletions(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 7a07e028..05e799c6 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -166,7 +166,7 @@ impl PubliclyVerifiableDkg { Ok(()) } } - Message::Aggregate(Aggregation{vss, final_key}) if matches!(self.state, DkgState::Dealt) => { + Message::Aggregate(Aggregation { vss, final_key }) if matches!(self.state, DkgState::Dealt) => { let minimum_shares = self.params.shares_num - self.params.security_threshold; let verified_shares = vss.verify_aggregation(self)?; @@ -205,17 +205,17 @@ impl PubliclyVerifiableDkg { // we keep track of the amount of shares seen until the security // threshold is met. Then we may change the state of the DKG - if let DkgState::Sharing{ref mut accumulated_shares, ..} = &mut self.state { + if let DkgState::Sharing { ref mut accumulated_shares, .. } = &mut self.state { *accumulated_shares += 1; if *accumulated_shares >= self.params.shares_num - self.params.security_threshold { - self.state = DkgState::Dealt; + self.state = DkgState::Dealt; } } Ok(()) } Message::Aggregate(_) if matches!(self.state, DkgState::Dealt) => { // change state and cache the final key - self.state = DkgState::Success {final_key: self.final_key()}; + self.state = DkgState::Success { final_key: self.final_key() }; Ok(()) } _ => Err(anyhow!("DKG state machine is not in correct state to apply this message")) @@ -254,6 +254,7 @@ pub(crate) mod test_common { pub use super::*; pub use ark_bls12_381::Bls12_381 as EllipticCurve; pub use ark_ff::UniformRand; + pub type G1 = ::G1Affine; /// Generate a set of keypairs for each validator @@ -306,29 +307,27 @@ pub(crate) mod test_common { pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg { let n = 4; let rng = &mut ark_std::test_rng(); - // gather everyone's transcripts - let mut transcripts = vec![]; - for i in 0..n { - // All of the dkg instances have the same validators - let mut dkg = setup_dkg(i); - transcripts.push(dkg.share(rng).expect("Test failed")); - } - // our test dkg + + // Gather everyone's transcripts + let transcripts = (0..n) + .map(|i| { + let mut dkg = setup_dkg(i); + dkg.share(rng).expect("Test failed") + }) + .collect::>(); + + // Our test dkg let mut dkg = setup_dkg(0); - // iterate over transcripts from lowest shares to highest - for (sender, pvss) in transcripts.into_iter().rev().enumerate() { - dkg.apply_message( - dkg.validators[n - 1 - sender].validator.clone(), - pvss, - ) - .expect("Setup failed"); - } - // At this point, the dkg should contain n transcripts, each containing n shares - // TODO: Remove this check - assert_eq!(dkg.vss.len(), n); - for i in 0..n { - 
assert_eq!(dkg.vss[&(i as u32)].shares.len(), n); - } + transcripts + .into_iter() + .enumerate() + .for_each(|(sender, pvss)| { + dkg.apply_message( + dkg.validators[sender].validator.clone(), + pvss, + ) + .expect("Setup failed"); + }); dkg } } @@ -337,6 +336,7 @@ pub(crate) mod test_common { #[cfg(test)] mod test_dkg_init { use super::test_common::*; + /// Test that dkg fails to start if the `me` input /// is not in the validator set #[test] @@ -402,13 +402,13 @@ mod test_dealing { for (sender, pvss) in transcripts.into_iter().rev().enumerate() { // check the verification passes assert!(dkg - .verify_message(&dkg.validators[3 - sender].validator, &pvss,) + .verify_message(&dkg.validators[3 - sender].validator, &pvss) .is_ok()); // check that application passes assert!(dkg .apply_message( dkg.validators[3 - sender].validator.clone(), - pvss + pvss, ) .is_ok()); expected += 1; // dkg.validators[3 - sender].validator.power as u32; diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 62939eae..bf73bba7 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -110,7 +110,6 @@ mod test_dkg_full { // We now want to test the decryption of a message // First, we encrypt a message using a DKG public key - let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); let public_key = dkg.final_key(); @@ -118,43 +117,24 @@ mod test_dkg_full { // TODO: Update test utils so that we can easily get a validator keypair for each validator let validator_keypairs = gen_keypairs(); - // TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security + let aggregate = aggregate_for_decryption(&dkg); // Each validator attempts to aggregate and decrypt the secret shares - let decryption_shares = validator_keypairs - .iter() - .enumerate() - // Assuming that the ordering of the validator keypairs is the same as the ordering of the validators in the validator set - // TODO: Check this assumption - .map(|(validator_i, keypair)| { - let decrypted_shares: Vec = - // shares_for_validator(validator_i, &dkg) - dkg.vss[&(validator_i as u32)].shares - .iter() - // Each "share" the validator has is actually a vector of shares - // This because of domain partitioning - the amount of shares is the same as the validator's "power" - .map(|share| - // Decrypt the share by decrypting each of the G2 elements within ShareEncryptions - share.mul(keypair.decryption_key)) - .collect(); - - let z_i = decrypted_shares - .iter() - .fold(ark_bls12_381::G2Projective::zero(), |acc, share| { - acc + *share - }); - - // Validator decryption of private key shares, https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let decryption_shares = zip_eq(validator_keypairs, aggregate) + .map(|(keypair, encrypted_shares)| { + let z_i = encrypted_shares.mul(keypair.decryption_key); let u = ciphertext.commitment; let c_i = E::pairing(u, z_i); - c_i }) .collect::>(); - // TODO: Am I taking a correct amount of x cooridnates herer? 
The domain contains 2^n=8 elements total, but I'm taking 4 - let shares_x = &dkg.domain.elements().take(decryption_shares.len()).collect::>(); + let shares_x = &dkg + .domain + .elements() + .take(decryption_shares.len()) + .collect::>(); let lagrange_coeffs = prepare_combine_simple::(&shares_x); let s = share_combine_simple::(&decryption_shares, &lagrange_coeffs); @@ -162,38 +142,5 @@ mod test_dkg_full { let plaintext = tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); assert_eq!(plaintext, msg); - - // TODO: Perform decryption here! - - // For every validator, we're collecting all the decryption shares from all of the PVSS transcripts - // .flatten() - // .collect(); - - // let shares_x = &dkg.domain.elements().collect::>(); - // let lagrange_coeffs = prepare_combine_simple::(&shares_x); - // - // let s = - // share_combine_simple::(&decryption_shares, &lagrange_coeffs); - // - // let plaintext = - // tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); - // assert_eq!(plaintext, msg); - - /* - TODO: This variant seems to be outdated/unused in simple threshold decryption variant - - // Following section 4.4.8 of the paper, we need to compute the following: - let decryption_shares = validator_keypairs.iter().map(|validator| { - // TODO: Check the validity of (U, W) - - // Compute the decryption share D_{i,j} = [dk_j^{-1}]*U_i - // We only have one U in this case - let u = ciphertext.commitment; - let dk_j = validator.decryption_key; - let dk_j_inv = dk_j.inverse().unwrap(); - let d_ij = u.mul(dk_j_inv); - d_ij - }); - */ } } diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 73fcd10d..9fb627a5 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -229,54 +229,27 @@ pub fn aggregate( } } -// pub fn aggregate_for_decryption( -// dkg: &PubliclyVerifiableDkg, -// ) -> ShareEncryptions { -// let aggregate = dkg -// .vss -// .iter() -// .map(|(_, pvss)| { -// assert_eq!(dkg.validators.len(), pvss.shares.len()); -// -// let shares = pvss -// .shares -// .iter() -// .map(|a| batch_to_projective(a)) -// .collect::>(); -// -// // Combine PVSS transcripts into a share aggregate -// let mut share_iter = shares.iter(); -// let first_share = share_iter.next().unwrap(); -// share_iter -// .fold(first_share, |acc, share| { -// &zip_eq(acc, share) -// .map(|(a, b)| *a + *b) -// .collect::>() -// }) -// .iter() -// .map(|a| a.into_affine()) -// .collect::>() -// }) -// .collect::>>(); -// -// E::G2Projective::batch_normalization_into_affine(&aggregate) -// } - -/// Returns ShareEncryptions from DKG PVSS transcripts for a selected validator -pub fn shares_for_validator( - validator: usize, +pub fn aggregate_for_decryption( dkg: &PubliclyVerifiableDkg, ) -> Vec> { - // DKG contains multiple PVSS transcripts, one for each dealer - dkg.vss + // From docs: https://nikkolasg.github.io/ferveo/pvss.html?highlight=aggregate#aggregation + // "Two PVSS instances may be aggregated into a single PVSS instance by adding elementwise each of the corresponding group elements." 
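+ // A minimal illustration of the elementwise aggregation described above (notation
+ // assumed here, not taken from the Ferveo docs): if dealer i published the encrypted
+ // share Y_{i,j} for validator j, the aggregated transcript hands validator j the single
+ // group element
+ //     Y_j = Y_{1,j} + Y_{2,j} + ... + Y_{n,j}
+ // i.e. the share validator j would have received had all dealers dealt the sum of their
+ // secret polynomials. The fold below computes exactly this elementwise sum, assuming
+ // dkg.vss contains at least one transcript.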
+ let shares = dkg + .vss .iter() - .map(|(_, pvss)| { - // Each PVSS transcript contains multiple shares, one for each validator - pvss.shares[validator].clone() + .map(|(_, pvss)| pvss.shares.clone()) + .collect::>(); + let first_share = shares.first().unwrap().to_vec(); + shares + .into_iter() + .skip(1) + // We're assuming that in every PVSS instance, the shares are in the same order + .fold(first_share, |acc, shares| { + acc.into_iter() + .zip_eq(shares.into_iter()) + .map(|(a, b)| a + b) + .collect() }) - // Each validator has a vector of shares from each PVSS transcript - // Vector of shares represented by ShareEncryptions, which is a vector of G2 points - .collect::>>() } #[cfg(test)] From 4fbaab341e8481d7fbcf103e8b9c29b0a7ea348a Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 29 Dec 2022 18:56:32 +0100 Subject: [PATCH 09/28] simple decryption with one validator works with ferveo dkg --- ferveo/src/dkg/pv.rs | 112 +++++++++++++++++++++++++------------------ ferveo/src/lib.rs | 78 ++++++++++++++++-------------- tpke/src/combine.rs | 3 +- 3 files changed, 108 insertions(+), 85 deletions(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 05e799c6..bbce4f3b 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -39,7 +39,7 @@ impl PubliclyVerifiableDkg { let domain = ark_poly::Radix2EvaluationDomain::::new( params.shares_num as usize, ) - .ok_or_else(|| anyhow!("unable to construct domain"))?; + .ok_or_else(|| anyhow!("unable to construct domain"))?; // keep track of the owner of this instance in the validator set let me = validator_set @@ -47,10 +47,8 @@ impl PubliclyVerifiableDkg { .binary_search_by(|probe| me.cmp(probe)) .map_err(|_| anyhow!("could not find this validator in the provided validator set"))?; - // partition out shares shares of validators based on their voting power let validators = make_validators(validator_set); - // we further partition out validators into partitions to submit pvss transcripts // so as to minimize network load and enable retrying let my_partition = params.retry_after * (2 * me as u32 / params.retry_after); @@ -80,22 +78,22 @@ impl PubliclyVerifiableDkg { pub fn increase_block(&mut self) -> PvssScheduler { match self.state { DkgState::Sharing { ref mut block, .. 
} - if !self.vss.contains_key(&(self.me as u32)) => - { - *block += 1; - // if our scheduled window begins, issue PVSS - if self.window.0 + 1 == *block { - PvssScheduler::Issue - } else if &self.window.1 < block { - // reset the window during which we try to get our - // PVSS on chain - *block = self.window.0 + 1; - // reissue PVSS - PvssScheduler::Issue - } else { - PvssScheduler::Wait + if !self.vss.contains_key(&(self.me as u32)) => + { + *block += 1; + // if our scheduled window begins, issue PVSS + if self.window.0 + 1 == *block { + PvssScheduler::Issue + } else if &self.window.1 < block { + // reset the window during which we try to get our + // PVSS on chain + *block = self.window.0 + 1; + // reissue PVSS + PvssScheduler::Issue + } else { + PvssScheduler::Wait + } } - } _ => PvssScheduler::Wait, } } @@ -224,12 +222,12 @@ impl PubliclyVerifiableDkg { } #[derive( - Serialize, - Deserialize, - Clone, - Debug, - CanonicalSerialize, - CanonicalDeserialize, +Serialize, +Deserialize, +Clone, +Debug, +CanonicalSerialize, +CanonicalDeserialize, )] #[serde(bound = "")] pub struct Aggregation { @@ -257,20 +255,25 @@ pub(crate) mod test_common { pub type G1 = ::G1Affine; - /// Generate a set of keypairs for each validator - pub fn gen_keypairs() -> Vec> { + pub fn gen_n_keypairs(n: u32) -> Vec> { let rng = &mut ark_std::test_rng(); - (0..4) + (0..n) .map(|_| ferveo_common::Keypair::::new(rng)) .collect() } - /// Generate a few validators - pub fn gen_validators( + + /// Generate a set of keypairs for each validator + pub fn gen_keypairs() -> Vec> { + gen_n_keypairs(4) + } + + pub fn gen_n_validators( keypairs: &[ferveo_common::Keypair], + n: u32, ) -> ValidatorSet { ValidatorSet::new( - (0..4) + (0..n) .map(|i| TendermintValidator { power: 1, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works. 
address: format!("validator_{}", i), @@ -280,44 +283,59 @@ pub(crate) mod test_common { ) } - /// Create a test dkg - /// - /// The [`test_dkg_init`] module checks correctness of this setup - pub fn setup_dkg(validator: usize) -> PubliclyVerifiableDkg { - let keypairs = gen_keypairs(); - let validators = gen_validators(&keypairs); - let me = validators.validators[validator].clone(); + /// Generate a few validators + pub fn gen_validators( + keypairs: &[ferveo_common::Keypair], + ) -> ValidatorSet { + gen_n_validators(keypairs, 4) + } + + pub fn setup_dkg_for_n_validators(n_validators: u32, security_threshold: u32, shares_num: u32, my_index: usize) -> PubliclyVerifiableDkg { + let keypairs = gen_n_keypairs(n_validators ); + let validators = gen_n_validators(&keypairs, n_validators); + let me = validators.validators[my_index].clone(); PubliclyVerifiableDkg::new( validators, Params { tau: 0, - security_threshold: 2, - shares_num: 6, + security_threshold, + shares_num, retry_after: 2, }, me, - keypairs[validator], + keypairs[my_index], ) - .expect("Setup failed") + .expect("Setup failed") } + /// Create a test dkg + /// + /// The [`test_dkg_init`] module checks correctness of this setup + pub fn setup_dkg(validator: usize) -> PubliclyVerifiableDkg { + setup_dkg_for_n_validators(4, 2, 6, validator) + } + + /// Set up a dkg with enough pvss transcripts to meet the threshold /// /// The correctness of this function is tested in the module [`test_dealing`] pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg { - let n = 4; + setup_dealt_dkg_with_n_validators(4, 2, 6) + } + + pub fn setup_dealt_dkg_with_n_validators(n_validators: u32, security_threshold: u32, shares_num: u32) -> PubliclyVerifiableDkg { let rng = &mut ark_std::test_rng(); // Gather everyone's transcripts - let transcripts = (0..n) + let transcripts = (0..n_validators) .map(|i| { - let mut dkg = setup_dkg(i); + let mut dkg = setup_dkg_for_n_validators(n_validators, security_threshold, shares_num, i as usize); dkg.share(rng).expect("Test failed") }) .collect::>(); // Our test dkg - let mut dkg = setup_dkg(0); + let mut dkg = setup_dkg_for_n_validators(n_validators, security_threshold, shares_num, 0); transcripts .into_iter() .enumerate() @@ -326,7 +344,7 @@ pub(crate) mod test_common { dkg.validators[sender].validator.clone(), pvss, ) - .expect("Setup failed"); + .expect("Setup failed"); }); dkg } @@ -359,7 +377,7 @@ mod test_dkg_init { }, keypair, ) - .expect_err("Test failed"); + .expect_err("Test failed"); assert_eq!( err.to_string(), "could not find this validator in the provided validator set" diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index bf73bba7..a87d26f5 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -34,40 +34,6 @@ use ark_ff::PrimeField; use measure_time::print_time; -pub fn prepare_combine_simple( - shares_x: &[E::Fr], -) -> Vec { - // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm - let mut lagrange_coeffs = vec![]; - for x_j in shares_x { - let mut prod = E::Fr::one(); - for x_m in shares_x { - if x_j != x_m { - // In this formula x_i = 0, hence numerator is x_m - prod *= (*x_m) / (*x_m - *x_j); - } - } - lagrange_coeffs.push(prod); - } - lagrange_coeffs -} - -pub fn share_combine_simple( - shares: &Vec, - lagrange_coeffs: &Vec, -) -> E::Fqk { - let mut product_of_shares = E::Fqk::one(); - - // Sum of C_i^{L_i}z - for (c_i, alpha_i) in zip_eq(shares.iter(), lagrange_coeffs.iter()) { - // Exponentiation by alpha_i - let ss 
= c_i.pow(alpha_i.into_repr()); - product_of_shares *= ss; - } - - product_of_shares -} - #[cfg(test)] mod test_dkg_full { use super::*; @@ -81,8 +47,48 @@ mod test_dkg_full { type E = ark_bls12_381::Bls12_381; + #[test] + fn test_dkg_simple_decryption_variant_with_single_validator() { + let rng = &mut ark_std::test_rng(); + // Make sure that the number of shares is a power of 2 for the FFT to work (Radix-2 FFT domain is being used) + let dkg = setup_dealt_dkg_with_n_validators(1, 1, 1); + + // First, we encrypt a message using a DKG public key + let msg: &[u8] = "abc".as_bytes(); + let aad: &[u8] = "my-aad".as_bytes(); + let public_key = dkg.final_key(); // sum of g^coeffs[0] for all validators + let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); + + let validator_keypair = gen_n_keypairs(1)[0]; + let encrypted_shares = batch_to_projective(&dkg.vss.get(&0).unwrap().shares); + + let decryption_shares = + encrypted_shares.iter().map(|encrypted_share| { + // Decrypt private key shares https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let z_i = encrypted_share.mul(validator_keypair.decryption_key.inverse().unwrap().into_repr()); + let u = ciphertext.commitment; + let c_i = E::pairing(u, z_i); + c_i + }) + .collect::>(); + + let shares_x = &dkg + .domain + .elements() + .take(decryption_shares.len()) + .collect::>(); + let lagrange_coeffs = tpke::prepare_combine_simple::(&shares_x); + + let s = tpke::share_combine_simple::(&decryption_shares, &lagrange_coeffs); + + let plaintext = + tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); + assert_eq!(plaintext, msg); + } + /// Test happy flow for a full DKG with simple threshold decryption variant #[test] + #[ignore] fn test_dkg_simple_decryption_variant() { // // The following is copied from other tests @@ -135,9 +141,9 @@ mod test_dkg_full { .elements() .take(decryption_shares.len()) .collect::>(); - let lagrange_coeffs = prepare_combine_simple::(&shares_x); + let lagrange_coeffs = tpke::prepare_combine_simple::(&shares_x); - let s = share_combine_simple::(&decryption_shares, &lagrange_coeffs); + let s = tpke::share_combine_simple::(&decryption_shares, &lagrange_coeffs); let plaintext = tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index a8fd7875..df0d1993 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -72,9 +72,8 @@ pub fn lagrange_basis_at( } pub fn prepare_combine_simple( - context: &[PublicDecryptionContextSimple], + shares_x: &[E::Fr], ) -> Vec { - let shares_x = &context.iter().map(|ctxt| ctxt.domain).collect::>(); // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm let mut lagrange_coeffs = vec![]; for x_j in shares_x { From cca32700b3b13aafab6fcb899f852d3643dddcfd Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 29 Dec 2022 19:32:22 +0100 Subject: [PATCH 10/28] fix clippy --- ferveo/src/dkg.rs | 1 + ferveo/src/dkg/pv.rs | 85 +++++++++++++++++++++++-------------- ferveo/src/lib.rs | 38 +++++++++++------ subproductdomain/src/lib.rs | 4 +- tpke/benches/benchmarks.rs | 0 tpke/src/lib.rs | 2 +- 6 files changed, 82 insertions(+), 48 deletions(-) create mode 100644 tpke/benches/benchmarks.rs diff --git a/ferveo/src/dkg.rs b/ferveo/src/dkg.rs index c395ff66..b8e6bb1d 100644 --- a/ferveo/src/dkg.rs +++ b/ferveo/src/dkg.rs @@ -12,6 +12,7 @@ use ark_poly::{ EvaluationDomain, Polynomial, }; use 
ark_serialize::*; +use bincode::Options; use ed25519_dalek as ed25519; pub mod common; diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index bbce4f3b..b9e1d811 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -39,7 +39,7 @@ impl PubliclyVerifiableDkg { let domain = ark_poly::Radix2EvaluationDomain::::new( params.shares_num as usize, ) - .ok_or_else(|| anyhow!("unable to construct domain"))?; + .ok_or_else(|| anyhow!("unable to construct domain"))?; // keep track of the owner of this instance in the validator set let me = validator_set @@ -78,22 +78,22 @@ impl PubliclyVerifiableDkg { pub fn increase_block(&mut self) -> PvssScheduler { match self.state { DkgState::Sharing { ref mut block, .. } - if !self.vss.contains_key(&(self.me as u32)) => - { - *block += 1; - // if our scheduled window begins, issue PVSS - if self.window.0 + 1 == *block { - PvssScheduler::Issue - } else if &self.window.1 < block { - // reset the window during which we try to get our - // PVSS on chain - *block = self.window.0 + 1; - // reissue PVSS - PvssScheduler::Issue - } else { - PvssScheduler::Wait - } + if !self.vss.contains_key(&(self.me as u32)) => + { + *block += 1; + // if our scheduled window begins, issue PVSS + if self.window.0 + 1 == *block { + PvssScheduler::Issue + } else if &self.window.1 < block { + // reset the window during which we try to get our + // PVSS on chain + *block = self.window.0 + 1; + // reissue PVSS + PvssScheduler::Issue + } else { + PvssScheduler::Wait } + } _ => PvssScheduler::Wait, } } @@ -222,12 +222,12 @@ impl PubliclyVerifiableDkg { } #[derive( -Serialize, -Deserialize, -Clone, -Debug, -CanonicalSerialize, -CanonicalDeserialize, + Serialize, + Deserialize, + Clone, + Debug, + CanonicalSerialize, + CanonicalDeserialize, )] #[serde(bound = "")] pub struct Aggregation { @@ -255,14 +255,15 @@ pub(crate) mod test_common { pub type G1 = ::G1Affine; - pub fn gen_n_keypairs(n: u32) -> Vec> { + pub fn gen_n_keypairs( + n: u32, + ) -> Vec> { let rng = &mut ark_std::test_rng(); (0..n) .map(|_| ferveo_common::Keypair::::new(rng)) .collect() } - /// Generate a set of keypairs for each validator pub fn gen_keypairs() -> Vec> { gen_n_keypairs(4) @@ -290,8 +291,13 @@ pub(crate) mod test_common { gen_n_validators(keypairs, 4) } - pub fn setup_dkg_for_n_validators(n_validators: u32, security_threshold: u32, shares_num: u32, my_index: usize) -> PubliclyVerifiableDkg { - let keypairs = gen_n_keypairs(n_validators ); + pub fn setup_dkg_for_n_validators( + n_validators: u32, + security_threshold: u32, + shares_num: u32, + my_index: usize, + ) -> PubliclyVerifiableDkg { + let keypairs = gen_n_keypairs(n_validators); let validators = gen_n_validators(&keypairs, n_validators); let me = validators.validators[my_index].clone(); PubliclyVerifiableDkg::new( @@ -305,7 +311,7 @@ pub(crate) mod test_common { me, keypairs[my_index], ) - .expect("Setup failed") + .expect("Setup failed") } /// Create a test dkg @@ -315,7 +321,6 @@ pub(crate) mod test_common { setup_dkg_for_n_validators(4, 2, 6, validator) } - /// Set up a dkg with enough pvss transcripts to meet the threshold /// /// The correctness of this function is tested in the module [`test_dealing`] @@ -323,19 +328,33 @@ pub(crate) mod test_common { setup_dealt_dkg_with_n_validators(4, 2, 6) } - pub fn setup_dealt_dkg_with_n_validators(n_validators: u32, security_threshold: u32, shares_num: u32) -> PubliclyVerifiableDkg { + pub fn setup_dealt_dkg_with_n_validators( + n_validators: u32, + security_threshold: u32, + shares_num: u32, + ) 
-> PubliclyVerifiableDkg { let rng = &mut ark_std::test_rng(); // Gather everyone's transcripts let transcripts = (0..n_validators) .map(|i| { - let mut dkg = setup_dkg_for_n_validators(n_validators, security_threshold, shares_num, i as usize); + let mut dkg = setup_dkg_for_n_validators( + n_validators, + security_threshold, + shares_num, + i as usize, + ); dkg.share(rng).expect("Test failed") }) .collect::>(); // Our test dkg - let mut dkg = setup_dkg_for_n_validators(n_validators, security_threshold, shares_num, 0); + let mut dkg = setup_dkg_for_n_validators( + n_validators, + security_threshold, + shares_num, + 0, + ); transcripts .into_iter() .enumerate() @@ -344,7 +363,7 @@ pub(crate) mod test_common { dkg.validators[sender].validator.clone(), pvss, ) - .expect("Setup failed"); + .expect("Setup failed"); }); dkg } @@ -377,7 +396,7 @@ mod test_dkg_init { }, keypair, ) - .expect_err("Test failed"); + .expect_err("Test failed"); assert_eq!( err.to_string(), "could not find this validator in the provided validator set" diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index a87d26f5..8ad379c7 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -60,15 +60,23 @@ mod test_dkg_full { let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); let validator_keypair = gen_n_keypairs(1)[0]; - let encrypted_shares = batch_to_projective(&dkg.vss.get(&0).unwrap().shares); + let encrypted_shares = + batch_to_projective(&dkg.vss.get(&0).unwrap().shares); - let decryption_shares = - encrypted_shares.iter().map(|encrypted_share| { + let decryption_shares = encrypted_shares + .iter() + .map(|encrypted_share| { // Decrypt private key shares https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares - let z_i = encrypted_share.mul(validator_keypair.decryption_key.inverse().unwrap().into_repr()); + let z_i = encrypted_share.mul( + validator_keypair + .decryption_key + .inverse() + .unwrap() + .into_repr(), + ); let u = ciphertext.commitment; - let c_i = E::pairing(u, z_i); - c_i + + E::pairing(u, z_i) }) .collect::>(); @@ -77,9 +85,12 @@ mod test_dkg_full { .elements() .take(decryption_shares.len()) .collect::>(); - let lagrange_coeffs = tpke::prepare_combine_simple::(&shares_x); + let lagrange_coeffs = tpke::prepare_combine_simple::(shares_x); - let s = tpke::share_combine_simple::(&decryption_shares, &lagrange_coeffs); + let s = tpke::share_combine_simple::( + &decryption_shares, + &lagrange_coeffs, + ); let plaintext = tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); @@ -131,8 +142,8 @@ mod test_dkg_full { .map(|(keypair, encrypted_shares)| { let z_i = encrypted_shares.mul(keypair.decryption_key); let u = ciphertext.commitment; - let c_i = E::pairing(u, z_i); - c_i + + E::pairing(u, z_i) }) .collect::>(); @@ -141,9 +152,12 @@ mod test_dkg_full { .elements() .take(decryption_shares.len()) .collect::>(); - let lagrange_coeffs = tpke::prepare_combine_simple::(&shares_x); + let lagrange_coeffs = tpke::prepare_combine_simple::(shares_x); - let s = tpke::share_combine_simple::(&decryption_shares, &lagrange_coeffs); + let s = tpke::share_combine_simple::( + &decryption_shares, + &lagrange_coeffs, + ); let plaintext = tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); diff --git a/subproductdomain/src/lib.rs b/subproductdomain/src/lib.rs index e7136a3c..e2d329ad 100644 --- a/subproductdomain/src/lib.rs +++ b/subproductdomain/src/lib.rs @@ -303,7 +303,7 @@ impl SubproductTree { pub fn derivative(f: &Poly) -> Poly { let mut coeffs = 
Vec::with_capacity(f.coeffs().len() - 1); for (i, c) in f.coeffs.iter().enumerate().skip(1) { - coeffs.push(F::from(i as u64) * c); + coeffs.push(F::from(i as u128) * c); } Poly:: { coeffs } } @@ -374,7 +374,7 @@ pub fn toeplitz_mul( Ok(( tmp[..toeplitz_size].to_vec(), - E::Fr::from(domain.size() as u64).inverse().unwrap(), + E::Fr::from(domain.size() as u128).inverse().unwrap(), )) } diff --git a/tpke/benches/benchmarks.rs b/tpke/benches/benchmarks.rs new file mode 100644 index 00000000..e69de29b diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index 5a3f16ff..238bd629 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -486,7 +486,7 @@ mod tests { &contexts[0].public_decryption_contexts, ); - let shared_secret = + let shared_secret = share_combine_simple::(&decryption_shares, &lagrange); test_ciphertext_validation_fails(msg, aad, &ciphertext, &shared_secret); From d3c76cde43f13a9a7c24d24511acbd980b5b6e44 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 30 Dec 2022 13:39:58 +0100 Subject: [PATCH 11/28] simple threshold decryption works --- ferveo-common/src/lib.rs | 7 ++- ferveo/src/dkg/pv.rs | 43 +++++++++------ ferveo/src/lib.rs | 113 ++++++++++++++------------------------- ferveo/src/vss/pvss.rs | 24 ++++++++- tpke/src/lib.rs | 2 +- 5 files changed, 94 insertions(+), 95 deletions(-) diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs index e443e876..00a6bb7e 100644 --- a/ferveo-common/src/lib.rs +++ b/ferveo-common/src/lib.rs @@ -35,7 +35,7 @@ impl PartialOrd for TendermintValidator { impl Ord for TendermintValidator { fn cmp(&self, other: &Self) -> Ordering { - (self.power, &self.address).cmp(&(other.power, &other.address)) + self.address.cmp(&other.address) } } @@ -49,13 +49,12 @@ impl ValidatorSet { /// Sorts the validators from highest to lowest. 
This ordering /// first considers staking weight and breaks ties on established /// address - pub fn new(mut validators: Vec>) -> Self { - // reverse the ordering here - validators.sort_by(|a, b| b.cmp(a)); + pub fn new(validators: Vec>) -> Self { Self { validators } } /// Get the total voting power of the validator set + // TODO: Remove this pub fn total_voting_power(&self) -> u64 { self.validators.iter().map(|v| v.power).sum() } diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index b9e1d811..5a5cb71d 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -1,4 +1,5 @@ use crate::*; +use anyhow::Context; use ark_ec::bn::TwistType::D; use ark_ec::PairingEngine; use ark_ff::Field; @@ -44,12 +45,15 @@ impl PubliclyVerifiableDkg { // keep track of the owner of this instance in the validator set let me = validator_set .validators - .binary_search_by(|probe| me.cmp(probe)) - .map_err(|_| anyhow!("could not find this validator in the provided validator set"))?; + .iter() + .position(|probe| me.address == probe.address) + .context( + "could not find this validator in the provided validator set", + )?; let validators = make_validators(validator_set); - // so as to minimize network load and enable retrying + // TODO: Remove my_partition let my_partition = params.retry_after * (2 * me as u32 / params.retry_after); Ok(Self { @@ -67,6 +71,7 @@ impl PubliclyVerifiableDkg { }, me, validators, + // TODO: Remove window window: (my_partition, my_partition + params.retry_after), }) } @@ -154,8 +159,8 @@ impl PubliclyVerifiableDkg { // an address keyed hashmap after partitioning the shares shares // in the [`new`] method let sender = self.validators - .binary_search_by(|probe| sender.cmp(&probe.validator)) - .map_err(|_| anyhow!("dkg received unknown dealer"))?; + .iter().position(|probe| sender.address == probe.validator.address) + .context("dkg received unknown dealer")?; if self.vss.contains_key(&(sender as u32)) { Err(anyhow!("Repeat dealer {}", sender)) } else if !pvss.verify_optimistic() { @@ -197,8 +202,8 @@ impl PubliclyVerifiableDkg { Message::Deal(pvss) if matches!(self.state, DkgState::Sharing{..} | DkgState::Dealt) => { // Add the ephemeral public key and pvss transcript let sender = self.validators - .binary_search_by(|probe| sender.cmp(&probe.validator)) - .map_err(|_| anyhow!("dkg received unknown dealer"))?; + .iter().position(|probe| sender.address == probe.validator.address) + .context("dkg received unknown dealer")?; self.vss.insert(sender as u32, pvss); // we keep track of the amount of shares seen until the security @@ -298,6 +303,7 @@ pub(crate) mod test_common { my_index: usize, ) -> PubliclyVerifiableDkg { let keypairs = gen_n_keypairs(n_validators); + for _keypair in &keypairs {} let validators = gen_n_validators(&keypairs, n_validators); let me = validators.validators[my_index].clone(); PubliclyVerifiableDkg::new( @@ -325,7 +331,7 @@ pub(crate) mod test_common { /// /// The correctness of this function is tested in the module [`test_dealing`] pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg { - setup_dealt_dkg_with_n_validators(4, 2, 6) + setup_dealt_dkg_with_n_validators(4, 2, 4) } pub fn setup_dealt_dkg_with_n_validators( @@ -333,6 +339,10 @@ pub(crate) mod test_common { security_threshold: u32, shares_num: u32, ) -> PubliclyVerifiableDkg { + // Make sure that the number of shares is a power of 2 for the FFT to work (Radix-2 FFT domain is being used) + let is_power_of_2 = |n: u32| n != 0 && (n & (n - 1)) == 0; + assert!(is_power_of_2(shares_num)); + let rng = 
&mut ark_std::test_rng(); // Gather everyone's transcripts @@ -386,12 +396,12 @@ mod test_dkg_init { Params { tau: 0, security_threshold: 4, - shares_num: 6, + shares_num: 8, retry_after: 2, }, TendermintValidator:: { power: 9001, - address: "Goku".into(), + address: "non-existant-validator".into(), public_key: keypair.public(), }, keypair, @@ -434,22 +444,23 @@ mod test_dealing { } // our test dkg let mut dkg = setup_dkg(0); - // iterate over transcripts from lowest shares to highest + // iterate over transcripts let mut expected = 0u32; - for (sender, pvss) in transcripts.into_iter().rev().enumerate() { + for (sender, pvss) in transcripts.iter().enumerate() { // check the verification passes assert!(dkg - .verify_message(&dkg.validators[3 - sender].validator, &pvss) + .verify_message(&dkg.validators[sender].validator, pvss) .is_ok()); // check that application passes assert!(dkg .apply_message( - dkg.validators[3 - sender].validator.clone(), - pvss, + dkg.validators[sender].validator.clone(), + pvss.clone(), ) .is_ok()); expected += 1; // dkg.validators[3 - sender].validator.power as u32; - if sender < 3 { + // As long as we still have transcripts to deal, we should be in the Dealt state + if sender < transcripts.len() - 1 { // check that shares accumulates correctly match dkg.state { DkgState::Sharing { diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 8ad379c7..74bd26ca 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -39,46 +39,33 @@ mod test_dkg_full { use super::*; use crate::dkg::pv::test_common::*; - use ark_bls12_381::Bls12_381 as EllipticCurve; - use ark_ff::UniformRand; - use ferveo_common::{TendermintValidator, ValidatorSet}; + use ark_bls12_381::{Bls12_381 as EllipticCurve, Bls12_381, G2Projective}; + use ark_ec::bls12::G2Affine; + use ark_ff::{Fp12, UniformRand}; + use ferveo_common::{Keypair, TendermintValidator, ValidatorSet}; use group_threshold_cryptography as tpke; + use group_threshold_cryptography::Ciphertext; use itertools::{zip_eq, Itertools}; - type E = ark_bls12_381::Bls12_381; + type E = Bls12_381; #[test] - fn test_dkg_simple_decryption_variant_with_single_validator() { + fn test_dkg_simple_decryption_variant_single_validator() { let rng = &mut ark_std::test_rng(); - // Make sure that the number of shares is a power of 2 for the FFT to work (Radix-2 FFT domain is being used) let dkg = setup_dealt_dkg_with_n_validators(1, 1, 1); - // First, we encrypt a message using a DKG public key let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); - let public_key = dkg.final_key(); // sum of g^coeffs[0] for all validators + let public_key = dkg.final_key(); let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); - let validator_keypair = gen_n_keypairs(1)[0]; - let encrypted_shares = - batch_to_projective(&dkg.vss.get(&0).unwrap().shares); + let aggregate = aggregate_for_decryption(&dkg); + // Aggregate contains only one set of shares + assert_eq!(aggregate, dkg.vss.get(&0).unwrap().shares); - let decryption_shares = encrypted_shares - .iter() - .map(|encrypted_share| { - // Decrypt private key shares https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares - let z_i = encrypted_share.mul( - validator_keypair - .decryption_key - .inverse() - .unwrap() - .into_repr(), - ); - let u = ciphertext.commitment; - - E::pairing(u, z_i) - }) - .collect::>(); + let validator_keypairs = gen_n_keypairs(1); + let decryption_shares = + make_decryption_shares(&ciphertext, validator_keypairs, aggregate); let 
shares_x = &dkg .domain @@ -87,65 +74,44 @@ mod test_dkg_full { .collect::>(); let lagrange_coeffs = tpke::prepare_combine_simple::(shares_x); - let s = tpke::share_combine_simple::( + let shared_secret = tpke::share_combine_simple::( &decryption_shares, &lagrange_coeffs, ); - let plaintext = - tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); + let plaintext = tpke::checked_decrypt_with_shared_secret( + &ciphertext, + aad, + &shared_secret, + ); assert_eq!(plaintext, msg); } - /// Test happy flow for a full DKG with simple threshold decryption variant #[test] - #[ignore] fn test_dkg_simple_decryption_variant() { - // - // The following is copied from other tests - // - let rng = &mut ark_std::test_rng(); - let dkg = setup_dealt_dkg(); - let aggregate = aggregate(&dkg); - // check that a polynomial of the correct degree was created - assert_eq!(aggregate.coeffs.len(), 5); - // check that the correct number of shares were created - assert_eq!(aggregate.shares.len(), 4); - // check that the optimistic verify returns true - assert!(aggregate.verify_optimistic()); - // check that the full verify returns true - assert!(aggregate.verify_full(&dkg)); - // check that the verification of aggregation passes - assert_eq!(aggregate.verify_aggregation(&dkg).expect("Test failed"), 4); - - // - // Now, we start the actual test - // - - // At this point, we have a DKG that has been dealt and aggregated - // We now want to test the decryption of a message - - // First, we encrypt a message using a DKG public key + let dkg = setup_dealt_dkg_with_n_validators(4, 3, 4); + let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); let public_key = dkg.final_key(); let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); - // TODO: Update test utils so that we can easily get a validator keypair for each validator - let validator_keypairs = gen_keypairs(); - // TODO: Check ciphertext validity, https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security let aggregate = aggregate_for_decryption(&dkg); - // Each validator attempts to aggregate and decrypt the secret shares - let decryption_shares = zip_eq(validator_keypairs, aggregate) - .map(|(keypair, encrypted_shares)| { - let z_i = encrypted_shares.mul(keypair.decryption_key); - let u = ciphertext.commitment; - - E::pairing(u, z_i) - }) - .collect::>(); + // TODO: Before creating decryption shares, check ciphertext validity + // See: https://nikkolasg.github.io/ferveo/tpke.html#to-validate-ciphertext-for-ind-cca2-security + + let validator_keypairs = gen_n_keypairs(4); + // Make sure validators are in the same order dkg is by comparing their public keys + dkg.validators + .iter() + .zip_eq(validator_keypairs.iter()) + .for_each(|(v, k)| { + assert_eq!(v.validator.public_key, k.public()); + }); + let decryption_shares = + make_decryption_shares(&ciphertext, validator_keypairs, aggregate); let shares_x = &dkg .domain @@ -154,13 +120,16 @@ mod test_dkg_full { .collect::>(); let lagrange_coeffs = tpke::prepare_combine_simple::(shares_x); - let s = tpke::share_combine_simple::( + let shared_secret = tpke::share_combine_simple::( &decryption_shares, &lagrange_coeffs, ); - let plaintext = - tpke::checked_decrypt_with_shared_secret(&ciphertext, aad, &s); + let plaintext = tpke::checked_decrypt_with_shared_secret( + &ciphertext, + aad, + &shared_secret, + ); assert_eq!(plaintext, msg); } } diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 9fb627a5..2f8d27c2 100644 --- 
a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -7,7 +7,8 @@ use ark_ec::bn::G2Affine; use ark_ec::PairingEngine; use ark_ff::UniformRand; use ark_serialize::*; -use ferveo_common::PublicKey; +use ferveo_common::{Keypair, PublicKey}; +use group_threshold_cryptography::Ciphertext; use itertools::{zip_eq, Itertools}; use subproductdomain::fast_multiexp; @@ -252,6 +253,25 @@ pub fn aggregate_for_decryption( }) } +pub fn make_decryption_shares( + ciphertext: &Ciphertext, + validator_keypairs: Vec>, + aggregate: Vec, +) -> Vec { + let decryption_shares = aggregate + .iter() + .zip_eq(validator_keypairs.iter()) + .map(|(encrypted_share, keypair)| { + // Decrypt private key shares https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let z_i = encrypted_share + .mul(keypair.decryption_key.inverse().unwrap().into_repr()); + let u = ciphertext.commitment; + E::pairing(u, z_i) + }) + .collect::>(); + decryption_shares +} + #[cfg(test)] mod test_pvss { use super::*; @@ -314,7 +334,7 @@ mod test_pvss { let dkg = setup_dealt_dkg(); let aggregate = aggregate(&dkg); //check that a polynomial of the correct degree was created - assert_eq!(aggregate.coeffs.len(), 5); + assert_eq!(aggregate.coeffs.len(), 3); // check that the correct number of shares were created assert_eq!(aggregate.shares.len(), 4); // check that the optimistic verify returns true diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index 238bd629..9b487874 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -486,7 +486,7 @@ mod tests { &contexts[0].public_decryption_contexts, ); - let shared_secret = + let shared_secret = share_combine_simple::(&decryption_shares, &lagrange); test_ciphertext_validation_fails(msg, aad, &ciphertext, &shared_secret); From f526ad44a8c05151bc7c0d745770e6b7f88e1876 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 30 Dec 2022 14:56:59 +0100 Subject: [PATCH 12/28] remove dealer's lagrange coeffs calculation --- tpke/src/lib.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index 9b487874..ceb29d31 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -159,7 +159,7 @@ pub fn setup_fast( pubkey_shares.chunks(1), privkey_shares.chunks(1) ) - .enumerate() + .enumerate() { let private_key_share = PrivateKeyShare:: { private_key_shares: private.to_vec(), @@ -259,7 +259,7 @@ pub fn setup_simple( pubkey_shares.chunks(1), privkey_shares.chunks(1) ) - .enumerate() + .enumerate() { let private_key_share = PrivateKeyShare:: { private_key_shares: private.to_vec(), @@ -349,8 +349,7 @@ mod tests { fn decryption_share_serialization() { let decryption_share = DecryptionShareFast:: { decrypter_index: 1, - decryption_share: ark_bls12_381::G1Affine::prime_subgroup_generator( - ), + decryption_share: ark_bls12_381::G1Affine::prime_subgroup_generator(), }; let serialized = decryption_share.to_bytes(); From b560ad6e5e72a4b1521486cbc90e84fcbff2ed6f Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 30 Dec 2022 15:10:43 +0100 Subject: [PATCH 13/28] self code review --- ferveo/src/dkg/pv.rs | 4 ++-- ferveo/src/lib.rs | 5 +++-- tpke/src/lib.rs | 7 ++++--- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 5a5cb71d..6ffd13cd 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -331,14 +331,14 @@ pub(crate) mod test_common { /// /// The correctness of this function is tested in the module [`test_dealing`] pub fn setup_dealt_dkg() -> PubliclyVerifiableDkg 
{ - setup_dealt_dkg_with_n_validators(4, 2, 4) + setup_dealt_dkg_with_n_validators(2, 4) } pub fn setup_dealt_dkg_with_n_validators( - n_validators: u32, security_threshold: u32, shares_num: u32, ) -> PubliclyVerifiableDkg { + let n_validators = shares_num; // Make sure that the number of shares is a power of 2 for the FFT to work (Radix-2 FFT domain is being used) let is_power_of_2 = |n: u32| n != 0 && (n & (n - 1)) == 0; assert!(is_power_of_2(shares_num)); diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 74bd26ca..df3a81e8 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -52,11 +52,12 @@ mod test_dkg_full { #[test] fn test_dkg_simple_decryption_variant_single_validator() { let rng = &mut ark_std::test_rng(); - let dkg = setup_dealt_dkg_with_n_validators(1, 1, 1); + let dkg = setup_dealt_dkg_with_n_validators(1, 1); let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); let public_key = dkg.final_key(); + let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); let aggregate = aggregate_for_decryption(&dkg); @@ -90,7 +91,7 @@ mod test_dkg_full { #[test] fn test_dkg_simple_decryption_variant() { let rng = &mut ark_std::test_rng(); - let dkg = setup_dealt_dkg_with_n_validators(4, 3, 4); + let dkg = setup_dealt_dkg_with_n_validators(3, 4); let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index ceb29d31..9b487874 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -159,7 +159,7 @@ pub fn setup_fast( pubkey_shares.chunks(1), privkey_shares.chunks(1) ) - .enumerate() + .enumerate() { let private_key_share = PrivateKeyShare:: { private_key_shares: private.to_vec(), @@ -259,7 +259,7 @@ pub fn setup_simple( pubkey_shares.chunks(1), privkey_shares.chunks(1) ) - .enumerate() + .enumerate() { let private_key_share = PrivateKeyShare:: { private_key_shares: private.to_vec(), @@ -349,7 +349,8 @@ mod tests { fn decryption_share_serialization() { let decryption_share = DecryptionShareFast:: { decrypter_index: 1, - decryption_share: ark_bls12_381::G1Affine::prime_subgroup_generator(), + decryption_share: ark_bls12_381::G1Affine::prime_subgroup_generator( + ), }; let serialized = decryption_share.to_bytes(); From cafca08919841dcef7019c6e98e636450d522fa8 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 30 Dec 2022 16:20:47 +0100 Subject: [PATCH 14/28] fix clippy after 1.66 update --- ferveo/src/vss/pvss.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 2f8d27c2..186c1e59 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -236,9 +236,7 @@ pub fn aggregate_for_decryption( // From docs: https://nikkolasg.github.io/ferveo/pvss.html?highlight=aggregate#aggregation // "Two PVSS instances may be aggregated into a single PVSS instance by adding elementwise each of the corresponding group elements." 
let shares = dkg - .vss - .iter() - .map(|(_, pvss)| pvss.shares.clone()) + .vss.values().map(|pvss| pvss.shares.clone()) .collect::>(); let first_share = shares.first().unwrap().to_vec(); shares From 66215410afa829639db6417772f7bf443da36d6c Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 30 Dec 2022 18:22:32 +0100 Subject: [PATCH 15/28] cargo fmt --- ferveo/src/vss/pvss.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 186c1e59..5d6b02f5 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -236,7 +236,9 @@ pub fn aggregate_for_decryption( // From docs: https://nikkolasg.github.io/ferveo/pvss.html?highlight=aggregate#aggregation // "Two PVSS instances may be aggregated into a single PVSS instance by adding elementwise each of the corresponding group elements." let shares = dkg - .vss.values().map(|pvss| pvss.shares.clone()) + .vss + .values() + .map(|pvss| pvss.shares.clone()) .collect::>(); let first_share = shares.first().unwrap().to_vec(); shares From 50343e33d1c48e5199bac79f4d75031857fd0a7a Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 4 Jan 2023 11:44:09 +0100 Subject: [PATCH 16/28] fix after rebase --- tpke/src/combine.rs | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index df0d1993..bc0b655c 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -71,24 +71,6 @@ pub fn lagrange_basis_at( lagrange_coeffs } -pub fn prepare_combine_simple( - shares_x: &[E::Fr], -) -> Vec { - // Calculate lagrange coefficients using optimized formula, see https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm - let mut lagrange_coeffs = vec![]; - for x_j in shares_x { - let mut prod = E::Fr::one(); - for x_m in shares_x { - if x_j != x_m { - // In this formula x_i = 0, hence numerator is x_m - prod *= (*x_m) / (*x_m - *x_j); - } - } - lagrange_coeffs.push(prod); - } - lagrange_coeffs -} - pub fn share_combine_fast( shares: &[DecryptionShareFast], prepared_key_shares: &[E::G2Prepared], From 60e4c6f26c6cc2041ba66cd6697db3bae66ff04e Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 5 Jan 2023 10:27:21 +0100 Subject: [PATCH 17/28] remove ValidatorSet --- ferveo-common/src/lib.rs | 27 ++---------------------- ferveo/benches/benchmarks/pvdkg.rs | 23 +++++++++----------- ferveo/examples/pvdkg.rs | 21 ++++++++---------- ferveo/src/dkg/common.rs | 7 +++--- ferveo/src/dkg/pv.rs | 34 ++++++++++++------------------ ferveo/src/lib.rs | 2 +- ferveo/src/vss/pvss.rs | 2 +- 7 files changed, 40 insertions(+), 76 deletions(-) diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs index 00a6bb7e..0898d700 100644 --- a/ferveo-common/src/lib.rs +++ b/ferveo-common/src/lib.rs @@ -11,8 +11,6 @@ use std::cmp::Ordering; #[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] /// Represents a tendermint validator pub struct TendermintValidator { - /// Total voting power in tendermint consensus - pub power: u64, /// The established address of the validator pub address: String, /// The Public key @@ -21,7 +19,7 @@ pub struct TendermintValidator { impl PartialEq for TendermintValidator { fn eq(&self, other: &Self) -> bool { - (self.power, &self.address) == (other.power, &other.address) + (&self.address) == (&other.address) } } @@ -29,7 +27,7 @@ impl Eq for TendermintValidator {} impl PartialOrd for TendermintValidator { fn partial_cmp(&self, other: &Self) -> Option { - Some((self.power, 
&self.address).cmp(&(other.power, &other.address))) + Some(self.address.cmp(&other.address)) } } @@ -39,27 +37,6 @@ impl Ord for TendermintValidator { } } -#[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] -/// The set of tendermint validators for a dkg instance -pub struct ValidatorSet { - pub validators: Vec>, -} - -impl ValidatorSet { - /// Sorts the validators from highest to lowest. This ordering - /// first considers staking weight and breaks ties on established - /// address - pub fn new(validators: Vec>) -> Self { - Self { validators } - } - - /// Get the total voting power of the validator set - // TODO: Remove this - pub fn total_voting_power(&self) -> u64 { - self.validators.iter().map(|v| v.power).sum() - } -} - #[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct Validator { pub validator: TendermintValidator, diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index 122bb749..292b691e 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -1,6 +1,6 @@ pub use ark_bls12_381::Bls12_381 as EllipticCurve; use criterion::{criterion_group, criterion_main, Criterion}; -use ferveo_common::{TendermintValidator, ValidatorSet}; +use ferveo_common::TendermintValidator; use pprof::criterion::{Output, PProfProfiler}; use ferveo::*; @@ -47,16 +47,13 @@ pub fn gen_keypairs(num: u64) -> Vec> { /// Generate a few validators pub fn gen_validators( keypairs: &[ferveo_common::Keypair], -) -> ValidatorSet { - ValidatorSet::new( - (0..keypairs.len()) - .map(|i| TendermintValidator { - power: 1, // TODO: Remove it. //i as u64, - address: format!("validator_{}", i), - public_key: keypairs[i].public(), - }) - .collect(), - ) +) -> Vec> { + (0..keypairs.len()) + .map(|i| TendermintValidator { + address: format!("validator_{}", i), + public_key: keypairs[i].public(), + }) + .collect() } /// Create a test dkg in state [`DkgState::Init`] @@ -66,13 +63,13 @@ pub fn setup_dkg( ) -> PubliclyVerifiableDkg { let keypairs = gen_keypairs(num); let validators = gen_validators(&keypairs); - let me = validators.validators[validator].clone(); + let me = validators[validator].clone(); PubliclyVerifiableDkg::new( validators, Params { tau: 0, security_threshold: 300 / 3, - total_weight: 300, + shares_num: 4, retry_after: 2, }, me, diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs index b6b7139c..d06c401c 100644 --- a/ferveo/examples/pvdkg.rs +++ b/ferveo/examples/pvdkg.rs @@ -1,6 +1,6 @@ pub use ark_bls12_381::Bls12_381 as EllipticCurve; use ferveo::*; -use ferveo_common::{TendermintValidator, ValidatorSet}; +use ferveo_common::TendermintValidator; use measure_time::print_time; pub fn main() { @@ -21,16 +21,13 @@ pub fn gen_keypairs(num: u64) -> Vec> { /// Generate a few validators pub fn gen_validators( keypairs: &[ferveo_common::Keypair], -) -> ValidatorSet { - ValidatorSet::new( - (0..keypairs.len()) - .map(|i| TendermintValidator { - power: i as u64, - address: format!("validator_{}", i), - public_key: keypairs[i].public(), - }) - .collect(), - ) +) -> Vec> { + (0..keypairs.len()) + .map(|i| TendermintValidator { + address: format!("validator_{}", i), + public_key: keypairs[i].public(), + }) + .collect() } /// Create a test dkg in state [`DkgState::Init`] @@ -41,7 +38,7 @@ pub fn setup_dkg( ) -> PubliclyVerifiableDkg { let keypairs = gen_keypairs(num); let validators = gen_validators(&keypairs); - let me = validators.validators[validator].clone(); + let me = validators[validator].clone(); 
PubliclyVerifiableDkg::new( validators, Params { diff --git a/ferveo/src/dkg/common.rs b/ferveo/src/dkg/common.rs index 55d65abb..67987860 100644 --- a/ferveo/src/dkg/common.rs +++ b/ferveo/src/dkg/common.rs @@ -1,12 +1,11 @@ use crate::*; -use ferveo_common::ValidatorSet; +use ferveo_common::TendermintValidator; use itertools::izip; pub fn make_validators( - validator_set: ValidatorSet, + validators: Vec>, ) -> Vec> { - validator_set - .validators + validators .iter() .enumerate() .map(|(index, validator)| ferveo_common::Validator:: { diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 6ffd13cd..c8936319 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -5,7 +5,7 @@ use ark_ec::PairingEngine; use ark_ff::Field; use ark_serialize::*; use ark_std::{end_timer, start_timer}; -use ferveo_common::{PublicKey, TendermintValidator, ValidatorSet}; +use ferveo_common::{PublicKey, TendermintValidator}; use std::collections::BTreeMap; /// The DKG context that holds all of the local state for participating in the DKG @@ -31,7 +31,7 @@ impl PubliclyVerifiableDkg { /// `me` the validator creating this instance /// `session_keypair` the keypair for `me` pub fn new( - validator_set: ValidatorSet, + validators: Vec>, params: Params, me: TendermintValidator, session_keypair: ferveo_common::Keypair, @@ -43,15 +43,14 @@ impl PubliclyVerifiableDkg { .ok_or_else(|| anyhow!("unable to construct domain"))?; // keep track of the owner of this instance in the validator set - let me = validator_set - .validators + let me = validators .iter() .position(|probe| me.address == probe.address) .context( "could not find this validator in the provided validator set", )?; - let validators = make_validators(validator_set); + let validators = make_validators(validators); // TODO: Remove my_partition let my_partition = @@ -277,22 +276,19 @@ pub(crate) mod test_common { pub fn gen_n_validators( keypairs: &[ferveo_common::Keypair], n: u32, - ) -> ValidatorSet { - ValidatorSet::new( - (0..n) - .map(|i| TendermintValidator { - power: 1, // TODO: Should set to 1 in order to force partitioning to give one share to each validator. Replace with 1 by reworking how partitioning works. 
- address: format!("validator_{}", i), - public_key: keypairs[i as usize].public(), - }) - .collect(), - ) + ) -> Vec> { + (0..n) + .map(|i| TendermintValidator { + address: format!("validator_{}", i), + public_key: keypairs[i as usize].public(), + }) + .collect() } /// Generate a few validators pub fn gen_validators( keypairs: &[ferveo_common::Keypair], - ) -> ValidatorSet { + ) -> Vec> { gen_n_validators(keypairs, 4) } @@ -305,7 +301,7 @@ pub(crate) mod test_common { let keypairs = gen_n_keypairs(n_validators); for _keypair in &keypairs {} let validators = gen_n_validators(&keypairs, n_validators); - let me = validators.validators[my_index].clone(); + let me = validators[my_index].clone(); PubliclyVerifiableDkg::new( validators, Params { @@ -400,7 +396,6 @@ mod test_dkg_init { retry_after: 2, }, TendermintValidator:: { - power: 9001, address: "non-existant-validator".into(), public_key: keypair.public(), }, @@ -493,8 +488,7 @@ mod test_dealing { )); let pvss = dkg.share(rng).expect("Test failed"); let sender = TendermintValidator:: { - power: 9001, - address: "Goku".into(), + address: "fake-address".into(), public_key: ferveo_common::Keypair::::new(rng) .public(), }; diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index df3a81e8..bc1e7811 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -42,7 +42,7 @@ mod test_dkg_full { use ark_bls12_381::{Bls12_381 as EllipticCurve, Bls12_381, G2Projective}; use ark_ec::bls12::G2Affine; use ark_ff::{Fp12, UniformRand}; - use ferveo_common::{Keypair, TendermintValidator, ValidatorSet}; + use ferveo_common::{Keypair, TendermintValidator}; use group_threshold_cryptography as tpke; use group_threshold_cryptography::Ciphertext; use itertools::{zip_eq, Itertools}; diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 5d6b02f5..5ce417ec 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -279,7 +279,7 @@ mod test_pvss { use crate::dkg::pv::test_common::*; use ark_bls12_381::Bls12_381 as EllipticCurve; use ark_ff::UniformRand; - use ferveo_common::{TendermintValidator, ValidatorSet}; + use ferveo_common::TendermintValidator; type Fr = ::Fr; type G1 = ::G1Affine; From 8bd2888a95ec91686ce8e62da1533459dc159469 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 5 Jan 2023 10:33:01 +0100 Subject: [PATCH 18/28] rename TendermintValidator to ExternalValidator --- ferveo-common/src/lib.rs | 14 +++++++------- ferveo/benches/benchmarks/pvdkg.rs | 6 +++--- ferveo/examples/pvdkg.rs | 6 +++--- ferveo/src/dkg/common.rs | 4 ++-- ferveo/src/dkg/pv.rs | 20 ++++++++++---------- ferveo/src/lib.rs | 2 +- ferveo/src/vss/pvss.rs | 2 +- 7 files changed, 27 insertions(+), 27 deletions(-) diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs index 0898d700..93df3f99 100644 --- a/ferveo-common/src/lib.rs +++ b/ferveo-common/src/lib.rs @@ -9,29 +9,29 @@ pub use keypair::*; use std::cmp::Ordering; #[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] -/// Represents a tendermint validator -pub struct TendermintValidator { +/// Represents an external validator +pub struct ExternalValidator { /// The established address of the validator pub address: String, /// The Public key pub public_key: PublicKey, } -impl PartialEq for TendermintValidator { +impl PartialEq for ExternalValidator { fn eq(&self, other: &Self) -> bool { (&self.address) == (&other.address) } } -impl Eq for TendermintValidator {} +impl Eq for ExternalValidator {} -impl PartialOrd for TendermintValidator { +impl PartialOrd for ExternalValidator { fn 
partial_cmp(&self, other: &Self) -> Option { Some(self.address.cmp(&other.address)) } } -impl Ord for TendermintValidator { +impl Ord for ExternalValidator { fn cmp(&self, other: &Self) -> Ordering { self.address.cmp(&other.address) } @@ -39,7 +39,7 @@ impl Ord for TendermintValidator { #[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct Validator { - pub validator: TendermintValidator, + pub validator: ExternalValidator, pub share_index: usize, } diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index 292b691e..a6eca403 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -1,6 +1,6 @@ pub use ark_bls12_381::Bls12_381 as EllipticCurve; use criterion::{criterion_group, criterion_main, Criterion}; -use ferveo_common::TendermintValidator; +use ferveo_common::ExternalValidator; use pprof::criterion::{Output, PProfProfiler}; use ferveo::*; @@ -47,9 +47,9 @@ pub fn gen_keypairs(num: u64) -> Vec> { /// Generate a few validators pub fn gen_validators( keypairs: &[ferveo_common::Keypair], -) -> Vec> { +) -> Vec> { (0..keypairs.len()) - .map(|i| TendermintValidator { + .map(|i| ExternalValidator { address: format!("validator_{}", i), public_key: keypairs[i].public(), }) diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs index d06c401c..10f0032a 100644 --- a/ferveo/examples/pvdkg.rs +++ b/ferveo/examples/pvdkg.rs @@ -1,6 +1,6 @@ pub use ark_bls12_381::Bls12_381 as EllipticCurve; use ferveo::*; -use ferveo_common::TendermintValidator; +use ferveo_common::ExternalValidator; use measure_time::print_time; pub fn main() { @@ -21,9 +21,9 @@ pub fn gen_keypairs(num: u64) -> Vec> { /// Generate a few validators pub fn gen_validators( keypairs: &[ferveo_common::Keypair], -) -> Vec> { +) -> Vec> { (0..keypairs.len()) - .map(|i| TendermintValidator { + .map(|i| ExternalValidator { address: format!("validator_{}", i), public_key: keypairs[i].public(), }) diff --git a/ferveo/src/dkg/common.rs b/ferveo/src/dkg/common.rs index 67987860..c519db1b 100644 --- a/ferveo/src/dkg/common.rs +++ b/ferveo/src/dkg/common.rs @@ -1,9 +1,9 @@ use crate::*; -use ferveo_common::TendermintValidator; +use ferveo_common::ExternalValidator; use itertools::izip; pub fn make_validators( - validators: Vec>, + validators: Vec>, ) -> Vec> { validators .iter() diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index c8936319..4fb34c60 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -5,7 +5,7 @@ use ark_ec::PairingEngine; use ark_ff::Field; use ark_serialize::*; use ark_std::{end_timer, start_timer}; -use ferveo_common::{PublicKey, TendermintValidator}; +use ferveo_common::{PublicKey, ExternalValidator}; use std::collections::BTreeMap; /// The DKG context that holds all of the local state for participating in the DKG @@ -31,9 +31,9 @@ impl PubliclyVerifiableDkg { /// `me` the validator creating this instance /// `session_keypair` the keypair for `me` pub fn new( - validators: Vec>, + validators: Vec>, params: Params, - me: TendermintValidator, + me: ExternalValidator, session_keypair: ferveo_common::Keypair, ) -> Result { use ark_std::UniformRand; @@ -149,7 +149,7 @@ impl PubliclyVerifiableDkg { /// `payload` is the content of the message pub fn verify_message( &self, - sender: &TendermintValidator, + sender: &ExternalValidator, payload: &Message, ) -> Result<()> { match payload { @@ -194,7 +194,7 @@ impl PubliclyVerifiableDkg { /// to the state machine pub fn apply_message( &mut self, - sender: 
TendermintValidator, + sender: ExternalValidator, payload: Message, ) -> Result<()> { match payload { @@ -276,9 +276,9 @@ pub(crate) mod test_common { pub fn gen_n_validators( keypairs: &[ferveo_common::Keypair], n: u32, - ) -> Vec> { + ) -> Vec> { (0..n) - .map(|i| TendermintValidator { + .map(|i| ExternalValidator { address: format!("validator_{}", i), public_key: keypairs[i as usize].public(), }) @@ -288,7 +288,7 @@ pub(crate) mod test_common { /// Generate a few validators pub fn gen_validators( keypairs: &[ferveo_common::Keypair], - ) -> Vec> { + ) -> Vec> { gen_n_validators(keypairs, 4) } @@ -395,7 +395,7 @@ mod test_dkg_init { shares_num: 8, retry_after: 2, }, - TendermintValidator:: { + ExternalValidator:: { address: "non-existant-validator".into(), public_key: keypair.public(), }, @@ -487,7 +487,7 @@ mod test_dealing { } )); let pvss = dkg.share(rng).expect("Test failed"); - let sender = TendermintValidator:: { + let sender = ExternalValidator:: { address: "fake-address".into(), public_key: ferveo_common::Keypair::::new(rng) .public(), diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index bc1e7811..c88b3253 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -42,7 +42,7 @@ mod test_dkg_full { use ark_bls12_381::{Bls12_381 as EllipticCurve, Bls12_381, G2Projective}; use ark_ec::bls12::G2Affine; use ark_ff::{Fp12, UniformRand}; - use ferveo_common::{Keypair, TendermintValidator}; + use ferveo_common::{Keypair, ExternalValidator}; use group_threshold_cryptography as tpke; use group_threshold_cryptography::Ciphertext; use itertools::{zip_eq, Itertools}; diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 5ce417ec..7efba119 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -279,7 +279,7 @@ mod test_pvss { use crate::dkg::pv::test_common::*; use ark_bls12_381::Bls12_381 as EllipticCurve; use ark_ff::UniformRand; - use ferveo_common::TendermintValidator; + use ferveo_common::ExternalValidator; type Fr = ::Fr; type G1 = ::G1Affine; From 002d407d1f592af1de836af1f5030b9baa423b90 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 5 Jan 2023 10:49:54 +0100 Subject: [PATCH 19/28] remove unused code --- ferveo-common/src/lib.rs | 44 +----------------------------- ferveo/benches/benchmarks/pvdkg.rs | 2 +- ferveo/examples/pvdkg.rs | 2 +- ferveo/src/dkg/pv.rs | 10 +++---- 4 files changed, 8 insertions(+), 50 deletions(-) diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs index 93df3f99..98d77bb1 100644 --- a/ferveo-common/src/lib.rs +++ b/ferveo-common/src/lib.rs @@ -6,9 +6,8 @@ use ark_serialize::{ pub mod keypair; pub use keypair::*; -use std::cmp::Ordering; -#[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] +#[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize, PartialEq)] /// Represents an external validator pub struct ExternalValidator { /// The established address of the validator @@ -17,53 +16,12 @@ pub struct ExternalValidator { pub public_key: PublicKey, } -impl PartialEq for ExternalValidator { - fn eq(&self, other: &Self) -> bool { - (&self.address) == (&other.address) - } -} - -impl Eq for ExternalValidator {} - -impl PartialOrd for ExternalValidator { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.address.cmp(&other.address)) - } -} - -impl Ord for ExternalValidator { - fn cmp(&self, other: &Self) -> Ordering { - self.address.cmp(&other.address) - } -} - #[derive(Clone, Debug, CanonicalSerialize, CanonicalDeserialize)] pub struct Validator { pub validator: ExternalValidator, pub 
share_index: usize, } -impl PartialEq for Validator { - fn eq(&self, other: &Self) -> bool { - (&self.validator, self.share_index) - == (&other.validator, other.share_index) - } -} - -impl Eq for Validator {} - -impl PartialOrd for Validator { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.validator.cmp(&other.validator)) - } -} - -impl Ord for Validator { - fn cmp(&self, other: &Self) -> Ordering { - self.validator.cmp(&other.validator) - } -} - impl Rng for ark_std::rand::prelude::StdRng {} pub trait Rng: ark_std::rand::CryptoRng + ark_std::rand::RngCore {} diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index a6eca403..9211bab7 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -72,7 +72,7 @@ pub fn setup_dkg( shares_num: 4, retry_after: 2, }, - me, + &me, keypairs[validator], ) .expect("Setup failed") diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs index 10f0032a..ac7aea39 100644 --- a/ferveo/examples/pvdkg.rs +++ b/ferveo/examples/pvdkg.rs @@ -47,7 +47,7 @@ pub fn setup_dkg( shares_num: shares, retry_after: 1, }, - me, + &me, keypairs[validator], ) .expect("Setup failed") diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 4fb34c60..c933d831 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -33,7 +33,7 @@ impl PubliclyVerifiableDkg { pub fn new( validators: Vec>, params: Params, - me: ExternalValidator, + me: &ExternalValidator, session_keypair: ferveo_common::Keypair, ) -> Result { use ark_std::UniformRand; @@ -45,7 +45,7 @@ impl PubliclyVerifiableDkg { // keep track of the owner of this instance in the validator set let me = validators .iter() - .position(|probe| me.address == probe.address) + .position(|probe| me == probe) .context( "could not find this validator in the provided validator set", )?; @@ -158,7 +158,7 @@ impl PubliclyVerifiableDkg { // an address keyed hashmap after partitioning the shares shares // in the [`new`] method let sender = self.validators - .iter().position(|probe| sender.address == probe.validator.address) + .iter().position(|probe| sender == &probe.validator) .context("dkg received unknown dealer")?; if self.vss.contains_key(&(sender as u32)) { Err(anyhow!("Repeat dealer {}", sender)) @@ -310,7 +310,7 @@ pub(crate) mod test_common { shares_num, retry_after: 2, }, - me, + &me, keypairs[my_index], ) .expect("Setup failed") @@ -395,7 +395,7 @@ mod test_dkg_init { shares_num: 8, retry_after: 2, }, - ExternalValidator:: { + &ExternalValidator:: { address: "non-existant-validator".into(), public_key: keypair.public(), }, From 0125381809b9ae50e1a40cc167bfe7d2fa710e69 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 5 Jan 2023 11:03:20 +0100 Subject: [PATCH 20/28] fix rustfmt --- ferveo/src/dkg/pv.rs | 43 ++++++++++++++++--------------------------- ferveo/src/lib.rs | 2 +- 2 files changed, 17 insertions(+), 28 deletions(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index c933d831..1d0df3dd 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -5,7 +5,7 @@ use ark_ec::PairingEngine; use ark_ff::Field; use ark_serialize::*; use ark_std::{end_timer, start_timer}; -use ferveo_common::{PublicKey, ExternalValidator}; +use ferveo_common::{ExternalValidator, PublicKey}; use std::collections::BTreeMap; /// The DKG context that holds all of the local state for participating in the DKG @@ -43,12 +43,9 @@ impl PubliclyVerifiableDkg { .ok_or_else(|| anyhow!("unable to construct domain"))?; // keep track of the 
owner of this instance in the validator set - let me = validators - .iter() - .position(|probe| me == probe) - .context( - "could not find this validator in the provided validator set", - )?; + let me = validators.iter().position(|probe| me == probe).context( + "could not find this validator in the provided validator set", + )?; let validators = make_validators(validators); @@ -342,17 +339,15 @@ pub(crate) mod test_common { let rng = &mut ark_std::test_rng(); // Gather everyone's transcripts - let transcripts = (0..n_validators) - .map(|i| { - let mut dkg = setup_dkg_for_n_validators( - n_validators, - security_threshold, - shares_num, - i as usize, - ); - dkg.share(rng).expect("Test failed") - }) - .collect::>(); + let transcripts = (0..n_validators).map(|i| { + let mut dkg = setup_dkg_for_n_validators( + n_validators, + security_threshold, + shares_num, + i as usize, + ); + dkg.share(rng).expect("Test failed") + }); // Our test dkg let mut dkg = setup_dkg_for_n_validators( @@ -361,16 +356,10 @@ pub(crate) mod test_common { shares_num, 0, ); - transcripts - .into_iter() - .enumerate() - .for_each(|(sender, pvss)| { - dkg.apply_message( - dkg.validators[sender].validator.clone(), - pvss, - ) + transcripts.enumerate().for_each(|(sender, pvss)| { + dkg.apply_message(dkg.validators[sender].validator.clone(), pvss) .expect("Setup failed"); - }); + }); dkg } } diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index c88b3253..2a9cfc55 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -42,7 +42,7 @@ mod test_dkg_full { use ark_bls12_381::{Bls12_381 as EllipticCurve, Bls12_381, G2Projective}; use ark_ec::bls12::G2Affine; use ark_ff::{Fp12, UniformRand}; - use ferveo_common::{Keypair, ExternalValidator}; + use ferveo_common::{ExternalValidator, Keypair}; use group_threshold_cryptography as tpke; use group_threshold_cryptography::Ciphertext; use itertools::{zip_eq, Itertools}; From dc53f7b568abe296f2f0812b8233e5e388965277 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 18 Jan 2023 17:40:24 +0100 Subject: [PATCH 21/28] fix after rebase --- ferveo/src/lib.rs | 6 ++++-- ferveo/src/vss/pvss.rs | 16 +++++++++++----- tpke/benches/benchmarks.rs | 1 + tpke/benches/tpke.rs | 7 +++++-- tpke/src/combine.rs | 5 ++--- tpke/src/decryption.rs | 2 -- tpke/src/lib.rs | 14 +++++++++----- 7 files changed, 32 insertions(+), 19 deletions(-) diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 2a9cfc55..cb125135 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -84,7 +84,8 @@ mod test_dkg_full { &ciphertext, aad, &shared_secret, - ); + ) + .unwrap(); assert_eq!(plaintext, msg); } @@ -130,7 +131,8 @@ mod test_dkg_full { &ciphertext, aad, &shared_secret, - ); + ) + .unwrap(); assert_eq!(plaintext, msg); } } diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 7efba119..31fdbcd0 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -8,7 +8,7 @@ use ark_ec::PairingEngine; use ark_ff::UniformRand; use ark_serialize::*; use ferveo_common::{Keypair, PublicKey}; -use group_threshold_cryptography::Ciphertext; +use group_threshold_cryptography::{Ciphertext, DecryptionShareSimple}; use itertools::{zip_eq, Itertools}; use subproductdomain::fast_multiexp; @@ -257,8 +257,8 @@ pub fn make_decryption_shares( ciphertext: &Ciphertext, validator_keypairs: Vec>, aggregate: Vec, -) -> Vec { - let decryption_shares = aggregate +) -> Vec> { + aggregate .iter() .zip_eq(validator_keypairs.iter()) .map(|(encrypted_share, keypair)| { @@ -268,8 +268,14 @@ pub fn make_decryption_shares( let 
u = ciphertext.commitment; E::pairing(u, z_i) }) - .collect::>(); - decryption_shares + .enumerate() + .map( + |(decrypter_index, decryption_share)| DecryptionShareSimple { + decrypter_index, + decryption_share, + }, + ) + .collect::>() } #[cfg(test)] diff --git a/tpke/benches/benchmarks.rs b/tpke/benches/benchmarks.rs index e69de29b..8b137891 100644 --- a/tpke/benches/benchmarks.rs +++ b/tpke/benches/benchmarks.rs @@ -0,0 +1 @@ + diff --git a/tpke/benches/tpke.rs b/tpke/benches/tpke.rs index 965b004b..1ab7d2a8 100644 --- a/tpke/benches/tpke.rs +++ b/tpke/benches/tpke.rs @@ -110,7 +110,8 @@ impl SetupSimple { .collect(); let pub_contexts = contexts[0].clone().public_decryption_contexts; - let lagrange = prepare_combine_simple::(&pub_contexts); + let domain: Vec = pub_contexts.iter().map(|c| c.domain).collect(); + let lagrange = prepare_combine_simple::(&domain); let shared_secret = share_combine_simple::(&decryption_shares, &lagrange); @@ -203,7 +204,9 @@ pub fn bench_share_prepare(c: &mut Criterion) { }; let simple = { let setup = SetupSimple::new(shares_num, msg_size, rng); - move || black_box(prepare_combine_simple(&setup.pub_contexts)) + let domain: Vec = + setup.pub_contexts.iter().map(|c| c.domain).collect(); + move || black_box(prepare_combine_simple::(&domain)) }; group.bench_function( diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index bc0b655c..d98bb7b6 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -44,12 +44,11 @@ pub fn prepare_combine_fast( } pub fn prepare_combine_simple( - pub_contexts: &[PublicDecryptionContextSimple], + domain: &[E::Fr], ) -> Vec { - let shares_x: Vec<_> = pub_contexts.iter().map(|c| c.domain).collect(); // See https://en.wikipedia.org/wiki/Lagrange_polynomial#Optimal_algorithm // In this formula x_i = 0, hence numerator is x_m - lagrange_basis_at::(&shares_x, &E::Fr::zero()) + lagrange_basis_at::(domain, &E::Fr::zero()) } /// Calculate lagrange coefficients using optimized formula diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs index 910ff2d8..b00f7379 100644 --- a/tpke/src/decryption.rs +++ b/tpke/src/decryption.rs @@ -3,8 +3,6 @@ use crate::*; -use ark_ec::ProjectiveCurve; - #[derive(Debug, Clone)] pub struct DecryptionShareFast { pub decrypter_index: usize, diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index 9b487874..394c3056 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -482,11 +482,14 @@ mod tests { .iter() .map(|c| c.create_share(&ciphertext)) .collect(); - let lagrange = prepare_combine_simple::( - &contexts[0].public_decryption_contexts, - ); + let domain = contexts[0] + .public_decryption_contexts + .iter() + .map(|c| c.domain) + .collect::>(); + let lagrange = prepare_combine_simple::(&domain); - let shared_secret = + let shared_secret = share_combine_simple::(&decryption_shares, &lagrange); test_ciphertext_validation_fails(msg, aad, &ciphertext, &shared_secret); @@ -549,7 +552,8 @@ mod tests { pub_contexts: &[PublicDecryptionContextSimple], decryption_shares: &[DecryptionShareSimple], ) -> E::Fqk { - let lagrange = prepare_combine_simple::(pub_contexts); + let domain = pub_contexts.iter().map(|c| c.domain).collect::>(); + let lagrange = prepare_combine_simple::(&domain); share_combine_simple::(decryption_shares, &lagrange) } From 6fb4c890cef5c1ca077d301bf4e3e12c78584d39 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 18 Jan 2023 18:09:47 +0100 Subject: [PATCH 22/28] documents and refactor code --- ferveo-common/src/lib.rs | 4 +-- ferveo/benches/benchmarks/pvdkg.rs | 2 +- 
ferveo/examples/pvdkg.rs | 6 ++-- ferveo/src/dkg.rs | 1 + ferveo/src/dkg/pv.rs | 56 ++++++++++++++---------------- ferveo/src/main.rs | 3 -- ferveo/src/vss/pvss.rs | 22 ++++++++---- 7 files changed, 49 insertions(+), 45 deletions(-) delete mode 100644 ferveo/src/main.rs diff --git a/ferveo-common/src/lib.rs b/ferveo-common/src/lib.rs index 98d77bb1..7868a5d6 100644 --- a/ferveo-common/src/lib.rs +++ b/ferveo-common/src/lib.rs @@ -38,7 +38,7 @@ pub mod ark_serde { { use serde::ser::Error; let mut bytes = vec![]; - data.serialize(&mut bytes).map_err(S::Error::custom)?; + data.serialize(&mut bytes).map_err(Error::custom)?; serde_bytes::Bytes::new(&bytes).serialize(serializer) } /// Deserialize an ark type with serde @@ -49,7 +49,7 @@ pub mod ark_serde { { use serde::de::Error; let bytes = ::deserialize(deserializer)?; - T::deserialize(bytes.as_slice()).map_err(D::Error::custom) + T::deserialize(bytes.as_slice()).map_err(Error::custom) } } diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index 9211bab7..c95504bf 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -69,7 +69,7 @@ pub fn setup_dkg( Params { tau: 0, security_threshold: 300 / 3, - shares_num: 4, + shares_num: 300, retry_after: 2, }, &me, diff --git a/ferveo/examples/pvdkg.rs b/ferveo/examples/pvdkg.rs index ac7aea39..82968d1c 100644 --- a/ferveo/examples/pvdkg.rs +++ b/ferveo/examples/pvdkg.rs @@ -34,7 +34,7 @@ pub fn gen_validators( pub fn setup_dkg( validator: usize, num: u64, - shares: u32, + shares_num: u32, ) -> PubliclyVerifiableDkg { let keypairs = gen_keypairs(num); let validators = gen_validators(&keypairs); @@ -43,8 +43,8 @@ pub fn setup_dkg( validators, Params { tau: 0, - security_threshold: shares / 3, - shares_num: shares, + security_threshold: shares_num / 3, + shares_num, retry_after: 1, }, &me, diff --git a/ferveo/src/dkg.rs b/ferveo/src/dkg.rs index b8e6bb1d..93aebf7f 100644 --- a/ferveo/src/dkg.rs +++ b/ferveo/src/dkg.rs @@ -17,6 +17,7 @@ use ed25519_dalek as ed25519; pub mod common; pub mod pv; + pub use common::*; pub use pv::*; diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 1d0df3dd..6973be39 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -25,8 +25,7 @@ pub struct PubliclyVerifiableDkg { impl PubliclyVerifiableDkg { /// Create a new DKG context to participate in the DKG /// Every identity in the DKG is linked to an ed25519 public key; - /// `validator_set`: The set of validators and their respective voting powers - /// *IMPORTANT: this set should be reverse sorted* + /// `validatorst`: List of validators /// `params` contains the parameters of the DKG such as number of shares /// `me` the validator creating this instance /// `session_keypair` the keypair for `me` @@ -290,14 +289,13 @@ pub(crate) mod test_common { } pub fn setup_dkg_for_n_validators( - n_validators: u32, security_threshold: u32, shares_num: u32, my_index: usize, ) -> PubliclyVerifiableDkg { - let keypairs = gen_n_keypairs(n_validators); + let keypairs = gen_n_keypairs(shares_num); for _keypair in &keypairs {} - let validators = gen_n_validators(&keypairs, n_validators); + let validators = gen_n_validators(&keypairs, shares_num); let me = validators[my_index].clone(); PubliclyVerifiableDkg::new( validators, @@ -317,7 +315,7 @@ pub(crate) mod test_common { /// /// The [`test_dkg_init`] module checks correctness of this setup pub fn setup_dkg(validator: usize) -> PubliclyVerifiableDkg { - setup_dkg_for_n_validators(4, 2, 6, validator) + 
setup_dkg_for_n_validators(2, 4, validator) } /// Set up a dkg with enough pvss transcripts to meet the threshold @@ -331,7 +329,6 @@ pub(crate) mod test_common { security_threshold: u32, shares_num: u32, ) -> PubliclyVerifiableDkg { - let n_validators = shares_num; // Make sure that the number of shares is a power of 2 for the FFT to work (Radix-2 FFT domain is being used) let is_power_of_2 = |n: u32| n != 0 && (n & (n - 1)) == 0; assert!(is_power_of_2(shares_num)); @@ -339,9 +336,8 @@ pub(crate) mod test_common { let rng = &mut ark_std::test_rng(); // Gather everyone's transcripts - let transcripts = (0..n_validators).map(|i| { + let transcripts = (0..shares_num).map(|i| { let mut dkg = setup_dkg_for_n_validators( - n_validators, security_threshold, shares_num, i as usize, @@ -350,12 +346,8 @@ pub(crate) mod test_common { }); // Our test dkg - let mut dkg = setup_dkg_for_n_validators( - n_validators, - security_threshold, - shares_num, - 0, - ); + let mut dkg = + setup_dkg_for_n_validators(security_threshold, shares_num, 0); transcripts.enumerate().for_each(|(sender, pvss)| { dkg.apply_message(dkg.validators[sender].validator.clone(), pvss) .expect("Setup failed"); @@ -428,7 +420,7 @@ mod test_dealing { } // our test dkg let mut dkg = setup_dkg(0); - // iterate over transcripts + let mut expected = 0u32; for (sender, pvss) in transcripts.iter().enumerate() { // check the verification passes @@ -442,9 +434,9 @@ mod test_dealing { pvss.clone(), ) .is_ok()); - expected += 1; // dkg.validators[3 - sender].validator.power as u32; - // As long as we still have transcripts to deal, we should be in the Dealt state - if sender < transcripts.len() - 1 { + + expected += 1; + if sender < (dkg.params.security_threshold - 1) as usize { // check that shares accumulates correctly match dkg.state { DkgState::Sharing { @@ -501,6 +493,7 @@ mod test_dealing { fn test_pvss_sent_twice_rejected() { let rng = &mut ark_std::test_rng(); let mut dkg = setup_dkg(0); + // We start with an empty state assert!(matches!( dkg.state, DkgState::Sharing { @@ -508,13 +501,13 @@ mod test_dealing { block: 0, } )); + let pvss = dkg.share(rng).expect("Test failed"); let sender = dkg.validators[3].validator.clone(); - // check that verification fails + + // First PVSS is accepted assert!(dkg.verify_message(&sender, &pvss).is_ok()); - // check that application fails assert!(dkg.apply_message(sender.clone(), pvss.clone()).is_ok()); - // check that state has appropriately changed assert!(matches!( dkg.state, DkgState::Sharing { @@ -522,7 +515,8 @@ mod test_dealing { block: 0, } )); - // check that sending another pvss from same sender fails + + // Second PVSS is rejected assert!(dkg.verify_message(&sender, &pvss).is_err()); } @@ -532,6 +526,7 @@ mod test_dealing { fn test_own_pvss() { let rng = &mut ark_std::test_rng(); let mut dkg = setup_dkg(0); + // We start with an empty state assert!(matches!( dkg.state, DkgState::Sharing { @@ -539,8 +534,10 @@ mod test_dealing { block: 0, } )); - // create share message and check state update + + // Sender creates a PVSS transcript let pvss = dkg.share(rng).expect("Test failed"); + // Note that state of DKG has not changed assert!(matches!( dkg.state, DkgState::Sharing { @@ -548,11 +545,12 @@ mod test_dealing { block: 0, } )); + let sender = dkg.validators[0].validator.clone(); - // check that verification fails + + // Sender verifies it's own PVSS transcript assert!(dkg.verify_message(&sender, &pvss).is_ok()); assert!(dkg.apply_message(sender, pvss).is_ok()); - // check that state did not 
change assert!(matches!( dkg.state, DkgState::Sharing { @@ -582,7 +580,7 @@ mod test_dealing { assert!(dkg.share(rng).is_err()); // check that even if security threshold is met, we can still share - dkg.state = DkgState::Dealt; + dkg.state = Dealt; assert!(dkg.share(rng).is_ok()); } @@ -609,7 +607,7 @@ mod test_dealing { assert!(dkg.apply_message(sender.clone(), pvss.clone()).is_err()); // check that we can still accept pvss transcripts after meeting threshold - dkg.state = DkgState::Dealt; + dkg.state = Dealt; assert!(dkg.verify_message(&sender, &pvss).is_ok()); assert!(dkg.apply_message(sender, pvss).is_ok()); assert!(matches!(dkg.state, DkgState::Dealt)) @@ -634,7 +632,7 @@ mod test_dealing { fn test_pvss_wait_if_not_in_sharing_state() { let mut dkg = setup_dkg(0); for state in vec![ - DkgState::Dealt, + Dealt, DkgState::Success { final_key: G1::zero(), }, diff --git a/ferveo/src/main.rs b/ferveo/src/main.rs deleted file mode 100644 index e7a11a96..00000000 --- a/ferveo/src/main.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - println!("Hello, world!"); -} diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 31fdbcd0..719a0ab0 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -88,7 +88,7 @@ impl PubliclyVerifiableSS { // ek_{i}^{eval_i}, i = validator index fast_multiexp( // &evals.evals[i..i] = &evals.evals[i] - &[evals.evals[val.share_index]], + &[evals.evals[val.share_index]], // one share per validator val.validator.public_key.encryption_key.into_projective(), )[0] }) @@ -102,7 +102,6 @@ impl PubliclyVerifiableSS { // TODO: Cross check proof of knowledge check with the whitepaper; this check proves that there is a relationship between the secret and the pvss transcript // Sigma is a proof of knowledge of the secret, sigma = h^s let sigma = E::G2Affine::prime_subgroup_generator().mul(*s).into(); //todo hash to curve - // So at this point, we have a commitment to the polynomial, a number of shares, and a proof of knowledge let vss = Self { coeffs, shares, @@ -303,9 +302,12 @@ mod test_pvss { // check that the chosen secret coefficient is correct assert_eq!(pvss.coeffs[0], G1::prime_subgroup_generator().mul(s)); //check that a polynomial of the correct degree was created - assert_eq!(pvss.coeffs.len(), 5); + assert_eq!( + pvss.coeffs.len(), + dkg.params.security_threshold as usize + 1 + ); // check that the correct number of shares were created - assert_eq!(pvss.shares.len(), 4); + assert_eq!(pvss.shares.len(), dkg.validators.len()); // check that the prove of knowledge is correct assert_eq!(pvss.sigma, G2::prime_subgroup_generator().mul(s)); // check that the optimistic verify returns true @@ -340,15 +342,21 @@ mod test_pvss { let dkg = setup_dealt_dkg(); let aggregate = aggregate(&dkg); //check that a polynomial of the correct degree was created - assert_eq!(aggregate.coeffs.len(), 3); + assert_eq!( + aggregate.coeffs.len(), + dkg.params.security_threshold as usize + 1 + ); // check that the correct number of shares were created - assert_eq!(aggregate.shares.len(), 4); + assert_eq!(aggregate.shares.len(), dkg.validators.len()); // check that the optimistic verify returns true assert!(aggregate.verify_optimistic()); // check that the full verify returns true assert!(aggregate.verify_full(&dkg)); // check that the verification of aggregation passes - assert_eq!(aggregate.verify_aggregation(&dkg).expect("Test failed"), 4); + assert_eq!( + aggregate.verify_aggregation(&dkg).expect("Test failed"), + dkg.validators.len() as u32 + ); } /// Check that if the 
aggregated pvss transcript has an From e9d706481adb3010924c2fc5014d4fff96e742fd Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Wed, 18 Jan 2023 18:40:07 +0100 Subject: [PATCH 23/28] remove rebasing artifact --- tpke/benches/benchmarks.rs | 1 - 1 file changed, 1 deletion(-) delete mode 100644 tpke/benches/benchmarks.rs diff --git a/tpke/benches/benchmarks.rs b/tpke/benches/benchmarks.rs deleted file mode 100644 index 8b137891..00000000 --- a/tpke/benches/benchmarks.rs +++ /dev/null @@ -1 +0,0 @@ - From dce013c0825ad5cabf7fe74edfc9d96ce80a44da Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 19 Jan 2023 09:45:50 +0100 Subject: [PATCH 24/28] refactor to a single share per validator --- tpke/src/api.rs | 2 + tpke/src/ciphertext.rs | 12 ----- tpke/src/combine.rs | 17 ++---- tpke/src/context.rs | 15 +++--- tpke/src/decryption.rs | 21 ++++++++ tpke/src/key_share.rs | 95 ++++++++------------------------- tpke/src/lib.rs | 117 ++++++++++++----------------------------- tpke/src/refresh.rs | 4 +- 8 files changed, 92 insertions(+), 191 deletions(-) diff --git a/tpke/src/api.rs b/tpke/src/api.rs index 492862c7..ee769e9a 100644 --- a/tpke/src/api.rs +++ b/tpke/src/api.rs @@ -1,5 +1,7 @@ //! Contains the public API of the library. +#![allow(dead_code)] + // TODO: Refactor this module to deduplicate shared code from tpke-wasm and tpke-wasm. use std::convert::TryInto; diff --git a/tpke/src/ciphertext.rs b/tpke/src/ciphertext.rs index 76b2ee8a..68389d53 100644 --- a/tpke/src/ciphertext.rs +++ b/tpke/src/ciphertext.rs @@ -56,7 +56,6 @@ impl Ciphertext { ); let auth_tag = E::G2Affine::read(&auth_tag_bytes[..]).unwrap(); - const CIPHERTEXT_LEN: usize = 33; let ciphertext = bytes[COMMITMENT_LEN + AUTH_TAG_LEN..].to_vec(); Self { @@ -118,17 +117,6 @@ pub fn check_ciphertext_validity( ]) == E::Fqk::one() } -fn decrypt( - ciphertext: &Ciphertext, - privkey: E::G2Affine, -) -> Vec { - let s = E::product_of_pairings(&[( - E::G1Prepared::from(ciphertext.commitment), - E::G2Prepared::from(privkey), - )]); - decrypt_with_shared_secret(ciphertext, &s) -} - pub fn checked_decrypt( ciphertext: &Ciphertext, aad: &[u8], diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index d98bb7b6..a1b54870 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -11,30 +11,19 @@ pub fn prepare_combine_fast( let mut domain = vec![]; // omega_i, vector of domain points let mut n_0 = E::Fr::one(); for d_i in shares.iter() { - // There's just one domain point per participant, TODO: Refactor underlying data structures - assert_eq!( - public_decryption_contexts[d_i.decrypter_index].domain.len(), - 1 - ); - domain.push(public_decryption_contexts[d_i.decrypter_index].domain[0]); + domain.push(public_decryption_contexts[d_i.decrypter_index].domain); n_0 *= public_decryption_contexts[d_i.decrypter_index].lagrange_n_0; // n_0_i = 1 * t^1 * t^2 ... } let s = SubproductDomain::::new(domain); let mut lagrange = s.inverse_lagrange_coefficients(); // 1/L_i - // TODO: If this is really 1/L_i can I just return here and use it directly? Or is 1/L_i somehow different from L_i(0)? 
// Given a vector of field elements {v_i}, compute the vector {coeff * v_i^(-1)} ark_ff::batch_inversion_and_mul(&mut lagrange, &n_0); // n_0 * L_i // L_i * [b]Z_i izip!(shares.iter(), lagrange.iter()) .map(|(d_i, lambda)| { let decrypter = &public_decryption_contexts[d_i.decrypter_index]; - // There's just one share per participant, TODO: Refactor underlying data structures - assert_eq!( - decrypter.blinded_key_shares.blinded_key_shares.len(), - 1 - ); let blinded_key_share = - decrypter.blinded_key_shares.blinded_key_shares[0]; + decrypter.blinded_key_share.blinded_key_share; E::G2Prepared::from( // [b]Z_i * L_i blinded_key_share.mul(*lambda).into_affine(), @@ -77,7 +66,7 @@ pub fn share_combine_fast( let mut pairing_product: Vec<(E::G1Prepared, E::G2Prepared)> = vec![]; for (d_i, prepared_key_share) in izip!(shares, prepared_key_shares.iter()) { - // e(D_i, [b*omega_i^-1] Z_{i,omega_i}), TODO: Is this formula correct? + // e(D_i, [b*omega_i^-1] Z_{i,omega_i}) pairing_product.push(( // D_i E::G1Prepared::from(d_i.decryption_share), diff --git a/tpke/src/context.rs b/tpke/src/context.rs index ddff1a25..c4652a10 100644 --- a/tpke/src/context.rs +++ b/tpke/src/context.rs @@ -3,9 +3,9 @@ use ark_ec::ProjectiveCurve; #[derive(Clone, Debug)] pub struct PublicDecryptionContextFast { - pub domain: Vec, - pub public_key_shares: PublicKeyShares, - pub blinded_key_shares: BlindedKeyShares, + pub domain: E::Fr, + pub public_key_share: PublicKeyShare, + pub blinded_key_share: BlindedKeyShare, // This decrypter's contribution to N(0), namely (-1)^|domain| * \prod_i omega_i pub lagrange_n_0: E::Fr, } @@ -13,8 +13,8 @@ pub struct PublicDecryptionContextFast { #[derive(Clone, Debug)] pub struct PublicDecryptionContextSimple { pub domain: E::Fr, - pub public_key_shares: PublicKeyShares, - pub blinded_key_shares: BlindedKeyShares, + pub public_key_share: PublicKeyShare, + pub blinded_key_share: BlindedKeyShare, } #[derive(Clone, Debug)] @@ -34,7 +34,6 @@ pub struct PrivateDecryptionContextFast { pub private_key_share: PrivateKeyShare, pub public_decryption_contexts: Vec>, pub scalar_bits: usize, - pub window_size: usize, } impl PrivateDecryptionContextFast { @@ -66,7 +65,7 @@ impl PrivateDecryptionContextFast { .iter() .map(|d| { self.public_decryption_contexts[d.decrypter_index] - .blinded_key_shares + .blinded_key_share .blinding_key_prepared .clone() }) @@ -132,7 +131,7 @@ impl PrivateDecryptionContextSimple { ) -> DecryptionShareSimple { let u = ciphertext.commitment; let z_i = self.private_key_share.clone(); - let z_i = z_i.private_key_shares[0]; + let z_i = z_i.private_key_share; // C_i = e(U, Z_i) let c_i = E::pairing(u, z_i); DecryptionShareSimple { diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs index b00f7379..c09a9628 100644 --- a/tpke/src/decryption.rs +++ b/tpke/src/decryption.rs @@ -41,3 +41,24 @@ pub struct DecryptionShareSimple { pub decrypter_index: usize, pub decryption_share: E::Fqk, } + +#[cfg(test)] +mod tests { + use crate::*; + + type E = ark_bls12_381::Bls12_381; + + #[test] + fn decryption_share_serialization() { + let decryption_share = DecryptionShareFast:: { + decrypter_index: 1, + decryption_share: ark_bls12_381::G1Affine::prime_subgroup_generator( + ), + }; + + let serialized = decryption_share.to_bytes(); + let deserialized: DecryptionShareFast = + DecryptionShareFast::from_bytes(&serialized); + assert_eq!(serialized, deserialized.to_bytes()) + } +} diff --git a/tpke/src/key_share.rs b/tpke/src/key_share.rs index 9719a745..241963f3 100644 --- 
a/tpke/src/key_share.rs +++ b/tpke/src/key_share.rs @@ -3,112 +3,63 @@ use crate::*; use ark_ec::ProjectiveCurve; -use itertools::Itertools; #[derive(Debug, Clone)] -pub struct PublicKeyShares { - pub public_key_shares: Vec, // A_{i, \omega_i} +pub struct PublicKeyShare { + pub public_key_share: E::G1Affine, // A_{i, \omega_i} } #[derive(Debug, Clone)] -pub struct BlindedKeyShares { - pub blinding_key: E::G2Affine, - pub blinding_key_prepared: E::G2Prepared, // [b] H - pub blinded_key_shares: Vec, // [b] Z_{i, \omega_i} - pub window_tables: Vec>, // [b*omega_i^-1] Z_{i, \omega_i} +pub struct BlindedKeyShare { + pub blinding_key: E::G2Affine, // [b] H + pub blinded_key_share: E::G2Affine, // [b] Z_{i, \omega_i} + pub blinding_key_prepared: E::G2Prepared, } -impl BlindedKeyShares { +impl BlindedKeyShare { pub fn verify_blinding( &self, - public_key_shares: &PublicKeyShares, + public_key_share: &PublicKeyShare, rng: &mut R, ) -> bool { let g = E::G1Affine::prime_subgroup_generator(); - let _alpha = E::Fr::rand(rng); - let alpha_i = generate_random::<_, E>( - public_key_shares.public_key_shares.len(), - rng, - ); + let alpha = E::Fr::rand(rng); - let alpha_a_i = E::G1Prepared::from( - g + public_key_shares - .public_key_shares - .iter() - .zip_eq(alpha_i.iter()) - .map(|(key, alpha)| key.mul(*alpha)) - .sum::() - .into_affine(), + let alpha_a = E::G1Prepared::from( + g + public_key_share.public_key_share.mul(alpha).into_affine(), ); // Sum of Yi - let alpha_z_i = E::G2Prepared::from( - self.blinding_key - + self - .blinded_key_shares - .iter() - .zip_eq(alpha_i.iter()) - .map(|(key, alpha)| key.mul(*alpha)) - .sum::() - .into_affine(), + let alpha_z = E::G2Prepared::from( + self.blinding_key + self.blinded_key_share.mul(alpha).into_affine(), ); - // e(g, sum(Yi)) == e(sum(Ai), [b] H) + // e(g, Yi) == e(Ai, [b] H) E::product_of_pairings(&[ - (E::G1Prepared::from(-g), alpha_z_i), - (alpha_a_i, E::G2Prepared::from(self.blinding_key)), + (E::G1Prepared::from(-g), alpha_z), + (alpha_a, E::G2Prepared::from(self.blinding_key)), ]) == E::Fqk::one() } - pub fn get_window_table( - &self, - window_size: usize, - scalar_bits: usize, - domain_inv: &[E::Fr], - ) -> Vec> { - izip!(self.blinded_key_shares.iter(), domain_inv.iter()) - .map(|(key, omega_inv)| BlindedKeyShareWindowTable:: { - window_table: FixedBaseMSM::get_window_table( - scalar_bits, - window_size, - key.mul(-*omega_inv), - ), - }) - .collect::>() - } - - // key shares = [a, b, c] - // domain_inv = [1, 2, 3] - // output = [a * 1, b * 2, c * 3] - pub fn multiply_by_omega_inv(&mut self, domain_inv: &[E::Fr]) { - izip!(self.blinded_key_shares.iter_mut(), domain_inv.iter()).for_each( - |(key, omega_inv)| *key = key.mul(-*omega_inv).into_affine(), - ) + pub fn multiply_by_omega_inv(&mut self, omega_inv: &E::Fr) { + self.blinded_key_share = + self.blinded_key_share.mul(-*omega_inv).into_affine(); } } -#[derive(Debug, Clone)] -pub struct BlindedKeyShareWindowTable { - pub window_table: Vec>, -} #[derive(Debug, Clone)] pub struct PrivateKeyShare { - pub private_key_shares: Vec, + pub private_key_share: E::G2Affine, } impl PrivateKeyShare { - pub fn blind(&self, b: E::Fr) -> BlindedKeyShares { + pub fn blind(&self, b: E::Fr) -> BlindedKeyShare { let blinding_key = E::G2Affine::prime_subgroup_generator().mul(b).into_affine(); - BlindedKeyShares:: { + BlindedKeyShare:: { blinding_key, blinding_key_prepared: E::G2Prepared::from(blinding_key), - blinded_key_shares: self - .private_key_shares - .iter() - .map(|z| z.mul(b).into_affine()) - .collect::>(), - 
window_tables: vec![], + blinded_key_share: self.private_key_share.mul(b).into_affine(), } } } diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index 394c3056..edcb3880 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -1,10 +1,7 @@ -#![allow(non_snake_case)] -#![allow(dead_code)] - use crate::hash_to_curve::htp_bls12381_g2; use crate::SetupParams; -use ark_ec::{msm::FixedBaseMSM, AffineCurve, PairingEngine}; +use ark_ec::{AffineCurve, PairingEngine}; use ark_ff::{Field, One, PrimeField, ToBytes, UniformRand, Zero}; use ark_poly::{ univariate::DensePolynomial, EvaluationDomain, Polynomial, UVPolynomial, @@ -109,40 +106,29 @@ pub fn setup_fast( let mut domain_points_inv = Vec::with_capacity(shares_num); let mut point_inv = E::Fr::one(); - // domain_points are the powers of the generator g - // domain_points_inv are the powers of the inverse of the generator g - // It seems like domain points are being used in "share partitioning" - // https://nikkolasg.github.io/ferveo/dkginit.html#share-partitioning - // There's also a mention of this operation here: - // "DKG.PartitionDomain({ek_i, s_i}) -> {(ek_i, Omega_i)}" - // https://nikkolasg.github.io/ferveo/tpke-concrete.html for _ in 0..shares_num { - // domain_points is the share domain of the i-th participant (?) domain_points.push(point); // 1, t, t^2, t^3, ...; where t is a scalar generator fft_domain.group_gen point *= fft_domain.group_gen; domain_points_inv.push(point_inv); point_inv *= fft_domain.group_gen_inv; } - let window_size = FixedBaseMSM::get_mul_window_size(100); let scalar_bits = E::Fr::size_in_bits(); // A - public key shares of participants let pubkey_shares = subproductdomain::fast_multiexp(&evals.evals, g.into_projective()); let pubkey_share = g.mul(evals.evals[0]); - assert!(pubkey_shares[0] == E::G1Affine::from(pubkey_share)); + debug_assert!(pubkey_shares[0] == E::G1Affine::from(pubkey_share)); // Y, but only when b = 1 - private key shares of participants let privkey_shares = subproductdomain::fast_multiexp(&evals.evals, h.into_projective()); - // h^{f(omega)} // a_0 let x = threshold_poly.coeffs[0]; // F_0 - The commitment to the constant term, and is the public key output Y from PVDKG - // TODO: It seems like the rest of the F_i are not computed? let pubkey = g.mul(x); let privkey = h.mul(x); @@ -151,26 +137,19 @@ pub fn setup_fast( // (domain, domain_inv, A, Y) for (index, (domain, domain_inv, public, private)) in izip!( - // Since we're assigning only one key share to one entity we can use chunks(1) - // This is a quick workaround to avoid refactoring all related entities that assume there are multiple key shares - // TODO: Refactor this code and all related code - domain_points.chunks(1), - domain_points_inv.chunks(1), - pubkey_shares.chunks(1), - privkey_shares.chunks(1) + domain_points.iter(), + domain_points_inv.iter(), + pubkey_shares.iter(), + privkey_shares.iter() ) .enumerate() { let private_key_share = PrivateKeyShare:: { - private_key_shares: private.to_vec(), + private_key_share: *private, }; let b = E::Fr::one(); // TODO: Not blinding for now let mut blinded_key_shares = private_key_share.blind(b); blinded_key_shares.multiply_by_omega_inv(domain_inv); - // TODO: Is `blinded_key_shares` equal to [b]Z_{i,omega_i})? 
- // Z_{i,omega_i}) = [dk_{i}^{-1}]*\hat{Y}_{i_omega_j}] - /*blinded_key_shares.window_tables = - blinded_key_shares.get_window_table(window_size, scalar_bits, domain_inv);*/ private_contexts.push(PrivateDecryptionContextFast:: { index, setup_params: SetupParams { @@ -184,15 +163,14 @@ pub fn setup_fast( private_key_share, public_decryption_contexts: vec![], scalar_bits, - window_size, }); public_contexts.push(PublicDecryptionContextFast:: { - domain: domain.to_vec(), - public_key_shares: PublicKeyShares:: { - public_key_shares: public.to_vec(), + domain: *domain, + public_key_share: PublicKeyShare:: { + public_key_share: *public, }, - blinded_key_shares, - lagrange_n_0: domain.iter().product::(), + blinded_key_share: blinded_key_shares, + lagrange_n_0: *domain, }); } for private in private_contexts.iter_mut() { @@ -216,7 +194,7 @@ pub fn setup_simple( let g = E::G1Affine::prime_subgroup_generator(); let h = E::G2Affine::prime_subgroup_generator(); - // The delear chooses a uniformly random polynomial f of degree t-1 + // The dealer chooses a uniformly random polynomial f of degree t-1 let threshold_poly = DensePolynomial::::rand(threshold - 1, rng); // Domain, or omega Ω let fft_domain = @@ -233,14 +211,12 @@ pub fn setup_simple( assert!(pubkey_shares[0] == E::G1Affine::from(pubkey_share)); // Y, but only when b = 1 - private key shares of participants - // Z_i = h^{f(omega)} ? let privkey_shares = subproductdomain::fast_multiexp(&evals.evals, h.into_projective()); // a_0 let x = threshold_poly.coeffs[0]; // F_0 - // TODO: It seems like the rest of the F_i are not computed? let pubkey = g.mul(x); let privkey = h.mul(x); @@ -251,18 +227,12 @@ pub fn setup_simple( let mut public_contexts = vec![]; // (domain, A, Y) - for (index, (domain, public, private)) in izip!( - // Since we're assigning only one key share to one entity we can use chunks(1) - // This is a quick workaround to avoid refactoring all related entities that assume there are multiple key shares - // TODO: Refactor this code and all related code - shares_x.chunks(1), - pubkey_shares.chunks(1), - privkey_shares.chunks(1) - ) - .enumerate() + for (index, (domain, public, private)) in + izip!(shares_x.iter(), pubkey_shares.iter(), privkey_shares.iter()) + .enumerate() { let private_key_share = PrivateKeyShare:: { - private_key_shares: private.to_vec(), + private_key_share: *private, }; // let b = E::Fr::rand(rng); let b = E::Fr::one(); // TODO: Not blinding for now @@ -281,22 +251,17 @@ pub fn setup_simple( public_decryption_contexts: vec![], }); public_contexts.push(PublicDecryptionContextSimple:: { - domain: domain[0], - public_key_shares: PublicKeyShares:: { - public_key_shares: public.to_vec(), + domain: *domain, + public_key_share: PublicKeyShare:: { + public_key_share: *public, }, - blinded_key_shares, + blinded_key_share: blinded_key_shares, }); } for private in private_contexts.iter_mut() { private.public_decryption_contexts = public_contexts.clone(); } - // TODO: Should we also be returning some sort of signed transcript? 
- // "Post the signed message \(\tau, (F_0, \ldots, F_t), \hat{u}2, (\hat{Y}{i,\omega_j})\) to the blockchain" - // \tau - unique session identifier - // See: https://nikkolasg.github.io/ferveo/pvss.html#dealers-role - (pubkey.into(), privkey.into(), private_contexts) } @@ -307,16 +272,6 @@ pub fn generate_random( (0..n).map(|_| E::Fr::rand(rng)).collect::>() } -fn make_decryption_share( - private_share: &PrivateKeyShare, - ciphertext: &Ciphertext, -) -> E::Fqk { - let z_i = private_share; - let u = ciphertext.commitment; - let z_i = z_i.private_key_shares[0]; - E::pairing(u, z_i) -} - #[cfg(test)] mod tests { use crate::*; @@ -345,20 +300,6 @@ mod tests { assert_eq!(serialized, deserialized.to_bytes()) } - #[test] - fn decryption_share_serialization() { - let decryption_share = DecryptionShareFast:: { - decrypter_index: 1, - decryption_share: ark_bls12_381::G1Affine::prime_subgroup_generator( - ), - }; - - let serialized = decryption_share.to_bytes(); - let deserialized: DecryptionShareFast = - DecryptionShareFast::from_bytes(&serialized); - assert_eq!(serialized, deserialized.to_bytes()) - } - #[test] fn symmetric_encryption() { let rng = &mut test_rng(); @@ -516,7 +457,7 @@ mod tests { .unwrap() .domain; let original_y_r = - selected_participant.private_key_share.private_key_shares[0]; + selected_participant.private_key_share.private_key_share; // Now, we have to remove the participant from the contexts and all nested structures let mut remaining_participants = contexts; @@ -557,6 +498,16 @@ mod tests { share_combine_simple::(decryption_shares, &lagrange) } + fn make_decryption_share( + private_share: &PrivateKeyShare, + ciphertext: &Ciphertext, + ) -> E::Fqk { + let z_i = private_share; + let u = ciphertext.commitment; + let z_i = z_i.private_key_share; + E::pairing(u, z_i) + } + #[test] /// Ñ parties (where t <= Ñ <= N) jointly execute a "share recovery" algorithm, and the output is 1 new share. /// The new share is independent from the previously existing shares. We can use this to on-board a new participant into an existing cohort. 
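// In sketch form, assuming the existing shares are points y_i in E::G2Projective at
// domain points x_i, and x_r is the fresh domain point assigned to the new
// participant: the recovered share is the Lagrange interpolation of the existing
// shares evaluated at x_r. The helper below is hypothetical; only lagrange_basis_at
// is taken from this crate.
//
//     fn interpolate_share_at<E: PairingEngine>(
//         domain: &[E::Fr],
//         shares: &[E::G2Projective],
//         x_r: &E::Fr,
//     ) -> E::G2Projective {
//         // y_r = sum_i lambda_i(x_r) * y_i
//         lagrange_basis_at::<E>(domain, x_r)
//             .iter()
//             .zip(shares.iter())
//             .map(|(lambda, y)| y.mul(lambda.into_repr()))
//             .sum()
//     }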
@@ -593,7 +544,7 @@ mod tests { rng, ); let recovered_key_share = PrivateKeyShare { - private_key_shares: vec![y_r.into_affine()], + private_key_share: y_r.into_affine(), }; // Creating decryption shares @@ -649,7 +600,7 @@ mod tests { .enumerate() .map(|(decrypter_index, private_share)| { let private_share = PrivateKeyShare { - private_key_shares: vec![private_share.into_affine()], + private_key_share: private_share.into_affine(), }; let decryption_share = make_decryption_share(&private_share, &ciphertext); diff --git a/tpke/src/refresh.rs b/tpke/src/refresh.rs index b233ed10..66821ea4 100644 --- a/tpke/src/refresh.rs +++ b/tpke/src/refresh.rs @@ -74,7 +74,7 @@ fn update_shares_for_recovery( .map(|p| { let i = p.index; let mut new_y = E::G2Projective::from( - p.private_key_share.private_key_shares[0], // y_i + p.private_key_share.private_key_share, // y_i ); for j in deltas.keys() { new_y += deltas[j][&i]; @@ -145,7 +145,7 @@ pub fn refresh_shares( .map(|p| { let i = p.index; let mut new_y = E::G2Projective::from( - p.private_key_share.private_key_shares[0], // y_i + p.private_key_share.private_key_share, // y_i ); new_y += share_updates[&i]; new_y From 57c9763712be26ecf9e39863b9a37785b2da6c3e Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Thu, 19 Jan 2023 10:10:17 +0100 Subject: [PATCH 25/28] enable key share blinding in fast tdec --- tpke/src/context.rs | 7 ++++--- tpke/src/lib.rs | 9 ++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tpke/src/context.rs b/tpke/src/context.rs index c4652a10..89e21e6a 100644 --- a/tpke/src/context.rs +++ b/tpke/src/context.rs @@ -41,9 +41,10 @@ impl PrivateDecryptionContextFast { &self, ciphertext: &Ciphertext, ) -> DecryptionShareFast { - // let decryption_share = - // ciphertext.commitment.mul(self.b_inv).into_affine(); - let decryption_share = ciphertext.commitment; + let decryption_share = ciphertext + .commitment + .mul(self.setup_params.b_inv) + .into_affine(); DecryptionShareFast { decrypter_index: self.index, diff --git a/tpke/src/lib.rs b/tpke/src/lib.rs index edcb3880..b2be3ff6 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -147,7 +147,7 @@ pub fn setup_fast( let private_key_share = PrivateKeyShare:: { private_key_share: *private, }; - let b = E::Fr::one(); // TODO: Not blinding for now + let b = E::Fr::rand(rng); let mut blinded_key_shares = private_key_share.blind(b); blinded_key_shares.multiply_by_omega_inv(domain_inv); private_contexts.push(PrivateDecryptionContextFast:: { @@ -234,8 +234,7 @@ pub fn setup_simple( let private_key_share = PrivateKeyShare:: { private_key_share: *private, }; - // let b = E::Fr::rand(rng); - let b = E::Fr::one(); // TODO: Not blinding for now + let b = E::Fr::rand(rng); let blinded_key_shares = private_key_share.blind(b); private_contexts.push(PrivateDecryptionContextSimple:: { index, @@ -375,8 +374,8 @@ mod tests { #[test] fn fast_threshold_encryption() { let mut rng = &mut test_rng(); - let threshold = 16 * 2 / 3; let shares_num = 16; + let threshold = shares_num * 2 / 3; let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); @@ -407,8 +406,8 @@ mod tests { #[test] fn simple_threshold_decryption() { let mut rng = &mut test_rng(); - let threshold = 16 * 2 / 3; let shares_num = 16; + let threshold = shares_num * 2 / 3; let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); From 3c2c8ac8d10836b713eb0b0eddc9c9020563a108 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 20 Jan 2023 15:47:10 +0100 Subject: [PATCH 26/28] disable a failing 
benchmark job --- .github/workflows/workspace.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/workspace.yml b/.github/workflows/workspace.yml index 70db57d2..1f4bb821 100644 --- a/.github/workflows/workspace.yml +++ b/.github/workflows/workspace.yml @@ -104,12 +104,12 @@ jobs: key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} restore-keys: ${{ runner.os }}-cargo- - - name: Run benchmarks - uses: boa-dev/criterion-compare-action@v3 - if: github.event_name == 'pull_request' - with: - cwd: ${{ matrix.component }} - branchName: ${{ github.base_ref }} +# - name: Run benchmarks +# uses: boa-dev/criterion-compare-action@v3 +# if: github.event_name == 'pull_request' +# with: +# cwd: ${{ matrix.component }} +# branchName: ${{ github.base_ref }} # The next steps have been adapted from https://raw.githubusercontent.com/unicode-org/icu4x/main/.github/workflows/build-test.yml From bacea0a2b2e31adcfcdb78bff45b4b69f82c54de Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Fri, 20 Jan 2023 15:47:44 +0100 Subject: [PATCH 27/28] remove unused variable --- ferveo/src/dkg/pv.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index 6973be39..ec90673a 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -294,7 +294,6 @@ pub(crate) mod test_common { my_index: usize, ) -> PubliclyVerifiableDkg { let keypairs = gen_n_keypairs(shares_num); - for _keypair in &keypairs {} let validators = gen_n_validators(&keypairs, shares_num); let me = validators[my_index].clone(); PubliclyVerifiableDkg::new( From 618117998ece797319bd5aba765ad51120872d83 Mon Sep 17 00:00:00 2001 From: Piotr Roslaniec Date: Mon, 23 Jan 2023 18:13:12 +0100 Subject: [PATCH 28/28] replace redundant variable --- ferveo/benches/benchmarks/pvdkg.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/ferveo/benches/benchmarks/pvdkg.rs b/ferveo/benches/benchmarks/pvdkg.rs index c95504bf..82600589 100644 --- a/ferveo/benches/benchmarks/pvdkg.rs +++ b/ferveo/benches/benchmarks/pvdkg.rs @@ -64,12 +64,13 @@ pub fn setup_dkg( let keypairs = gen_keypairs(num); let validators = gen_validators(&keypairs); let me = validators[validator].clone(); + let shares_num = 300; PubliclyVerifiableDkg::new( validators, Params { tau: 0, - security_threshold: 300 / 3, - shares_num: 300, + security_threshold: shares_num / 3, + shares_num, retry_after: 2, }, &me,