feat: improve zeroizing support (#72)

Adds zeroizing and zeroizing-on-drop support to `ExtendedMask`. Moves from a
manual zeroizing-on-drop implementation to the derived `ZeroizeOnDrop` trait
for `CommitmentOpening` and `RangeWitness`.

Updates the prover algorithm to zeroize value- and mask-related data,
including nonces.

Updates the documentation with a brief note about zeroizing support and
caveats.

Closes #71.
AaronFeickert committed Sep 20, 2023
1 parent e71a275 commit c7b076f
Showing 7 changed files with 90 additions and 140 deletions.
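
As context for the commit message above, the pattern being adopted looks roughly like the sketch below. The struct and fields are illustrative only (not the crate's actual types) and assume the `zeroize` crate with its derive feature enabled: instead of hand-writing a `Drop` impl that zeroizes each field, deriving `ZeroizeOnDrop` wipes every field automatically when the value is dropped.

```rust
use zeroize::{Zeroize, ZeroizeOnDrop};

// Before: a manual `Drop` impl had to zeroize each field by hand,
// which is easy to get wrong when fields are added later.
struct ManualOpening {
    v: u64,
    r: [u8; 32],
}

impl Drop for ManualOpening {
    fn drop(&mut self) {
        self.v.zeroize();
        self.r.zeroize();
    }
}

// After: the derived implementation zeroizes all fields on drop.
// Requires `zeroize` with its derive feature in Cargo.toml.
#[derive(Zeroize, ZeroizeOnDrop)]
struct DerivedOpening {
    v: u64,
    r: [u8; 32],
}

fn main() {
    let _old = ManualOpening { v: 1, r: [0u8; 32] };
    let new = DerivedOpening { v: 42, r: [7u8; 32] };
    drop(new); // all fields of `new` are overwritten with zeros here
} // `_old` is zeroized by its manual `Drop` impl here
```
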
2 changes: 2 additions & 0 deletions README.md
@@ -24,6 +24,8 @@ Compared to an [updated fork](https://github.com/tari-project/bulletproofs) of t

As always, your mileage may vary.

This implementation uses the excellent [`zeroize`](https://crates.io/crates/zeroize) library to take a best-effort approach to minimizing the exposure of value- and mask-related data in memory, as this data is often considered sensitive. However, it is difficult in general to guarantee that there are no coding patterns leading to [unintended copies](https://docs.rs/zeroize/1.6.0/zeroize/#stackheap-zeroing-notes) of data, so care should always be taken not to make too many assumptions about the contents of memory.

## References

This implementation takes its cue from the `dalek-cryptography` [Bulletproofs](https://github.com/dalek-cryptography/bulletproofs) implementation, as well as the Monero [Bulletproofs+](https://www.getmonero.org/2020/12/24/Bulletproofs+-in-Monero.html) implementation.
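
As a loose illustration of the caveat in the README note above (not part of this diff): wrapping a temporary in `Zeroizing` wipes it when it leaves scope, and pre-allocating with `with_capacity`, as the updated prover does below, avoids reallocations that could strand stale copies on the heap. The values and types here are made up for the example.

```rust
use zeroize::Zeroizing;

fn main() {
    // Pre-allocate so the pushes below never reallocate and leave old copies behind.
    let mut bits = Zeroizing::new(Vec::with_capacity(64));

    let secret_value: u64 = 0b1011; // stand-in for a sensitive value
    for i in 0..64u32 {
        // `Zeroizing<Vec<u64>>` derefs to `Vec<u64>`, so `push` works as usual.
        bits.push((secret_value >> i) & 1);
    }

    assert_eq!(bits.iter().sum::<u64>(), 3);

    // `bits` is overwritten with zeros when it drops here. This cannot undo
    // copies made before wrapping, or copies the compiler makes elsewhere,
    // which is why the README calls the approach "best effort".
}
```
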
12 changes: 2 additions & 10 deletions src/commitment_opening.rs
@@ -4,12 +4,12 @@
//! Bulletproofs+ commitment opening struct

use curve25519_dalek::scalar::Scalar;
use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

use crate::errors::ProofError;

/// Commitment openings to be used for Pedersen commitments
#[derive(Clone, Zeroize)]
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct CommitmentOpening {
/// Value
pub(crate) v: u64,
@@ -35,14 +35,6 @@ impl CommitmentOpening {
}
}

/// Overwrite secrets with null bytes when they go out of scope.
impl Drop for CommitmentOpening {
fn drop(&mut self) {
self.v.zeroize();
self.r.zeroize();
}
}

#[cfg(test)]
mod test {
use super::*;
3 changes: 2 additions & 1 deletion src/extended_mask.rs
@@ -4,11 +4,12 @@
//! Bulletproofs+ embedded extended mask

use curve25519_dalek::scalar::Scalar;
use zeroize::{Zeroize, ZeroizeOnDrop};

use crate::{errors::ProofError, generators::pedersen_gens::ExtensionDegree};

/// Contains the embedded extended mask for non-aggregated proofs
#[derive(Debug, PartialEq)]
#[derive(Debug, PartialEq, Zeroize, ZeroizeOnDrop)]
pub struct ExtendedMask {
blindings: Vec<Scalar>, // Do not allow direct assignment of struct member (i.e. should not be public)
}
3 changes: 2 additions & 1 deletion src/generators/pedersen_gens.rs
@@ -7,6 +7,7 @@
use std::{borrow::Borrow, convert::TryFrom, iter::once};

use curve25519_dalek::{scalar::Scalar, traits::MultiscalarMul};
use zeroize::Zeroize;

use crate::{errors::ProofError, traits::Compressable};

@@ -37,7 +38,7 @@ pub struct PedersenGens<P: Compressable> {
/// The extension degree for extended commitments. Currently this is limited to 5 extension degrees, but in theory it
/// could be arbitrarily long, although practically, very few if any test cases will use more than 2 extension degrees.
/// These values MUST increment, or other functions may panic.
#[derive(Copy, Clone, Debug, PartialEq)]
#[derive(Copy, Clone, Debug, PartialEq, Zeroize)]
pub enum ExtensionDegree {
/// Default Pedersen commitment
DefaultPedersen = 1,
129 changes: 76 additions & 53 deletions src/range_proof.rs
@@ -8,7 +8,7 @@
use std::{
convert::{TryFrom, TryInto},
marker::PhantomData,
ops::{Add, Mul},
ops::{Add, Mul, Shr},
};

use curve25519_dalek::{
@@ -19,6 +19,7 @@ use itertools::{izip, Itertools};
use merlin::Transcript;
use rand::thread_rng;
use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
use zeroize::Zeroizing;

use crate::{
errors::ProofError,
@@ -33,7 +34,7 @@ use crate::{
range_witness::RangeWitness,
traits::{Compressable, Decompressable, FixedBytesRepr, Precomputable},
transcripts,
utils::generic::{bit_vector_of_scalars, nonce, split_at_checked},
utils::generic::{nonce, split_at_checked},
};

/// Optionally extract masks when verifying the proofs
@@ -248,26 +249,33 @@ where
)?;

// Set bit arrays
let mut a_li = Vec::with_capacity(full_length);
let mut a_ri = Vec::with_capacity(full_length);
for (j, value) in witness.openings.iter().map(|o| o.v).enumerate() {
let bit_vector = if let Some(minimum_value) = statement.minimum_value_promises[j] {
let offset_value = value.checked_sub(minimum_value).ok_or(ProofError::InvalidArgument(
let mut a_li = Zeroizing::new(Vec::with_capacity(full_length));
let mut a_ri = Zeroizing::new(Vec::with_capacity(full_length));
for (minimum_value, value) in statement
.minimum_value_promises
.iter()
.zip(witness.openings.iter().map(|o| &o.v))
{
// Offset by the minimum value if needed
let offset_value = if let Some(minimum_value) = minimum_value {
Zeroizing::new(value.checked_sub(*minimum_value).ok_or(ProofError::InvalidArgument(
"Minimum value is larger than value".to_string(),
))?;
bit_vector_of_scalars(offset_value, bit_length)?
))?)
} else {
bit_vector_of_scalars(value, bit_length)?
Zeroizing::new(*value)
};
for bit_field in bit_vector.clone() {
a_li.push(bit_field);
a_ri.push(bit_field - Scalar::ONE);

// Decompose into bits
for i in 0..bit_length {
let i_u32 = u32::try_from(i).map_err(|_| ProofError::SizeOverflow)?;
a_li.push(Scalar::from(offset_value.shr(i_u32) & 1));
a_ri.push(Scalar::from(offset_value.shr(i_u32) & 1) - Scalar::ONE);
}
}

// Compute A by multi-scalar multiplication
let rng = &mut thread_rng();
let mut alpha = Vec::with_capacity(extension_degree);
let mut alpha = Zeroizing::new(Vec::with_capacity(extension_degree));
for k in 0..extension_degree {
alpha.push(if let Some(seed_nonce) = statement.seed_nonce {
nonce(&seed_nonce, "alpha", None, Some(k))?
@@ -309,7 +317,7 @@ where
}

// Prepare for inner product
for item in &mut a_li {
for item in a_li.iter_mut() {
*item -= z;
}
for (i, item) in a_ri.iter_mut().enumerate() {
@@ -357,32 +365,40 @@ where
let a_hi_offset = a_hi.iter().map(|s| s * y_powers[n]).collect::<Vec<Scalar>>();

let d_l = if let Some(seed_nonce) = statement.seed_nonce {
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "dL", Some(round), Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?
Zeroizing::new(
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "dL", Some(round), Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?,
)
} else {
// Zero is allowed by the protocol, but excluded by the implementation to be unambiguous
(0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect()
Zeroizing::new((0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect())
};
let d_r = if let Some(seed_nonce) = statement.seed_nonce {
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "dR", Some(round), Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?
Zeroizing::new(
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "dR", Some(round), Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?,
)
} else {
// Zero is allowed by the protocol, but excluded by the implementation to be unambiguous
(0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect()
Zeroizing::new((0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect())
};

round += 1;

let c_l = izip!(a_lo, y_powers.iter().skip(1), b_hi)
.fold(Scalar::ZERO, |acc, (a, y_power, b)| acc + a * y_power * b);
let c_r = izip!(a_hi, y_powers.iter().skip(n + 1), b_lo)
.fold(Scalar::ZERO, |acc, (a, y_power, b)| acc + a * y_power * b);
let c_l = Zeroizing::new(
izip!(a_lo, y_powers.iter().skip(1), b_hi)
.fold(Scalar::ZERO, |acc, (a, y_power, b)| acc + a * y_power * b),
);
let c_r = Zeroizing::new(
izip!(a_hi, y_powers.iter().skip(n + 1), b_lo)
.fold(Scalar::ZERO, |acc, (a, y_power, b)| acc + a * y_power * b),
);

// Compute L and R by multi-scalar multiplication
li.push(P::vartime_multiscalar_mul(
std::iter::once(&c_l)
std::iter::once::<&Scalar>(&c_l)
.chain(d_l.iter())
.chain(a_lo_offset.iter())
.chain(b_hi.iter()),
Expand All @@ -392,7 +408,7 @@ where
.chain(hi_base_lo),
));
ri.push(P::vartime_multiscalar_mul(
std::iter::once(&c_r)
std::iter::once::<&Scalar>(&c_r)
.chain(d_r.iter())
.chain(a_hi_offset.iter())
.chain(b_lo.iter()),
@@ -428,16 +444,18 @@ where
.zip(hi_base_hi.iter())
.map(|(lo, hi)| P::vartime_multiscalar_mul([&e, &e_inverse], [lo, hi]))
.collect();
a_li = a_lo
.iter()
.zip(a_hi_offset.iter())
.map(|(lo, hi)| lo * e + hi * e_inverse)
.collect();
a_ri = b_lo
.iter()
.zip(b_hi.iter())
.map(|(lo, hi)| lo * e_inverse + hi * e)
.collect();
a_li = Zeroizing::new(
a_lo.iter()
.zip(a_hi_offset.iter())
.map(|(lo, hi)| lo * e + hi * e_inverse)
.collect(),
);
a_ri = Zeroizing::new(
b_lo.iter()
.zip(b_hi.iter())
.map(|(lo, hi)| lo * e_inverse + hi * e)
.collect(),
);

for (alpha, (l, r)) in alpha.iter_mut().zip(d_l.iter().zip(d_r.iter())) {
*alpha += l * e_square + r * e_inverse_square;
Expand All @@ -446,27 +464,32 @@ where

// Random masks
// Zero is allowed by the protocol, but excluded by the implementation to be unambiguous
let (r, s) = (Scalar::random_not_zero(rng), Scalar::random_not_zero(rng));
let r = Zeroizing::new(Scalar::random_not_zero(rng));
let s = Zeroizing::new(Scalar::random_not_zero(rng));
let d = if let Some(seed_nonce) = statement.seed_nonce {
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "d", None, Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?
Zeroizing::new(
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "d", None, Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?,
)
} else {
// Zero is allowed by the protocol, but excluded by the implementation to be unambiguous
(0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect()
Zeroizing::new((0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect())
};
let eta = if let Some(seed_nonce) = statement.seed_nonce {
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "eta", None, Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?
Zeroizing::new(
(0..extension_degree)
.map(|k| nonce(&seed_nonce, "eta", None, Some(k)))
.collect::<Result<Vec<_>, ProofError>>()?,
)
} else {
// Zero is allowed by the protocol, but excluded by the implementation to be unambiguous
(0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect()
Zeroizing::new((0..extension_degree).map(|_| Scalar::random_not_zero(rng)).collect())
};

let mut a1 =
&gi_base[0] * r + &hi_base[0] * s + h_base * (r * y_powers[1] * a_ri[0] + s * y_powers[1] * a_li[0]);
let mut b = h_base * (r * y_powers[1] * s);
&gi_base[0] * *r + &hi_base[0] * *s + h_base * (*r * y_powers[1] * a_ri[0] + *s * y_powers[1] * a_li[0]);
let mut b = h_base * (*r * y_powers[1] * *s);
for k in 0..extension_degree {
a1 += &g_base[k] * d[k];
b += &g_base[k] * eta[k]
@@ -475,9 +498,9 @@ where
let e = transcripts::transcript_points_a1_b_challenge_e(&mut transcript, &a1.compress(), &b.compress())?;
let e_square = e * e;

let r1 = r + a_li[0] * e;
let s1 = s + a_ri[0] * e;
let d1: Vec<Scalar> = izip!(eta, d, alpha.iter())
let r1 = *r + a_li[0] * e;
let s1 = *s + a_ri[0] * e;
let d1: Vec<Scalar> = izip!(eta.iter(), d.iter(), alpha.iter())
.map(|(eta, d, alpha)| eta + d * e + alpha * e_square)
.collect();

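A small aside on the src/range_proof.rs changes above: `Zeroizing<T>` only forwards `Deref`/`DerefMut`, not arithmetic traits, which is why the new prover code dereferences wrapped scalars explicitly (`*r`, `*s`) while indexing and iteration on wrapped vectors read the same as before. A rough sketch, with plain integers standing in for scalars:

```rust
use zeroize::Zeroizing;

fn main() {
    // Wrapped Copy values are dereferenced at each use site, mirroring the
    // `*r` and `*s` derefs in the updated prover.
    let r = Zeroizing::new(7u64);
    let s = Zeroizing::new(3u64);

    // The deref copies the inner value onto the stack for the computation;
    // such transient copies are part of the "best effort" caveat.
    assert_eq!(*r * *s, 21);

    // Collections pass through `Deref` too, so indexing works unchanged.
    let a_li = Zeroizing::new(vec![1u64, 2, 3]);
    assert_eq!(a_li[0], 1);
    assert_eq!(a_li.len(), 3);
} // `r`, `s`, and `a_li` are all wiped as they drop here
```
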
13 changes: 2 additions & 11 deletions src/range_witness.rs
@@ -5,12 +5,12 @@

use std::convert::TryInto;

use zeroize::Zeroize;
use zeroize::{Zeroize, ZeroizeOnDrop};

use crate::{commitment_opening::CommitmentOpening, errors::ProofError, generators::pedersen_gens::ExtensionDegree};

/// A convenience struct for holding commitment openings for the aggregated case
#[derive(Clone)]
#[derive(Clone, Zeroize, ZeroizeOnDrop)]
pub struct RangeWitness {
/// The vector of commitment openings for the aggregated case
pub openings: Vec<CommitmentOpening>,
@@ -39,15 +39,6 @@ impl RangeWitness {
}
}

/// Overwrite secrets with null bytes when they go out of scope.
impl Drop for RangeWitness {
fn drop(&mut self) {
for item in &mut self.openings {
item.zeroize();
}
}
}

#[cfg(test)]
mod test {
use curve25519_dalek::Scalar;
