
Commit

[1 changes] chore: schnorr signature verification in noir (noir-lang/noir#5188)

fix(experimental elaborator): Fix globals which use function calls (noir-lang/noir#5172)
feat!: restrict noir word size to u32 (noir-lang/noir#5180)
chore: break out helper methods for writing foreign call results (noir-lang/noir#5181)
fix: Fix panic in `get_global_let_statement` (noir-lang/noir#5177)
fix(elaborator): Invert unconstrained check (noir-lang/noir#5176)
chore: loosen trait bounds on impls depending on `AcirField` (noir-lang/noir#5115)
feat: support casting in globals (noir-lang/noir#5164)
chore(experimental elaborator): Handle `comptime` expressions in the elaborator (noir-lang/noir#5169)
chore: avoid manual creation of contract artifact in wasm (noir-lang/noir#5117)
chore: start moving lints into a separate linting directory (noir-lang/noir#5165)
chore: move acir docs to code declaration (noir-lang/noir#5040)
feat!: separate proving from `noir_js` (noir-lang/noir#5072)
AztecBot committed Jun 6, 2024
1 parent 1d785fd commit 6464ee6
Showing 299 changed files with 30,889 additions and 4,656 deletions.
2 changes: 1 addition & 1 deletion .noir-sync-commit
@@ -1 +1 @@
e4eb5f539f377fd3c2e1a874707ffce62a5bc10a
070d7e71d6587679721437dd5b996478bd17ed85
@@ -111,7 +111,7 @@ jobs:
fi
env:
# We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole.
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }}

- name: Checkout
if: ${{ failure() }}
8 changes: 5 additions & 3 deletions noir/noir-repo/Cargo.lock

Some generated files are not rendered by default.

5 changes: 2 additions & 3 deletions noir/noir-repo/acvm-repo/acir/Cargo.toml
@@ -33,9 +33,8 @@ criterion.workspace = true
pprof.workspace = true

[features]
default = ["bn254"]
bn254 = ["acir_field/bn254", "brillig/bn254"]
bls12_381 = ["acir_field/bls12_381", "brillig/bls12_381"]
bn254 = ["acir_field/bn254"]
bls12_381 = ["acir_field/bls12_381"]

[[bench]]
name = "serialization"
335 changes: 206 additions & 129 deletions noir/noir-repo/acvm-repo/acir/README.md

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions noir/noir-repo/acvm-repo/acir/benches/serialization.rs
@@ -11,8 +11,8 @@ use pprof::criterion::{Output, PProfProfiler};

const SIZES: [usize; 9] = [10, 50, 100, 500, 1000, 5000, 10000, 50000, 100000];

fn sample_program(num_opcodes: usize) -> Program {
let assert_zero_opcodes: Vec<Opcode> = (0..num_opcodes)
fn sample_program(num_opcodes: usize) -> Program<FieldElement> {
let assert_zero_opcodes: Vec<Opcode<_>> = (0..num_opcodes)
.map(|i| {
Opcode::AssertZero(Expression {
mul_terms: vec![(
@@ -83,7 +83,7 @@ fn bench_deserialization(c: &mut Criterion) {
BenchmarkId::from_parameter(size),
&serialized_program,
|b, program| {
b.iter(|| Program::deserialize_program(program));
b.iter(|| Program::<FieldElement>::deserialize_program(program));
},
);
}
@@ -107,7 +107,7 @@ fn bench_deserialization(c: &mut Criterion) {
|b, program| {
b.iter(|| {
let mut deserializer = serde_json::Deserializer::from_slice(program);
Program::deserialize_program_base64(&mut deserializer)
Program::<FieldElement>::deserialize_program_base64(&mut deserializer)
});
},
);
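The benchmark changes above reflect the wider refactor in this sync (see "chore: loosen trait bounds on impls depending on `AcirField`"): `Program`, `Opcode`, and `Expression` now take a field type parameter instead of being hard-wired to one concrete field, so the caller names the field (here `FieldElement`) at the use site. The following is a minimal, self-contained sketch of that pattern using toy stand-in types, not the real `acir` API:

```rust
use std::fmt::Debug;

/// Stand-in for the `AcirField`-style trait bound used by the real crates (assumption).
trait Field: Clone + Debug {
    fn zero() -> Self;
}

/// Toy stand-in for acir's `FieldElement`; any type implementing `Field` works.
#[derive(Clone, Debug)]
struct Bn254Field(u64);

impl Field for Bn254Field {
    fn zero() -> Self {
        Bn254Field(0)
    }
}

#[derive(Clone, Debug)]
struct Expression<F> {
    mul_terms: Vec<(F, u32, u32)>,
    linear_combinations: Vec<(F, u32)>,
    q_c: F,
}

#[derive(Clone, Debug)]
enum Opcode<F> {
    AssertZero(Expression<F>),
}

#[derive(Clone, Debug)]
struct Program<F> {
    opcodes: Vec<Opcode<F>>,
}

/// Mirrors the shape of the benchmark's `sample_program`, but generic over `F`.
fn sample_program<F: Field>(num_opcodes: usize) -> Program<F> {
    let opcodes = (0..num_opcodes)
        .map(|_| {
            Opcode::AssertZero(Expression {
                mul_terms: vec![],
                linear_combinations: vec![],
                q_c: F::zero(),
            })
        })
        .collect();
    Program { opcodes }
}

fn main() {
    // The concrete field is chosen at the call site, just as the benchmark
    // above now writes `Program::<FieldElement>`; a toy field stands in here.
    let program: Program<Bn254Field> = sample_program(3);
    println!("built a program with {} opcodes", program.opcodes.len());
}
```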
197 changes: 179 additions & 18 deletions noir/noir-repo/acvm-repo/acir/src/circuit/black_box_functions.rs
@@ -1,5 +1,7 @@
//! Black box functions are ACIR opcodes which rely on backends implementing support for specialized constraints.
//! This makes certain zk-snark unfriendly computations cheaper than if they were implemented in more basic constraints.
//! Black box functions are ACIR opcodes which rely on backends implementing
//! support for specialized constraints.
//! This makes certain zk-snark unfriendly computations cheaper than if they were
//! implemented in more basic constraints.

use serde::{Deserialize, Serialize};
#[cfg(test)]
@@ -9,61 +11,220 @@ use strum_macros::EnumIter;
#[derive(Clone, Debug, Hash, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[cfg_attr(test, derive(EnumIter))]
pub enum BlackBoxFunc {
/// Encrypts the input using AES128.
/// Ciphers (encrypts) the provided plaintext using AES128 in CBC mode,
/// padding the input using PKCS#7.
/// - inputs: byte array `[u8; N]`
/// - iv: initialization vector `[u8; 16]`
/// - key: user key `[u8; 16]`
/// - outputs: byte vector `[u8]` of length `input.len() + (16 - input.len() % 16)`
AES128Encrypt,
/// Bitwise AND.

/// Performs the bitwise AND of `lhs` and `rhs`. `bit_size` must be the same for
/// both inputs.
/// - lhs: (witness, bit_size)
/// - rhs: (witness, bit_size)
/// - output: a witness whose value is constrained to be lhs AND rhs, as
/// bit_size bit integers
AND,
/// Bitwise XOR.

/// Performs the bitwise XOR of `lhs` and `rhs`. `bit_size` must be the same for
/// both inputs.
/// - lhs: (witness, bit_size)
/// - rhs: (witness, bit_size)
/// - output: a witness whose value is constrained to be lhs XOR rhs, as
/// bit_size bit integers
XOR,
/// Range constraint to ensure that a [`FieldElement`][acir_field::FieldElement] can be represented in a specified number of bits.

/// Range constraint to ensure that a witness
/// can be represented in the specified number of bits.
/// - input: (witness, bit_size)
RANGE,
/// Calculates the SHA256 hash of the inputs.

/// Computes SHA256 of the inputs
/// - inputs are a byte array, i.e a vector of (witness, 8)
/// - output is a byte array of len 32, i.e an array of 32 (witness, 8),
/// constrained to be the sha256 of the inputs.
SHA256,
/// Calculates the Blake2s hash of the inputs.

/// Computes the Blake2s hash of the inputs, as specified in
/// https://tools.ietf.org/html/rfc7693
/// - inputs are a byte array, i.e a vector of (witness, 8)
/// - output is a byte array of length 32, i.e. an array of 32
/// (witness, 8), constrained to be the blake2s of the inputs.
Blake2s,
/// Calculates the Blake3 hash of the inputs.

/// Computes the Blake3 hash of the inputs
/// - inputs are a byte array, i.e a vector of (witness, 8)
/// - output is a byte array of length 32, i.e an array of 32
/// (witness, 8), constrained to be the blake3 of the inputs.
Blake3,
/// Verifies a Schnorr signature over a curve which is "pairing friendly" with the curve on which the ACIR circuit is defined.

/// Verify a Schnorr signature over the embedded curve
/// - inputs are:
/// - Public key as 2 (witness, 254)
/// - signature as a vector of 64 bytes (witness, 8)
/// - message as a vector of (witness, 8)
/// - output: A witness representing the result of the signature
/// verification; 0 for failure and 1 for success.
///
/// Since the scalar field of the embedded curve is NOT the ACIR field, the
/// `(r,s)` signature is represented as a 64 bytes array for the two field
/// elements. On the other hand, the public key coordinates are ACIR fields.
/// The proving system decides how the message is to be hashed. Barretenberg
/// uses Blake2s.
///
/// The exact curve which this signature uses will vary based on the curve being used by ACIR.
/// For example, the BN254 curve supports Schnorr signatures over the [Grumpkin][grumpkin] curve.
/// Verifies a Schnorr signature over a curve which is "pairing friendly"
/// with the curve on which the ACIR circuit is defined.
///
/// The exact curve which this signature uses will vary based on the curve
/// being used by ACIR. For example, the BN254 curve supports Schnorr
/// signatures over the [Grumpkin][grumpkin] curve.
///
/// [grumpkin]: https://hackmd.io/@aztec-network/ByzgNxBfd#2-Grumpkin---A-curve-on-top-of-BN-254-for-SNARK-efficient-group-operations
SchnorrVerify,

/// Calculates a Pedersen commitment to the inputs.
///
/// Computes a Pedersen commitment of the inputs using generators of the
/// embedded curve
/// - input: vector of (witness, 254)
/// - output: 2 witnesses representing the x,y coordinates of the resulting
/// Grumpkin point
/// - domain separator: a constant public value (a field element) that you
/// can use so that the commitment also depends on the domain separator.
/// Noir uses 0 as domain separator.
///
/// The backend should handle proper conversion between the inputs being ACIR
/// field elements and the scalar field of the embedded curve. In the case of
/// Aztec's Barretenberg, the latter is bigger than the ACIR field so it is
/// straightforward. The Pedersen generators are managed by the proving
/// system.
///
/// The commitment is expected to be additively homomorphic
PedersenCommitment,

/// Calculates a Pedersen hash to the inputs.
///
/// Computes a Pedersen hash of the inputs and their number, using
/// generators of the embedded curve
/// - input: vector of (witness, 254)
/// - output: the x-coordinate of the pedersen commitment of the
/// 'prepended input' (see below)
/// - domain separator: a constant public value (a field element) that you
/// can use so that the hash also depends on the domain separator. Noir
/// uses 0 as domain separator.
///
/// In Barretenberg, PedersenHash does the same as PedersenCommitment,
/// except that it prepends the inputs with their length. The result is not
/// expected to be additively homomorphic.
PedersenHash,

/// Verifies an ECDSA signature over the secp256k1 curve.
/// - inputs:
/// - x coordinate of public key as 32 bytes
/// - y coordinate of public key as 32 bytes
/// - the signature, as a 64 bytes array
/// - the hash of the message, as a vector of bytes
/// - output: 0 for failure and 1 for success
///
/// Inputs and outputs are similar to SchnorrVerify, except that because we
/// use a different curve (secp256k1), the field elements involved in the
/// signature and the public key are defined as an array of 32 bytes.
/// Another difference is that we assume the message is already hashed.
EcdsaSecp256k1,

/// Verifies an ECDSA signature over the secp256r1 curve.
///
/// Same as EcdsaSecp256k1, but done over another curve.
EcdsaSecp256r1,
/// Performs multi scalar multiplication over the embedded curve.

/// Multiple scalar multiplication (MSM) with a variable base/input point
/// (P) of the embedded curve. An MSM multiplies the points and scalars and
/// sums the results.
/// - input:
/// points (witness, N) a vector of x and y coordinates of input
/// points `[x1, y1, x2, y2,...]`.
/// scalars (witness, N) a vector of low and high limbs of input
/// scalars `[s1_low, s1_high, s2_low, s2_high, ...]`. (witness, N)
/// For Barretenberg, they must both be less than 128 bits.
/// - output:
/// a tuple of `x` and `y` coordinates of output.
/// Points computed as `s_low*P+s_high*2^{128}*P`
///
/// Because the Grumpkin scalar field is bigger than the ACIR field, we
/// provide 2 ACIR fields representing the low and high parts of the Grumpkin
/// scalar $a$: `a=low+high*2^{128}`, with `low, high < 2^{128}`
MultiScalarMul,
/// Calculates the Keccak256 hash of the inputs.

/// Computes the Keccak-256 (Ethereum version) of the inputs.
/// - inputs: Vector of bytes (witness, 8)
/// - outputs: Array of 32 bytes (witness, 8)
Keccak256,
/// Keccak Permutation function of 1600 width

/// Keccak Permutation function of width 1600
/// - inputs: An array of 25 64-bit Keccak lanes that represent a keccak sponge of 1600 bits
/// - outputs: The result of a keccak f1600 permutation on the input state. Also an array of 25 Keccak lanes.
Keccakf1600,
/// Compute a recursive aggregation object when verifying a proof inside another circuit.
/// This outputted aggregation object will then be either checked in a top-level verifier or aggregated upon again.

/// Compute a recursive aggregation object when verifying a proof inside
/// another circuit.
/// This outputted aggregation object will then be either checked in a
/// top-level verifier or aggregated upon again.
///
/// **Warning: this opcode is subject to change.**
/// Note that the `254` in `(witness, 254)` refers to the upper bound of
/// the `witness`.
/// - verification_key: Vector of (witness, 254) representing the
/// verification key of the circuit being verified
/// - public_inputs: Vector of (witness, 254) representing the public
/// inputs corresponding to the proof being verified
/// - key_hash: one (witness, 254). It should be the hash of the
/// verification key. Barretenberg expects the Pedersen hash of the
/// verification key
///
/// It also prepares the verification of the proof: some operations may
/// still need to be performed by the final verifier in order to fully
/// verify the proof, which is why this black box function does not report
/// whether verification passes.
///
/// This black box function does not fully verify a proof, what it does is
/// verifying that the key_hash is indeed a hash of verification_key,
/// allowing the user to use the verification key as private inputs and only
/// have the key_hash as public input, which is more performant.
///
/// If one of the recursive proofs you verify with the black box function does not
/// verify, then the verification of the proof of the main ACIR program will
/// ultimately fail.
RecursiveAggregation,
/// Addition over the embedded curve on which [`FieldElement`][acir_field::FieldElement] is defined.

/// Addition over the embedded curve on which the witness is defined.
EmbeddedCurveAdd,

/// BigInt addition
BigIntAdd,

/// BigInt subtraction
BigIntSub,

/// BigInt multiplication
BigIntMul,

/// BigInt division
BigIntDiv,

/// BigInt from le bytes
BigIntFromLeBytes,

/// BigInt to le bytes
BigIntToLeBytes,

/// Permutation function of Poseidon2
Poseidon2Permutation,

/// SHA256 compression function
/// - input: [(witness, 32); 16]
/// - state: [(witness, 32); 8]
/// - output: [(witness, 32); 8]
Sha256Compression,
}

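Of the documentation added above, the `MultiScalarMul` entry relies on a specific encoding: because the scalar field of the embedded curve is larger than the ACIR field, each scalar `a` is passed as two 128-bit limbs with `a = low + high * 2^128` and `low, high < 2^128`. The sketch below illustrates that split on plain 32-byte little-endian scalars; it is an illustration only, not the acvm implementation:

```rust
/// Split a 32-byte little-endian scalar into the two 128-bit limbs that the
/// MultiScalarMul opcode expects: a = low + high * 2^128, low, high < 2^128.
fn split_scalar_le(scalar_le: [u8; 32]) -> ([u8; 16], [u8; 16]) {
    let mut low = [0u8; 16];
    let mut high = [0u8; 16];
    low.copy_from_slice(&scalar_le[..16]);  // least-significant 128 bits
    high.copy_from_slice(&scalar_le[16..]); // most-significant 128 bits
    (low, high)
}

/// Recombine the limbs; in little-endian byte order, low + high * 2^128
/// is simply concatenation.
fn recombine_le(low: [u8; 16], high: [u8; 16]) -> [u8; 32] {
    let mut out = [0u8; 32];
    out[..16].copy_from_slice(&low);
    out[16..].copy_from_slice(&high);
    out
}

fn main() {
    let mut scalar = [0u8; 32];
    scalar[0] = 0x2a;  // a = 42 ...
    scalar[16] = 0x01; // ... plus 2^128
    let (low, high) = split_scalar_le(scalar);
    assert_eq!(recombine_le(low, high), scalar);
    println!("low = {:02x?}, high = {:02x?}", low, high);
}
```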
10 changes: 5 additions & 5 deletions noir/noir-repo/acvm-repo/acir/src/circuit/brillig.rs
@@ -6,9 +6,9 @@ use serde::{Deserialize, Serialize};
/// Inputs for the Brillig VM. These are the initial inputs
/// that the Brillig VM will use to start.
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
pub enum BrilligInputs {
Single(Expression),
Array(Vec<Expression>),
pub enum BrilligInputs<F> {
Single(Expression<F>),
Array(Vec<Expression<F>>),
MemoryArray(BlockId),
}

@@ -24,6 +24,6 @@ pub enum BrilligOutputs {
/// a full Brillig function to be executed by the Brillig VM.
/// This is stored separately on a program and accessed through a [BrilligPointer].
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Default, Debug)]
pub struct BrilligBytecode {
pub bytecode: Vec<BrilligOpcode>,
pub struct BrilligBytecode<F> {
pub bytecode: Vec<BrilligOpcode<F>>,
}
4 changes: 2 additions & 2 deletions noir/noir-repo/acvm-repo/acir/src/circuit/directives.rs
@@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
/// Directives do not apply any constraints.
/// You can think of them as opcodes that allow one to use non-determinism
/// In the future, this can be replaced with asm non-determinism blocks
pub enum Directive {
pub enum Directive<F> {
//decomposition of a: a=\sum b[i]*radix^i where b is an array of witnesses < radix in little endian form
ToLeRadix { a: Expression, b: Vec<Witness>, radix: u32 },
ToLeRadix { a: Expression<F>, b: Vec<Witness>, radix: u32 },
}
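The `ToLeRadix` directive above asks the solver for the little-endian radix decomposition `a = Σ b[i] * radix^i` with every limb `b[i] < radix`. A minimal sketch of that decomposition in plain Rust (on `u128` values rather than witnesses, and not the real ACVM solver) follows:

```rust
/// Little-endian radix decomposition: returns b such that
/// a = Σ b[i] * radix^i and every limb b[i] < radix.
fn to_le_radix(mut a: u128, radix: u128, limbs: usize) -> Vec<u128> {
    let mut b = Vec::with_capacity(limbs);
    for _ in 0..limbs {
        b.push(a % radix); // least-significant limb first
        a /= radix;
    }
    b
}

fn main() {
    // 0x0001_0203 in base 256, little-endian: [0x03, 0x02, 0x01, 0x00]
    let limbs = to_le_radix(0x0001_0203, 256, 4);
    assert_eq!(limbs, vec![0x03, 0x02, 0x01, 0x00]);

    // Recombine to check the identity a = Σ b[i] * radix^i.
    let a = limbs.iter().rev().fold(0u128, |acc, &b| acc * 256 + b);
    assert_eq!(a, 0x0001_0203);
}
```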