diff --git a/.gitignore b/.gitignore index 2f0a46ef53..8de98ea8ed 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,6 @@ .idea *.log *.json -*.sh \ No newline at end of file +*.sh +*.txt +*.srs \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 8b4898dea2..b1b05e5e02 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -20,6 +20,25 @@ dependencies = [ "opaque-debug 0.3.0", ] +[[package]] +name = "aggregator" +version = "0.1.0" +dependencies = [ + "ark-std", + "env_logger 0.10.0", + "eth-types 0.1.0", + "ethers-core", + "halo2_proofs", + "itertools", + "log", + "rand", + "serde", + "serde_json", + "snark-verifier", + "snark-verifier-sdk", + "zkevm-circuits", +] + [[package]] name = "ahash" version = "0.7.6" @@ -78,6 +97,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" dependencies = [ + "colored", "num-traits", "rand", ] diff --git a/Cargo.toml b/Cargo.toml index 7698a67c3e..af51f7fcbf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,8 @@ members = [ "eth-types", "external-tracer", "mock", - "testool" + "testool", + "aggregator" ] [patch.crates-io] diff --git a/aggregator/Cargo.toml b/aggregator/Cargo.toml new file mode 100644 index 0000000000..b7101eeddd --- /dev/null +++ b/aggregator/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "aggregator" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +eth-types = { path = "../eth-types" } +zkevm-circuits = { path = "../zkevm-circuits" } + + +ark-std = "0.3.0" +env_logger = "0.10.0" +ethers-core = "0.17.0" +log = "0.4" +itertools = "0.10.3" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +rand = "0.8" + +halo2_proofs = { git = "https://github.com/privacy-scaling-explorations/halo2.git", tag = "v2023_02_02" } +snark-verifier = { git = 
"https://github.com/scroll-tech/snark-verifier", branch = "develop" } +snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features=false, features = ["loader_halo2", "loader_evm", "halo2-pse"] } + + +[features] +default = [] +print-trace = [ "ark-std/print-trace" ] \ No newline at end of file diff --git a/aggregator/README.md b/aggregator/README.md new file mode 100644 index 0000000000..98d25c60c8 --- /dev/null +++ b/aggregator/README.md @@ -0,0 +1,59 @@ +Proof Aggregation +----- + +![Architecture](./figures/architecture.png) + +This repo does proof aggregation for zkEVM proofs. + +## zkEVM circuit +A zkEVM circuit generates a ZK proof for a chunk of blocks. It takes 64 field elements as its public input, consisting of +- chunk's data hash digest: each byte is encoded in an Fr element +- chunk's public input hash digest: each byte is encoded in an Fr element +The total size for a public input is 64 bytes, encoded in 64 Fr elements + +For the ease of testing, this repo implements a `MockCircuit` which has the same public input APIs as a zkEVM circuit. + +## First compression circuit +The first compression circuit takes in a fresh snark proof and generates a new (potentially small) snark proof. +The public inputs to the new snark proof consist of +- 12 elements from the accumulators + - an accumulator consists of 2 G1 elements, which are the left and right inputs to the pairing + - this is treated as 4 Fq elements, each decomposed into 3 limbs and encoded in Fr +- 64 elements from previous snark + - re-expose the same public inputs as the original snark + +The first compression circuit is configured by the [wide config file](./configs/compression_wide.config). + +## Second compression circuit + +The second compression circuit takes in a compressed snark proof and generates a new (potentially small) snark proof. 
+The public inputs to the new snark proof consist of +- 12 elements from the accumulators + - an accumulator consists of 2 G1 elements, which are the left and right inputs to the pairing + - this is treated as 4 Fq elements, each decomposed into 3 limbs and encoded in Fr + - accumulator from the previous snark is accumulated into the current accumulator +- 64 elements from previous snark + - skipping the first 12 elements, which are the previous accumulator, as they are already accumulated + - re-expose the remaining 64 field elements as the public inputs + +The second compression circuit is configured by the [thin config file](./configs/compression_thin.config). + +## Aggregation circuit +An aggregation circuit takes in a batch of `k` proofs, each for a chunk of blocks. +It generates a single proof asserting the validity of all the proofs. + +It also performs public input aggregation, i.e., reducing the `64k` public elements into a fixed number of `144` elements: +- 12 elements from accumulators, which accumulate all the previous `k` accumulators from each snark +- 132 elements from the hashes + - first_chunk_prev_state_root: 32 Field elements + - last_chunk_post_state_root: 32 Field elements + - last_chunk_withdraw_root: 32 Field elements + - batch_public_input_hash: 32 Field elements + - chain_id: 8 Field elements + +In addition, it attests that, for chunks indexed from `0` to `k-1`, +- batch_data_hash := keccak(chunk_0.data_hash || ... || chunk_k-1.data_hash) where chunk_i.data_hash is a public input to the i-th batch snark circuit +- chunk_pi_hash := keccak(chain_id || prev_state_root || post_state_root || withdraw_root || chunk_data_hash) where chunk_data_hash is a public input to the i-th batch snark circuit +- and the related fields match the public inputs + +See [public input aggregation](./src/proof_aggregation/public_input_aggregation.rs) for the details of public input aggregation. 
\ No newline at end of file diff --git a/aggregator/configs/compression_thin.config b/aggregator/configs/compression_thin.config new file mode 100644 index 0000000000..6975f69170 --- /dev/null +++ b/aggregator/configs/compression_thin.config @@ -0,0 +1 @@ +{"strategy":"Simple","degree":26,"num_advice":[1],"num_lookup_advice":[1],"num_fixed":1,"lookup_bits":20,"limb_bits":88,"num_limbs":3} \ No newline at end of file diff --git a/aggregator/configs/compression_wide.config b/aggregator/configs/compression_wide.config new file mode 100644 index 0000000000..78bbf04075 --- /dev/null +++ b/aggregator/configs/compression_wide.config @@ -0,0 +1 @@ +{"strategy":"Simple","degree":22,"num_advice":[8],"num_lookup_advice":[1],"num_fixed":1,"lookup_bits":20,"limb_bits":88,"num_limbs":3} diff --git a/aggregator/figures/architecture.png b/aggregator/figures/architecture.png new file mode 100644 index 0000000000..d5d89ef432 Binary files /dev/null and b/aggregator/figures/architecture.png differ diff --git a/aggregator/src/chunk.rs b/aggregator/src/chunk.rs new file mode 100644 index 0000000000..734a9bd750 --- /dev/null +++ b/aggregator/src/chunk.rs @@ -0,0 +1,66 @@ +//! This module implements `Chunk` related data types. +//! A chunk is a list of blocks. +use eth_types::H256; +use ethers_core::utils::keccak256; + +#[derive(Default, Debug, Clone, Copy)] +/// A chunk is a set of continuous blocks. 
+/// A ChunkHash consists of 4 hashes, representing the changes incurred by this chunk of blocks: +/// - state root before this chunk +/// - state root after this chunk +/// - the withdraw root of this chunk +/// - the data hash of this chunk +pub struct ChunkHash { + /// Chain identifier + pub(crate) chain_id: u64, + /// state root before this chunk + pub(crate) prev_state_root: H256, + /// state root after this chunk + pub(crate) post_state_root: H256, + /// the withdraw root of this chunk + pub(crate) withdraw_root: H256, + /// the data hash of this chunk + pub(crate) data_hash: H256, +} + +impl ChunkHash { + /// Sample a chunk hash from random (for testing) + #[cfg(test)] + pub(crate) fn mock_chunk_hash(r: &mut R) -> Self { + let mut prev_state_root = [0u8; 32]; + r.fill_bytes(&mut prev_state_root); + let mut post_state_root = [0u8; 32]; + r.fill_bytes(&mut post_state_root); + let mut withdraw_root = [0u8; 32]; + r.fill_bytes(&mut withdraw_root); + let mut data_hash = [0u8; 32]; + r.fill_bytes(&mut data_hash); + Self { + chain_id: 0, + prev_state_root: prev_state_root.into(), + post_state_root: post_state_root.into(), + withdraw_root: withdraw_root.into(), + data_hash: data_hash.into(), + } + } + + /// Public input hash for a given chunk is defined as + /// keccak( chain id || prev state root || post state root || withdraw root || data hash ) + pub fn public_input_hash(&self) -> H256 { + let preimage = self.extract_hash_preimage(); + keccak256::<&[u8]>(preimage.as_ref()).into() + } + + /// Extract the preimage for the hash + /// chain id || prev state root || post state root || withdraw root || data hash + pub fn extract_hash_preimage(&self) -> Vec { + [ + self.chain_id.to_be_bytes().as_ref(), + self.prev_state_root.as_bytes(), + self.post_state_root.as_bytes(), + self.withdraw_root.as_bytes(), + self.data_hash.as_bytes(), + ] + .concat() + } +} diff --git a/aggregator/src/compression.rs b/aggregator/src/compression.rs new file mode 100644 index 
0000000000..5ed296ac86 --- /dev/null +++ b/aggregator/src/compression.rs @@ -0,0 +1,14 @@ +//! Input a proof, a compression circuit generates a new proof that may have smaller size. +//! +//! It re-exposes same public inputs from the input snark. +//! All this circuit does is to reduce the proof size. + +/// Circuit implementation of compression circuit. +mod circuit; +/// CircuitExt implementation of compression circuit. +mod circuit_ext; +/// Config for compression circuit +mod config; + +pub use circuit::CompressionCircuit; +pub use config::CompressionConfig; diff --git a/aggregator/src/compression/circuit.rs b/aggregator/src/compression/circuit.rs new file mode 100644 index 0000000000..7dfb6660ee --- /dev/null +++ b/aggregator/src/compression/circuit.rs @@ -0,0 +1,235 @@ +//! Circuit implementation for compression circuit. + +use std::fs::File; + +use ark_std::{end_timer, start_timer}; +use halo2_proofs::{ + circuit::{Layouter, SimpleFloorPlanner, Value}, + halo2curves::bn256::{Fq, G1Affine}, + plonk::{Circuit, ConstraintSystem, Error}, +}; +use rand::Rng; +use snark_verifier::{ + loader::{ + halo2::{ + halo2_ecc::halo2_base::{ + self, + halo2_proofs::{ + halo2curves::bn256::{Bn256, Fr}, + poly::{commitment::ParamsProver, kzg::commitment::ParamsKZG}, + }, + Context, ContextParams, + }, + Halo2Loader, + }, + native::NativeLoader, + }, + pcs::kzg::{Bdfg21, Kzg, KzgAccumulator, KzgSuccinctVerifyingKey}, + util::arithmetic::fe_to_limbs, +}; +use snark_verifier_sdk::{aggregate, flatten_accumulator, types::Svk, Snark, SnarkWitness}; + +use crate::{core::extract_accumulators_and_proof, param::ConfigParams, ACC_LEN, BITS, LIMBS}; + +use super::config::CompressionConfig; + +/// Input a proof, this compression circuit generates a new proof that may have smaller size. +/// +/// It re-exposes same public inputs from the input snark. +/// All this circuit does is to reduce the proof size. 
+#[derive(Clone)] +pub struct CompressionCircuit { + pub(crate) svk: KzgSuccinctVerifyingKey, + pub(crate) snark: SnarkWitness, + /// whether this circuit compresses a fresh snark + pub(crate) is_fresh: bool, + /// instances, flattened. + /// It re-exposes same public inputs from the input snark. + /// If the previous snark is already a compressed, this flattened_instances will + /// exclude the previous accumulator. + pub(crate) flattened_instances: Vec, + // accumulation scheme proof, private input + pub(crate) as_proof: Value>, +} + +impl Circuit for CompressionCircuit { + type Config = CompressionConfig; + type FloorPlanner = SimpleFloorPlanner; + + fn without_witnesses(&self) -> Self { + let flattened_instances = self + .snark + .instances + .iter() + .flat_map(|instance| instance.iter().map(|_| Fr::zero())) + .collect(); + + Self { + svk: self.svk, + snark: SnarkWitness::without_witnesses(&self.snark), + is_fresh: true, + flattened_instances, + as_proof: Value::unknown(), + } + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + // Too bad that configure function doesn't take additional input + // it would be nicer to load parameters from API rather than ENV + let path = std::env::var("COMPRESSION_CONFIG") + .unwrap_or_else(|_| "configs/compression_wide.config".to_owned()); + let params: ConfigParams = serde_json::from_reader( + File::open(path.as_str()).unwrap_or_else(|_| panic!("{path:?} does not exist")), + ) + .unwrap_or_else(|_| ConfigParams::default_compress_wide_param()); + + log::info!( + "compression circuit configured with k = {} and {:?} advice columns", + params.degree, + params.num_advice + ); + + // circuit configuration is built from config with given num columns etc + // can be wide or thin circuit + Self::Config::configure(meta, params) + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + let witness_time = start_timer!(|| "synthesize | compression Circuit"); + 
config + .range() + .load_lookup_table(&mut layouter) + .expect("load range lookup table"); + let mut first_pass = halo2_base::SKIP_FIRST_PASS; + let mut instances = vec![]; + layouter.assign_region( + || "compression circuit", + |region| { + if first_pass { + first_pass = false; + return Ok(()); + } + let ctx = Context::new( + region, + ContextParams { + max_rows: config.gate().max_rows, + num_context_ids: 1, + fixed_columns: config.gate().constants.clone(), + }, + ); + + let ecc_chip = config.ecc_chip(); + let loader = Halo2Loader::new(ecc_chip, ctx); + let (assigned_instances, acc) = aggregate::>( + &self.svk, + &loader, + &[self.snark.clone()], + self.as_proof(), + ); + + // instance of the compression circuit is defined as + // - accumulators + // - re-export the public input from snark + instances.extend( + flatten_accumulator(acc) + .iter() + .map(|assigned| assigned.cell()), + ); + // - if the snark is not a fresh one, assigned_instances already contains an + // accumulator so we want to skip the first 12 elements from the public input + let skip = if self.is_fresh { 0 } else { ACC_LEN }; + instances.extend(assigned_instances.iter().flat_map(|instance_column| { + instance_column.iter().skip(skip).map(|x| x.cell()) + })); + + config.range().finalize(&mut loader.ctx_mut()); + + loader.ctx_mut().print_stats(&["Range"]); + Ok(()) + }, + )?; + + // Expose instances + for (i, cell) in instances.into_iter().enumerate() { + layouter.constrain_instance(cell, config.instance, i)?; + } + + end_timer!(witness_time); + Ok(()) + } +} + +impl CompressionCircuit { + /// Build a new circuit from a snark, with a flag whether this snark has been compressed before + pub fn new( + params: &ParamsKZG, + snark: Snark, + is_fresh: bool, + rng: impl Rng + Send, + ) -> Result { + let svk = params.get_g()[0].into(); + + // for the proof compression, only ONE snark is under accumulation + // it is turned into an accumulator via KzgAs accumulation scheme + // in case not first time: + 
// (old_accumulator, public inputs) -> (new_accumulator, public inputs) + let (accumulator, as_proof) = + extract_accumulators_and_proof(params, &[snark.clone()], rng)?; + + // the instance for the outer circuit is + // - new accumulator, consists of 12 elements + // - inner circuit's instance, flattened (old accumulator is stripped out if exists) + // + // it is important that new accumulator is the first 12 elements + // as specified in CircuitExt::accumulator_indices() + let KzgAccumulator:: { lhs, rhs } = accumulator; + let acc_instances = [lhs.x, lhs.y, rhs.x, rhs.y] + .map(fe_to_limbs::) + .concat(); + // skip the old accumulator if exists + let skip = if is_fresh { 0 } else { ACC_LEN }; + let snark_instance = snark + .instances + .iter() + .flat_map(|instance| instance.iter().skip(skip)); + + let flattened_instances = acc_instances + .iter() + .chain(snark_instance) + .cloned() + .collect::>(); + + { + log::trace!("acc lhs: {:?}", lhs); + log::trace!("acc rhs: {:?}", rhs); + log::trace!("flattened instances:"); + for i in flattened_instances.iter() { + log::trace!("{:?}", i); + } + } + + Ok(Self { + svk, + snark: snark.into(), + is_fresh, + flattened_instances, + as_proof: Value::known(as_proof), + }) + } + + pub fn succinct_verifying_key(&self) -> &Svk { + &self.svk + } + + pub fn snark(&self) -> &SnarkWitness { + &self.snark + } + + pub fn as_proof(&self) -> Value<&[u8]> { + self.as_proof.as_ref().map(Vec::as_slice) + } +} diff --git a/aggregator/src/compression/circuit_ext.rs b/aggregator/src/compression/circuit_ext.rs new file mode 100644 index 0000000000..77a8f6d14a --- /dev/null +++ b/aggregator/src/compression/circuit_ext.rs @@ -0,0 +1,34 @@ +//! CircuitExt implementation for compression circuit. 
+ +use halo2_proofs::{halo2curves::bn256::Fr, plonk::Selector}; +use snark_verifier_sdk::CircuitExt; + +use crate::ACC_LEN; + +use super::circuit::CompressionCircuit; + +impl CircuitExt for CompressionCircuit { + fn num_instance(&self) -> Vec { + // re-expose inner public input + let snark_pi_len: usize = self.snark.instances.iter().map(|x| x.len()).sum(); + + // if the snark is not fresh, the snark_pi already contains elements for the accumulator + vec![snark_pi_len + ACC_LEN * self.is_fresh as usize] + } + + fn instances(&self) -> Vec> { + vec![self.flattened_instances.clone()] + } + + fn accumulator_indices() -> Option> { + // the accumulator are the first 12 cells in the instance + Some((0..ACC_LEN).map(|idx| (0, idx)).collect()) + } + + fn selectors(config: &Self::Config) -> Vec { + config.gate().basic_gates[0] + .iter() + .map(|gate| gate.q_enable) + .collect() + } +} diff --git a/aggregator/src/compression/config.rs b/aggregator/src/compression/config.rs new file mode 100644 index 0000000000..d577679266 --- /dev/null +++ b/aggregator/src/compression/config.rs @@ -0,0 +1,74 @@ +use halo2_proofs::{ + halo2curves::bn256::{Fq, Fr, G1Affine}, + plonk::{Column, ConstraintSystem, Instance}, +}; +use snark_verifier::loader::halo2::halo2_ecc::{ + ecc::{BaseFieldEccChip, EccChip}, + fields::fp::FpConfig, + halo2_base::{ + gates::{flex_gate::FlexGateConfig, range::RangeConfig}, + utils::modulus, + }, +}; + +use crate::{ + constants::{BITS, LIMBS}, + param::ConfigParams, +}; + +#[derive(Clone, Debug)] +/// Configurations for compression circuit +/// This config is hard coded for BN256 curve +pub struct CompressionConfig { + /// Non-native field chip configurations + pub base_field_config: FpConfig, + /// Instance for public input + pub instance: Column, +} + +impl CompressionConfig { + /// Build a configuration from parameters. 
+ pub fn configure(meta: &mut ConstraintSystem, params: ConfigParams) -> Self { + assert!( + params.limb_bits == BITS && params.num_limbs == LIMBS, + "For now we fix limb_bits = {}, otherwise change code", + BITS + ); + let base_field_config = FpConfig::configure( + meta, + params.strategy, + ¶ms.num_advice, + ¶ms.num_lookup_advice, + params.num_fixed, + params.lookup_bits, + params.limb_bits, + params.num_limbs, + modulus::(), + 0, + params.degree as usize, + ); + + let instance = meta.instance_column(); + meta.enable_equality(instance); + + Self { + base_field_config, + instance, + } + } + + /// Range gate configuration + pub fn range(&self) -> &RangeConfig { + &self.base_field_config.range + } + + /// Flex gate configuration + pub fn gate(&self) -> &FlexGateConfig { + &self.base_field_config.range.gate + } + + /// Ecc gate configuration + pub fn ecc_chip(&self) -> BaseFieldEccChip { + EccChip::construct(self.base_field_config.clone()) + } +} diff --git a/aggregator/src/constants.rs b/aggregator/src/constants.rs new file mode 100644 index 0000000000..d7674b43dc --- /dev/null +++ b/aggregator/src/constants.rs @@ -0,0 +1,15 @@ +// A chain_id is u64 and uses 8 bytes +#[allow(dead_code)] +pub(crate) const CHAIN_ID_LEN: usize = 8; + +// TODO(ZZ): update to the right degree +#[allow(dead_code)] +pub(crate) const LOG_DEGREE: u32 = 19; + +/// An decomposed accumulator consists of 12 field elements +pub(crate) const ACC_LEN: usize = 12; + +/// number of limbs when decomposing a field element in the ECC chip +pub(crate) const LIMBS: usize = 3; +/// number of bits in each limb in the ECC chip +pub(crate) const BITS: usize = 88; diff --git a/aggregator/src/core.rs b/aggregator/src/core.rs new file mode 100644 index 0000000000..462790f1b5 --- /dev/null +++ b/aggregator/src/core.rs @@ -0,0 +1,64 @@ +use halo2_proofs::{ + halo2curves::bn256::{Bn256, G1Affine}, + poly::{commitment::ParamsProver, kzg::commitment::ParamsKZG}, +}; +use rand::Rng; +use snark_verifier::{ + 
loader::native::NativeLoader, + pcs::{ + kzg::{Bdfg21, Kzg, KzgAccumulator, KzgAs}, + AccumulationSchemeProver, + }, + verifier::PlonkVerifier, + Error, +}; +use snark_verifier_sdk::{ + types::{PoseidonTranscript, Shplonk, POSEIDON_SPEC}, + Snark, +}; + +/// Subroutine for the witness generations. +/// Extract the accumulator and proof that from previous snarks. +/// Uses SHPlonk for accumulation. +pub(crate) fn extract_accumulators_and_proof( + params: &ParamsKZG, + snarks: &[Snark], + rng: impl Rng + Send, +) -> Result<(KzgAccumulator, Vec), Error> { + let svk = params.get_g()[0].into(); + + let mut transcript_read = + PoseidonTranscript::::from_spec(&[], POSEIDON_SPEC.clone()); + let accumulators = snarks + .iter() + .flat_map(|snark| { + transcript_read.new_stream(snark.proof.as_slice()); + let proof = Shplonk::read_proof( + &svk, + &snark.protocol, + &snark.instances, + &mut transcript_read, + ); + // each accumulator has (lhs, rhs) based on Shplonk + // lhs and rhs are EC points + Shplonk::succinct_verify(&svk, &snark.protocol, &snark.instances, &proof) + }) + .collect::>(); + + let mut transcript_write = + PoseidonTranscript::>::from_spec(vec![], POSEIDON_SPEC.clone()); + // We always use SHPLONK for accumulation scheme when aggregating proofs + let accumulator = + // core step + // KzgAs does KZG accumulation scheme based on given accumulators and random number (for adding blinding) + // accumulated ec_pt = ec_pt_1 * 1 + ec_pt_2 * r + ... + ec_pt_n * r^{n-1} + // ec_pt can be lhs and rhs + // r is the challenge squeezed from proof + KzgAs::>::create_proof::>, _>( + &Default::default(), + &accumulators, + &mut transcript_write, + rng, + )?; + Ok((accumulator, transcript_write.finalize())) +} diff --git a/aggregator/src/lib.rs b/aggregator/src/lib.rs new file mode 100644 index 0000000000..dc43c92a80 --- /dev/null +++ b/aggregator/src/lib.rs @@ -0,0 +1,19 @@ +// This module implements `Chunk` related data types. +// A chunk is a list of blocks. 
+mod chunk; +/// proof compression +mod compression; +/// Configurations +mod constants; +/// Core module for circuit assignment +mod core; +/// Parameters for compression circuit +mod param; + +#[cfg(test)] +mod tests; + +pub use chunk::ChunkHash; +pub use compression::*; +pub(crate) use constants::*; +pub use param::*; diff --git a/aggregator/src/param.rs b/aggregator/src/param.rs new file mode 100644 index 0000000000..458b20bb95 --- /dev/null +++ b/aggregator/src/param.rs @@ -0,0 +1,57 @@ +use snark_verifier::loader::halo2::halo2_ecc::fields::fp::FpStrategy; + +use crate::{BITS, LIMBS}; + +#[derive(serde::Serialize, serde::Deserialize, Clone, Debug)] +/// Parameters for aggregation circuit and compression circuit configs. +pub struct ConfigParams { + pub strategy: FpStrategy, + pub degree: u32, + pub num_advice: Vec, + pub num_lookup_advice: Vec, + pub num_fixed: usize, + pub lookup_bits: usize, + pub limb_bits: usize, + pub num_limbs: usize, +} + +impl ConfigParams { + pub(crate) fn _aggregation_param() -> Self { + Self { + strategy: FpStrategy::Simple, + degree: 23, + num_advice: vec![8], + num_lookup_advice: vec![1], + num_fixed: 1, + lookup_bits: 20, + limb_bits: BITS, + num_limbs: LIMBS, + } + } + + pub(crate) fn default_compress_wide_param() -> Self { + Self { + strategy: FpStrategy::Simple, + degree: 22, + num_advice: vec![35], + num_lookup_advice: vec![1], + num_fixed: 1, + lookup_bits: 20, + limb_bits: BITS, + num_limbs: LIMBS, + } + } + + pub(crate) fn _compress_thin_param() -> Self { + Self { + strategy: FpStrategy::Simple, + degree: 25, + num_advice: vec![1], + num_lookup_advice: vec![1], + num_fixed: 1, + lookup_bits: 20, + limb_bits: BITS, + num_limbs: LIMBS, + } + } +} diff --git a/aggregator/src/tests.rs b/aggregator/src/tests.rs new file mode 100644 index 0000000000..ef778894ad --- /dev/null +++ b/aggregator/src/tests.rs @@ -0,0 +1,188 @@ +pub(crate) mod compression; +pub(crate) mod mock_chunk; + +#[macro_export] +macro_rules! 
layer_0 { + // generate a snark for layer 0 + ($circuit: ident, $circuit_type: ident, $param: ident, $degree: ident, $path: ident) => {{ + let timer = start_timer!(|| "gen layer 0 snark"); + + let mut rng = test_rng(); + let param = { + let mut param = $param.clone(); + param.downsize($degree); + param + }; + + let pk = gen_pk( + ¶m, + &$circuit, + Some(&$path.join(Path::new("layer_0.pkey"))), + ); + log::trace!("finished layer 0 pk generation for circuit"); + + let snark = gen_snark_shplonk(¶m, &pk, $circuit.clone(), &mut rng, None::); + log::trace!("finished layer 0 snark generation for circuit"); + + assert!(verify_snark_shplonk::<$circuit_type>( + ¶m, + snark.clone(), + pk.get_vk() + )); + + log::trace!("finished layer 0 snark verification"); + log::trace!("proof size: {}", snark.proof.len()); + log::trace!( + "pi size: {}", + snark.instances.iter().map(|x| x.len()).sum::() + ); + + log::trace!("layer 0 circuit instances"); + for (i, e) in $circuit.instances()[0].iter().enumerate() { + log::trace!("{}-th public input: {:?}", i, e); + } + end_timer!(timer); + snark + }}; +} + +#[macro_export] +macro_rules! 
compression_layer_snark { + // generate a snark for compression layer + ($previous_snark: ident, $param: ident, $degree: ident, $path: ident, $layer_index: expr) => {{ + let timer = start_timer!(|| format!("gen layer {} snark", $layer_index)); + + let param = { + let mut param = $param.clone(); + param.downsize($degree); + param + }; + + let mut rng = test_rng(); + + let is_fresh = if $layer_index == 1 { true } else { false }; + let compression_circuit = + CompressionCircuit::new(&$param, $previous_snark.clone(), is_fresh, &mut rng).unwrap(); + + let pk = gen_pk(&$param, &compression_circuit, None); + // build the snark for next layer + let snark = gen_snark_shplonk( + ¶m, + &pk, + compression_circuit.clone(), + &mut rng, + None::, // Some(&$path.join(Path::new("layer_1.snark"))), + ); + log::trace!( + "finished layer {} snark generation for circuit", + $layer_index + ); + + assert!(verify_snark_shplonk::( + ¶m, + snark.clone(), + pk.get_vk() + )); + + end_timer!(timer); + snark + }}; +} + +#[macro_export] +macro_rules! 
compression_layer_evm { + // generate a evm proof and verify it for compression layer + ($previous_snark: ident, $param: ident, $degree: ident, $path: ident,$layer_index: expr) => {{ + let timer = start_timer!(|| format!("gen layer {} snark", $layer_index)); + + let param = { + let mut param = $param.clone(); + param.downsize($degree); + param + }; + + let mut rng = test_rng(); + + let compression_circuit = + CompressionCircuit::new(&$param, $previous_snark, false, &mut rng).unwrap(); + + let instances = compression_circuit.instances(); + + let pk = gen_pk(&$param, &compression_circuit, None); + // build the snark for next layer + let proof = gen_evm_proof_shplonk( + ¶m, + &pk, + compression_circuit.clone(), + instances.clone(), + &mut rng, + ); + + log::trace!("finished layer 4 aggregation generation"); + log::trace!("proof size: {}", proof.len()); + + // verify proof via EVM + let deployment_code = gen_evm_verifier::>( + ¶m, + pk.get_vk(), + compression_circuit.num_instance(), + Some(&$path.join(Path::new("contract.sol"))), + ); + log::trace!("finished layer 4 bytecode generation"); + + evm_verify( + deployment_code, + compression_circuit.instances(), + proof.clone(), + ); + log::trace!("layer 2 evm verification finished"); + + end_timer!(timer); + }}; +} + +#[macro_export] +macro_rules! 
aggregation_layer_snark { + // generate a snark for compression layer + ($previous_snarks: ident, $param: ident, $degree: ident, $path: ident, $layer_index: expr, $chunks: ident) => {{ + let timer = start_timer!(|| format!("gen layer {} snark", $layer_index)); + + let param = { + let mut param = $param.clone(); + param.downsize($degree); + param + }; + + let mut rng = test_rng(); + + let aggregation_circuit = AggregationCircuit::new( + &$param, + $previous_snarks.as_ref(), + &mut rng, + $chunks.as_ref(), + ); + + let pk = gen_pk(&$param, &aggregation_circuit, None); + // build the snark for next layer + let snark = gen_snark_shplonk( + ¶m, + &pk, + aggregation_circuit.clone(), + &mut rng, + None::, // Some(&$path.join(Path::new("layer_3.snark"))), + ); + log::trace!( + "finished layer {} snark generation for circuit", + $layer_index + ); + + assert!(verify_snark_shplonk::( + ¶m, + snark.clone(), + pk.get_vk() + )); + + end_timer!(timer); + snark + }}; +} diff --git a/aggregator/src/tests/compression.rs b/aggregator/src/tests/compression.rs new file mode 100644 index 0000000000..98e33c807d --- /dev/null +++ b/aggregator/src/tests/compression.rs @@ -0,0 +1,100 @@ +use std::{fs, path::Path, process}; + +use ark_std::{end_timer, start_timer, test_rng}; +use halo2_proofs::{ + dev::MockProver, + halo2curves::bn256::{Bn256, Fr}, + poly::commitment::Params, +}; +use snark_verifier::{ + loader::halo2::halo2_ecc::halo2_base::{halo2_proofs, utils::fs::gen_srs}, + pcs::kzg::{Bdfg21, Kzg}, +}; +use snark_verifier_sdk::{ + evm_verify, gen_evm_proof_shplonk, gen_evm_verifier, gen_pk, gen_snark_shplonk, + verify_snark_shplonk, CircuitExt, +}; + +use crate::{ + compression_layer_evm, compression_layer_snark, layer_0, tests::mock_chunk::MockChunkCircuit, + CompressionCircuit, +}; + +#[ignore = "it takes too much time"] +#[test] +fn test_mock_compression() { + env_logger::init(); + + if std::path::Path::new("data").is_dir() { + println!("data folder already exists\n"); + } else { + 
println!("Generating data folder used for testing\n"); + std::fs::create_dir("data").unwrap(); + } + + let dir = format!("data/{}", process::id()); + let path = Path::new(dir.as_str()); + fs::create_dir(path).unwrap(); + + let k0 = 8; + let k1 = 22; + + let mut rng = test_rng(); + let params = gen_srs(k1); + + // Proof for test circuit + let circuit = MockChunkCircuit::random(&mut rng, true); + let layer_0_snark = layer_0!(circuit, MockChunkCircuit, params, k0, path); + + std::env::set_var("COMPRESSION_CONFIG", "./configs/compression_wide.config"); + // layer 1 proof compression + { + let param = { + let mut param = params; + param.downsize(k1); + param + }; + let compression_circuit = + CompressionCircuit::new(¶m, layer_0_snark, true, &mut rng).unwrap(); + let instance = compression_circuit.instances(); + println!("instance length {:?}", instance.len()); + + let mock_prover = MockProver::::run(k1, &compression_circuit, instance).unwrap(); + + mock_prover.assert_satisfied_par() + } +} + +// This test takes about 1 hour on CPU +#[ignore = "it takes too much time"] +#[test] +fn test_two_layer_proof_compression() { + env_logger::init(); + + if std::path::Path::new("data").is_dir() { + println!("data folder already exists\n"); + } else { + println!("Generating data folder used for testing\n"); + std::fs::create_dir("data").unwrap(); + } + + let dir = format!("data/{}", process::id()); + let path = Path::new(dir.as_str()); + fs::create_dir(path).unwrap(); + + let k0 = 19; + let k1 = 25; + let k2 = 25; + + let mut rng = test_rng(); + let layer_2_params = gen_srs(k2); + + let circuit = MockChunkCircuit::random(&mut rng, true); + let layer_0_snark = layer_0!(circuit, MockChunkCircuit, layer_2_params, k0, path); + + std::env::set_var("COMPRESSION_CONFIG", "./configs/compression_wide.config"); + let layer_1_snark = compression_layer_snark!(layer_0_snark, layer_2_params, k1, path, 1); + + std::env::set_var("COMPRESSION_CONFIG", "./configs/compression_thin.config"); + 
compression_layer_evm!(layer_1_snark, layer_2_params, k2, path, 2); +} diff --git a/aggregator/src/tests/mock_chunk.rs b/aggregator/src/tests/mock_chunk.rs new file mode 100644 index 0000000000..057e3d9ba3 --- /dev/null +++ b/aggregator/src/tests/mock_chunk.rs @@ -0,0 +1,52 @@ +use ark_std::test_rng; +use halo2_proofs::{dev::MockProver, halo2curves::bn256::Fr}; +use snark_verifier_sdk::CircuitExt; + +use crate::{ChunkHash, LOG_DEGREE}; + +mod circuit; +mod circuit_ext; +mod config; + +#[derive(Debug, Default, Clone, Copy)] +/// A mock chunk circuit +/// +/// This mock chunk circuit simulates a zkEVM circuit. +/// It's public inputs consists of 64 elements: +/// - data hash +/// - public input hash +pub(crate) struct MockChunkCircuit { + pub(crate) is_fresh: bool, + pub(crate) chain_id: u64, + pub(crate) chunk: ChunkHash, +} + +impl MockChunkCircuit { + #[allow(dead_code)] + pub(crate) fn new(is_fresh: bool, chain_id: u64, chunk: ChunkHash) -> Self { + MockChunkCircuit { + is_fresh, + chain_id, + chunk, + } + } +} + +#[test] +fn test_mock_chunk_prover() { + let mut rng = test_rng(); + + let circuit = MockChunkCircuit::random(&mut rng, true); + let instance = circuit.instances(); + + let mock_prover = MockProver::::run(LOG_DEGREE, &circuit, instance).unwrap(); + + mock_prover.assert_satisfied_par(); + + let circuit = MockChunkCircuit::random(&mut rng, false); + let instance = circuit.instances(); + + let mock_prover = MockProver::::run(LOG_DEGREE, &circuit, instance).unwrap(); + + mock_prover.assert_satisfied_par(); +} diff --git a/aggregator/src/tests/mock_chunk/circuit.rs b/aggregator/src/tests/mock_chunk/circuit.rs new file mode 100644 index 0000000000..9beeb4acaf --- /dev/null +++ b/aggregator/src/tests/mock_chunk/circuit.rs @@ -0,0 +1,91 @@ +use std::iter; + +use halo2_proofs::{ + circuit::{Layouter, SimpleFloorPlanner, Value}, + halo2curves::bn256::Fr, + plonk::{Circuit, ConstraintSystem, Error}, +}; + +use crate::ChunkHash; + +use 
super::{config::MockPlonkConfig, MockChunkCircuit}; + +impl MockChunkCircuit { + pub(crate) fn random(r: &mut R, is_fresh: bool) -> Self { + Self { + is_fresh, + chain_id: 0, + chunk: ChunkHash::mock_chunk_hash(r), + } + } +} + +impl Circuit for MockChunkCircuit { + type Config = MockPlonkConfig; + type FloorPlanner = SimpleFloorPlanner; + + fn without_witnesses(&self) -> Self { + Self::default() + } + + fn configure(meta: &mut ConstraintSystem) -> Self::Config { + meta.set_minimum_degree(4); + MockPlonkConfig::configure(meta) + } + + fn synthesize( + &self, + config: Self::Config, + mut layouter: impl Layouter, + ) -> Result<(), Error> { + layouter.assign_region( + || "mock circuit", + |mut region| { + let acc_len = if self.is_fresh { 0 } else { 12 }; + + for (i, byte) in iter::repeat(0) + .take(acc_len) + .chain( + self.chunk + .chain_id + .to_be_bytes() + .iter() + .chain( + self.chunk + .data_hash + .as_bytes() + .iter() + .chain(self.chunk.public_input_hash().as_bytes().iter()), + ) + .copied(), + ) + .enumerate() + { + // "q_a·a + q_b·b + q_c·c + q_ab·a·b + constant + instance = 0", + region.assign_advice( + || "a", + config.a, + i, + || Value::known(Fr::from(byte as u64)), + )?; + region.assign_advice(|| "b", config.b, i, || Value::known(Fr::zero()))?; + region.assign_advice(|| "c", config.c, i, || Value::known(Fr::zero()))?; + + region.assign_fixed(|| "q_a", config.q_a, i, || Value::known(-Fr::one()))?; + region.assign_fixed(|| "q_b", config.q_b, i, || Value::known(Fr::zero()))?; + region.assign_fixed(|| "q_c", config.q_c, i, || Value::known(Fr::zero()))?; + region.assign_fixed(|| "q_ab", config.q_ab, i, || Value::known(Fr::zero()))?; + region.assign_fixed( + || "constant", + config.constant, + i, + || Value::known(Fr::zero()), + )?; + } + Ok(()) + }, + )?; + + Ok(()) + } +} diff --git a/aggregator/src/tests/mock_chunk/circuit_ext.rs b/aggregator/src/tests/mock_chunk/circuit_ext.rs new file mode 100644 index 0000000000..3964cfbc5d --- /dev/null +++ 
b/aggregator/src/tests/mock_chunk/circuit_ext.rs @@ -0,0 +1,38 @@ +use std::iter; + +use halo2_proofs::halo2curves::bn256::Fr; +use snark_verifier_sdk::CircuitExt; + +use crate::{ACC_LEN, CHAIN_ID_LEN}; + +use super::MockChunkCircuit; + +impl CircuitExt for MockChunkCircuit { + /// 64 elements from digest + fn num_instance(&self) -> Vec { + let acc_len = if self.is_fresh { 0 } else { ACC_LEN }; + vec![64 + CHAIN_ID_LEN + acc_len] + } + + /// return vec![data hash | public input hash] + fn instances(&self) -> Vec> { + let acc_len = if self.is_fresh { 0 } else { ACC_LEN }; + vec![iter::repeat(0) + .take(acc_len) + .chain( + self.chain_id + .to_be_bytes() + .iter() + .chain( + self.chunk + .data_hash + .as_bytes() + .iter() + .chain(self.chunk.public_input_hash().as_bytes().iter()), + ) + .copied(), + ) + .map(|x| Fr::from(x as u64)) + .collect()] + } +} diff --git a/aggregator/src/tests/mock_chunk/config.rs b/aggregator/src/tests/mock_chunk/config.rs new file mode 100644 index 0000000000..d5e7f4a4f0 --- /dev/null +++ b/aggregator/src/tests/mock_chunk/config.rs @@ -0,0 +1,60 @@ +use halo2_proofs::{ + halo2curves::bn256::Fr, + plonk::{Advice, Column, ConstraintSystem, Fixed, Instance}, + poly::Rotation, +}; +use snark_verifier::loader::halo2::halo2_ecc::halo2_base::halo2_proofs; + +#[derive(Clone, Copy)] +pub(crate) struct MockPlonkConfig { + pub(crate) a: Column, + pub(crate) b: Column, + pub(crate) c: Column, + pub(crate) q_a: Column, + pub(crate) q_b: Column, + pub(crate) q_c: Column, + pub(crate) q_ab: Column, + pub(crate) constant: Column, + #[allow(dead_code)] + pub(crate) instance: Column, +} + +impl MockPlonkConfig { + pub(crate) fn configure(meta: &mut ConstraintSystem) -> Self { + let [a, b, c] = [(); 3].map(|_| meta.advice_column()); + let [q_a, q_b, q_c, q_ab, constant] = [(); 5].map(|_| meta.fixed_column()); + let instance = meta.instance_column(); + + [a, b, c].map(|column| meta.enable_equality(column)); + + meta.create_gate( + "q_a·a + q_b·b + q_c·c + 
q_ab·a·b + constant + instance = 0", + |meta| { + let [a, b, c] = [a, b, c].map(|column| meta.query_advice(column, Rotation::cur())); + let [q_a, q_b, q_c, q_ab, constant] = [q_a, q_b, q_c, q_ab, constant] + .map(|column| meta.query_fixed(column, Rotation::cur())); + let instance = meta.query_instance(instance, Rotation::cur()); + Some( + q_a * a.clone() + + q_b * b.clone() + + q_c * c + + q_ab * a * b + + constant + + instance, + ) + }, + ); + + MockPlonkConfig { + a, + b, + c, + q_a, + q_b, + q_c, + q_ab, + constant, + instance, + } + } +} diff --git a/aggregator/tests.sh b/aggregator/tests.sh new file mode 100755 index 0000000000..47fec94898 --- /dev/null +++ b/aggregator/tests.sh @@ -0,0 +1,8 @@ +RUST_LOG=trace MODE=greeter cargo test --release --features=print-trace test_mock_chunk_prover -- --nocapture 2>&1 | tee mock_chunk.log +# RUST_LOG=trace MODE=greeter cargo test --release --features=print-trace test_mock_aggregation -- --nocapture 2>&1 | tee mock_aggregation.log +RUST_LOG=trace MODE=greeter cargo test --release --features=print-trace test_mock_compression -- --nocapture 2>&1 | tee compression.log + +# the following 3 tests takes super long time +# RUST_LOG=trace MODE=greeter cargo test --release --features=print-trace test_aggregation_circuit -- --ignored --nocapture 2>&1 | tee aggregation.log +# RUST_LOG=trace MODE=greeter cargo test --release --features=print-trace test_two_layer_proof_compression -- --ignored --nocapture 2>&1 | tee compression_2_layer.log +# RUST_LOG=trace MODE=greeter cargo test --release --features=print-trace test_e2e -- --ignored --nocapture 2>&1 | tee aggregation_e2e.log diff --git a/zkevm-circuits/src/keccak_circuit.rs b/zkevm-circuits/src/keccak_circuit.rs index fe12af8890..8f45588d16 100644 --- a/zkevm-circuits/src/keccak_circuit.rs +++ b/zkevm-circuits/src/keccak_circuit.rs @@ -2,7 +2,7 @@ mod cell_manager; /// Keccak packed multi pub mod keccak_packed_multi; -mod param; +pub(crate) mod param; mod table; /// Util mod util; 
@@ -57,7 +57,7 @@ pub struct KeccakCircuitConfig { q_padding_last: Column, /// The columns for other circuits to lookup Keccak hash results pub keccak_table: KeccakTable, - /// Expose the columns that stores the cells for hash input/output + /// The cell manager that stores/allocates the advice columns pub cell_manager: CellManager, round_cst: Column, normalize_3: [TableColumn; 2], @@ -69,6 +69,7 @@ pub struct KeccakCircuitConfig { } /// Circuit configuration arguments +#[derive(Debug, Clone)] pub struct KeccakCircuitConfigArgs { /// KeccakTable pub keccak_table: KeccakTable, @@ -303,7 +304,7 @@ impl SubCircuitConfig for KeccakCircuitConfig { // multiple rows with lookups in a way that doesn't require any // extra additional cells or selectors we have to put all `s[i]`'s on the same // row. This isn't that strong of a requirement actually because we the - // words are split into multipe parts, and so only the parts at the same + // words are split into multiple parts, and so only the parts at the same // position of those words need to be on the same row. let target_word_sizes = target_part_sizes(part_size); let num_word_parts = target_word_sizes.len(); @@ -868,6 +869,7 @@ impl SubCircuitConfig for KeccakCircuitConfig { } impl KeccakCircuitConfig { + /// Assign the circuit for hash function pub(crate) fn assign( &self, layouter: &mut impl Layouter, @@ -960,7 +962,7 @@ impl KeccakCircuitConfig { Ok(res) } - /// Load the auxiliary table for keccak table. 
+ /// Load the auxiliary tables for keccak circuit pub fn load_aux_tables(&self, layouter: &mut impl Layouter) -> Result<(), Error> { load_normalize_table(layouter, "normalize_6", &self.normalize_6, 6u64)?; load_normalize_table(layouter, "normalize_4", &self.normalize_4, 4u64)?; @@ -989,7 +991,12 @@ impl KeccakCircuitConfig { /// KeccakCircuit #[derive(Default, Clone, Debug)] pub struct KeccakCircuit { + // The input is a two dimensional vector + // Each input row is a pre-image of the hash + // The output row of the hash, i.e., the digest is NOT part of the circuit input inputs: Vec>, + // The maximum number of rows, for example, 2^20 + // This needs to be large enough for the circuit. num_rows: usize, _marker: PhantomData, } @@ -1001,7 +1008,7 @@ impl SubCircuit for KeccakCircuit { keccak_unusable_rows() } - /// The `block.circuits_params.keccak_padding` parmeter, when enabled, sets + /// The `block.circuits_params.keccak_padding` parameter, when enabled, sets /// up the circuit to support a fixed number of permutations/keccak_f's, /// independently of the permutations required by `inputs`. fn new_from_block(block: &witness::Block) -> Self { diff --git a/zkevm-circuits/src/keccak_circuit/cell_manager.rs b/zkevm-circuits/src/keccak_circuit/cell_manager.rs index de58ad619e..a2fdb30e57 100644 --- a/zkevm-circuits/src/keccak_circuit/cell_manager.rs +++ b/zkevm-circuits/src/keccak_circuit/cell_manager.rs @@ -153,7 +153,7 @@ impl CellManager { self.rows.iter().cloned().max().unwrap() } - /// expose the columns used for keccak cell + /// Expose the columns used by the cell manager by reference. 
pub fn columns(&self) -> &[CellColumn] { &self.columns } diff --git a/zkevm-circuits/src/keccak_circuit/keccak_packed_multi.rs b/zkevm-circuits/src/keccak_circuit/keccak_packed_multi.rs index 90fd3e6ddd..544041cbac 100644 --- a/zkevm-circuits/src/keccak_circuit/keccak_packed_multi.rs +++ b/zkevm-circuits/src/keccak_circuit/keccak_packed_multi.rs @@ -67,11 +67,11 @@ pub struct KeccakRow { pub(crate) q_padding: bool, pub(crate) q_padding_last: bool, pub(crate) round_cst: F, - /// if the row is the last row of the current keccak hash + /// if the row is the last row of the current keccak round pub is_final: bool, /// the value of the cells that are to be assigned pub cell_values: Vec, - /// the length of the hash input + /// The input length of the hash function pub length: usize, pub(crate) data_rlc: Value, pub(crate) hash_rlc: Value, @@ -423,7 +423,7 @@ pub(crate) mod transform { } } -// Transfroms values to cells +// Transforms values to cells pub(crate) mod transform_to { use super::{Cell, KeccakRegion, Part, PartValue}; use crate::{ @@ -492,7 +492,10 @@ pub(crate) mod transform_to { } } -fn keccak_rows(bytes: &[u8], challenges: Challenges>) -> Vec> { +pub(crate) fn keccak_rows( + bytes: &[u8], + challenges: Challenges>, +) -> Vec> { let mut rows = Vec::new(); keccak(&mut rows, bytes, challenges); rows @@ -868,18 +871,6 @@ pub fn multi_keccak( }); } - // Dedup actual keccaks - // let inputs_len: usize = bytes.iter().map(|k| k.len()).sum(); - // let inputs_num = bytes.len(); - // for (idx, bytes) in bytes.iter().enumerate() { - // debug!("{}th keccak is of len {}", idx, bytes.len()); - // } - // let bytes: Vec<_> = bytes.iter().unique().collect(); - // let inputs_len2: usize = bytes.iter().map(|k| k.len()).sum(); - // let inputs_num2 = bytes.len(); - // debug!("after dedup inputs, input num {inputs_num}->{inputs_num2}, input total len - // {inputs_len}->{inputs_len2}"); - // TODO: optimize the `extend` using Iter? 
let real_rows: Vec<_> = bytes .par_iter() diff --git a/zkevm-circuits/src/sig_circuit.rs b/zkevm-circuits/src/sig_circuit.rs index 9e5266f843..f976386b33 100644 --- a/zkevm-circuits/src/sig_circuit.rs +++ b/zkevm-circuits/src/sig_circuit.rs @@ -101,9 +101,9 @@ impl SubCircuitConfig for SigCircuitConfig { let num_advice = [calc_required_advices(MAX_NUM_SIG), 1]; #[cfg(feature = "onephase")] - log::debug!("configuring ECDSA chip with single phase"); + log::info!("configuring ECDSA chip with single phase"); #[cfg(not(feature = "onephase"))] - log::debug!("configuring ECDSA chip with multiple phases"); + log::info!("configuring ECDSA chip with multiple phases"); // halo2-ecc's ECDSA config // @@ -766,7 +766,7 @@ impl SigCircuit { .collect::>, Error>>()?; // IMPORTANT: Move to Phase2 before RLC - log::debug!("before proceeding to the next phase"); + log::info!("before proceeding to the next phase"); ctx.print_stats(&["Range"]); #[cfg(not(feature = "onephase"))] @@ -827,7 +827,7 @@ impl SigCircuit { // check lookups // This is not optional. let lookup_cells = ecdsa_chip.finalize(&mut ctx); - log::debug!("total number of lookup cells: {}", lookup_cells); + log::info!("total number of lookup cells: {}", lookup_cells); ctx.print_stats(&["Range"]); Ok(assigned_keccak_values_and_sigs