From 77c7792ec37dda339755a980ea74046e5b9846f7 Mon Sep 17 00:00:00 2001
From: Georg Wiese
Date: Mon, 8 Jul 2024 14:38:46 +0200
Subject: [PATCH] Refactor

---
 backend/src/composite/mod.rs              | 14 +++--
 backend/src/estark/mod.rs                 | 13 +++--
 backend/src/estark/starky_wrapper.rs      | 14 +++--
 backend/src/halo2/mod.rs                  | 32 ++++++------
 backend/src/lib.rs                        | 23 ++-------
 executor/src/constant_evaluator/mod.rs    |  6 +--
 executor/src/witgen/block_processor.rs    |  5 +-
 executor/src/witgen/global_constraints.rs |  5 +-
 executor/src/witgen/mod.rs                | 10 ++--
 number/src/lib.rs                         | 51 +++++++++++++++++-
 number/src/serialize.rs                   | 44 ++++++++--------
 pipeline/src/pipeline.rs                  | 63 ++++++++---------------
 pipeline/src/util.rs                      | 16 +++---
 13 files changed, 157 insertions(+), 139 deletions(-)

diff --git a/backend/src/composite/mod.rs b/backend/src/composite/mod.rs
index b7a66dea8..29339ca00 100644
--- a/backend/src/composite/mod.rs
+++ b/backend/src/composite/mod.rs
@@ -2,11 +2,11 @@ use std::{collections::BTreeMap, io, marker::PhantomData, path::PathBuf, sync::A

 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::witgen::WitgenCallback;
-use powdr_number::{DegreeType, FieldElement};
+use powdr_number::{DegreeType, FieldElement, FixedColumns};
 use serde::{Deserialize, Serialize};
 use split::select_machine_columns;

-use crate::{get_only_size, Backend, BackendFactory, BackendOptions, Error, Proof};
+use crate::{Backend, BackendFactory, BackendOptions, Error, Proof};

 mod split;

@@ -35,7 +35,7 @@ impl> BackendFactory for CompositeBacke
     fn create<'a>(
         &self,
         pil: Arc>,
-        fixed: Arc>)>>,
+        fixed: Arc>,
         output_dir: Option,
         setup: Option<&mut dyn std::io::Read>,
         verification_key: Option<&mut dyn std::io::Read>,
@@ -47,7 +47,11 @@ impl> BackendFactory for CompositeBacke
         }

         // TODO: Handle multiple sizes.
-        let fixed = Arc::new(get_only_size(&fixed)?);
+        let fixed = Arc::new(
+            fixed
+                .get_only_size()
+                .map_err(|_| Error::NoVariableDegreeAvailable)?,
+        );

         let per_machine_data = split::split_pil((*pil).clone())
             .into_iter()
@@ -70,7 +74,7 @@ impl> BackendFactory for CompositeBacke
                     .map(|(column_name, values)| {
                         (column_name, [(values.len(), values)].into_iter().collect())
                     })
-                    .collect(),
+                    .into(),
             );
             let backend = self.factory.create(
                 pil.clone(),
diff --git a/backend/src/estark/mod.rs b/backend/src/estark/mod.rs
index 7ee3cef63..e83380940 100644
--- a/backend/src/estark/mod.rs
+++ b/backend/src/estark/mod.rs
@@ -5,7 +5,6 @@ pub mod polygon_wrapper;
 pub mod starky_wrapper;

 use std::{
-    collections::BTreeMap,
     fs::File,
     io::{self, BufWriter, Write},
     iter::{once, repeat},
@@ -13,11 +12,11 @@ use std::{
     sync::Arc,
 };

-use crate::{get_only_size, Backend, BackendFactory, BackendOptions, Error, Proof};
+use crate::{Backend, BackendFactory, BackendOptions, Error, Proof};
 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::witgen::WitgenCallback;
-use powdr_number::{DegreeType, FieldElement};
+use powdr_number::{DegreeType, FieldElement, FixedColumns};
 use serde::Serialize;
 use starky::types::{StarkStruct, Step, PIL};

@@ -223,14 +222,18 @@ impl BackendFactory for DumpFactory {
     fn create<'a>(
         &self,
         analyzed: Arc>,
-        fixed: Arc>)>>,
+        fixed: Arc>,
         output_dir: Option,
         setup: Option<&mut dyn std::io::Read>,
         verification_key: Option<&mut dyn std::io::Read>,
         verification_app_key: Option<&mut dyn std::io::Read>,
         options: BackendOptions,
     ) -> Result + 'a>, Error> {
-        let fixed = Arc::new(get_only_size(&fixed)?);
+        let fixed = Arc::new(
+            fixed
+                .get_only_size()
+                .map_err(|_| Error::NoVariableDegreeAvailable)?,
+        );
         Ok(Box::new(DumpBackend(EStarkFilesCommon::create(
             &analyzed,
             fixed,
diff --git a/backend/src/estark/starky_wrapper.rs b/backend/src/estark/starky_wrapper.rs
index f182f4d5c..537f500ff 100644
--- a/backend/src/estark/starky_wrapper.rs
+++ b/backend/src/estark/starky_wrapper.rs
@@ -1,12 +1,12 @@
+use std::io;
 use std::path::PathBuf;
 use std::sync::Arc;
 use std::time::Instant;
-use std::{collections::BTreeMap, io};

-use crate::{get_only_size, Backend, BackendFactory, BackendOptions, Error};
+use crate::{Backend, BackendFactory, BackendOptions, Error};
 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::witgen::WitgenCallback;
-use powdr_number::{FieldElement, GoldilocksField, LargeInt};
+use powdr_number::{FieldElement, FixedColumns, GoldilocksField, LargeInt};

 use starky::{
     merklehash::MerkleTreeGL,
@@ -27,7 +27,7 @@ impl BackendFactory for Factory {
     fn create<'a>(
         &self,
         pil: Arc>,
-        fixed: Arc>)>>,
+        fixed: Arc>,
         _output_dir: Option,
         setup: Option<&mut dyn std::io::Read>,
         verification_key: Option<&mut dyn std::io::Read>,
@@ -50,7 +50,11 @@ impl BackendFactory for Factory {
             return Err(Error::NoVariableDegreeAvailable);
         }

-        let fixed = Arc::new(get_only_size(&fixed)?);
+        let fixed = Arc::new(
+            fixed
+                .get_only_size()
+                .map_err(|_| Error::NoVariableDegreeAvailable)?,
+        );

         let proof_type: ProofType = ProofType::from(options);

diff --git a/backend/src/halo2/mod.rs b/backend/src/halo2/mod.rs
index bbfcd4c9e..d17026023 100644
--- a/backend/src/halo2/mod.rs
+++ b/backend/src/halo2/mod.rs
@@ -1,14 +1,13 @@
 #![deny(clippy::print_stdout)]

-use std::collections::BTreeMap;
 use std::io;
 use std::path::PathBuf;
 use std::sync::Arc;

-use crate::{get_only_size, Backend, BackendFactory, BackendOptions, Error, Proof};
+use crate::{Backend, BackendFactory, BackendOptions, Error, Proof};
 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::witgen::WitgenCallback;
-use powdr_number::{DegreeType, FieldElement};
+use powdr_number::{DegreeType, FieldElement, FixedColumns};
 use prover::{generate_setup, Halo2Prover};
 use serde::de::{self, Deserializer};
@@ -77,7 +76,7 @@ impl BackendFactory for Halo2ProverFactory {
     fn create<'a>(
         &self,
         pil: Arc>,
-        fixed: Arc>)>>,
+        fixed: Arc>,
         _output_dir: Option,
         setup: Option<&mut dyn io::Read>,
         verification_key: Option<&mut dyn io::Read>,
@@ -88,12 +87,12 @@ impl BackendFactory for Halo2ProverFactory {
             return Err(Error::NoVariableDegreeAvailable);
         }
         let proof_type = ProofType::from(options);
-        let mut halo2 = Box::new(Halo2Prover::new(
-            pil,
-            Arc::new(get_only_size(&fixed)?),
-            setup,
-            proof_type,
-        )?);
+        let fixed = Arc::new(
+            fixed
+                .get_only_size()
+                .map_err(|_| Error::NoVariableDegreeAvailable)?,
+        );
+        let mut halo2 = Box::new(Halo2Prover::new(pil, fixed, setup, proof_type)?);
         if let Some(vk) = verification_key {
             halo2.add_verification_key(vk);
         }
@@ -191,7 +190,7 @@ impl BackendFactory for Halo2MockFactory {
     fn create<'a>(
         &self,
         pil: Arc>,
-        fixed: Arc>)>>,
+        fixed: Arc>,
         _output_dir: Option,
         setup: Option<&mut dyn io::Read>,
         verification_key: Option<&mut dyn io::Read>,
@@ -208,10 +207,13 @@ impl BackendFactory for Halo2MockFactory {
             return Err(Error::NoAggregationAvailable);
         }

-        Ok(Box::new(Halo2Mock {
-            pil,
-            fixed: Arc::new(get_only_size(&fixed)?),
-        }))
+        let fixed = Arc::new(
+            fixed
+                .get_only_size()
+                .map_err(|_| Error::NoVariableDegreeAvailable)?,
+        );
+
+        Ok(Box::new(Halo2Mock { pil, fixed }))
     }
 }

diff --git a/backend/src/lib.rs b/backend/src/lib.rs
index 7e3027a2c..09ca3002a 100644
--- a/backend/src/lib.rs
+++ b/backend/src/lib.rs
@@ -10,8 +10,8 @@ mod composite;

 use powdr_ast::analyzed::Analyzed;
 use powdr_executor::witgen::WitgenCallback;
-use powdr_number::{DegreeType, FieldElement};
-use std::{collections::BTreeMap, io, path::PathBuf, sync::Arc};
+use powdr_number::{DegreeType, FieldElement, FixedColumns};
+use std::{io, path::PathBuf, sync::Arc};
 use strum::{Display, EnumString, EnumVariantNames};

 #[derive(Clone, EnumString, EnumVariantNames, Display, Copy)]
@@ -131,11 +131,10 @@ pub type Proof = Vec;
 pub trait BackendFactory {
     /// Create a new backend object.
     #[allow(clippy::too_many_arguments)]
-    #[allow(clippy::type_complexity)]
     fn create<'a>(
         &self,
         pil: Arc>,
-        fixed: Arc>)>>,
+        fixed: Arc>,
         output_dir: Option,
         setup: Option<&mut dyn io::Read>,
         verification_key: Option<&mut dyn io::Read>,
@@ -186,19 +185,3 @@ pub trait Backend<'a, F: FieldElement> {
         Err(Error::NoEthereumVerifierAvailable)
     }
 }
-
-fn get_only_size(
-    columns: &[(String, BTreeMap>)],
-) -> Result)>, Error> {
-    // TODO: This clones the values
-    columns
-        .iter()
-        .map(|(name, column_by_size)| {
-            if column_by_size.len() != 1 {
-                return Err(Error::NoVariableDegreeAvailable);
-            }
-            let values = column_by_size.values().next().unwrap().clone();
-            Ok((name.clone(), values))
-        })
-        .collect()
-}
diff --git a/executor/src/constant_evaluator/mod.rs b/executor/src/constant_evaluator/mod.rs
index fe23392f2..83cfdb1eb 100644
--- a/executor/src/constant_evaluator/mod.rs
+++ b/executor/src/constant_evaluator/mod.rs
@@ -11,7 +11,7 @@ use powdr_ast::{
         IndexAccess,
     },
 };
-use powdr_number::{BigInt, BigUint, DegreeType, FieldElement};
+use powdr_number::{BigInt, BigUint, DegreeType, FieldElement, FixedColumns};
 use powdr_pil_analyzer::evaluator::{self, Definitions, SymbolLookup, Value};
 use rayon::prelude::{IntoParallelIterator, ParallelIterator};

@@ -20,7 +20,7 @@ use rayon::prelude::{IntoParallelIterator, ParallelIterator};
 /// @returns the names (in source order) and the values for the columns.
 /// Arrays of columns are flattened, the name of the `i`th array element
 /// is `name[i]`.
-pub fn generate(analyzed: &Analyzed) -> Vec<(String, BTreeMap>)> {
+pub fn generate(analyzed: &Analyzed) -> FixedColumns {
     let mut fixed_cols = HashMap::new();
     for (poly, value) in analyzed.constant_polys_in_source_order() {
         if let Some(value) = value {
@@ -38,7 +38,7 @@ pub fn generate(analyzed: &Analyzed) -> Vec<(String, BTreeMa
         .into_iter()
         .sorted_by_key(|(_, (id, _))| *id)
         .map(|(name, (_, values))| (name, [(values.len(), values)].into_iter().collect()))
-        .collect::>()
+        .into()
 }

 fn generate_values(
diff --git a/executor/src/witgen/block_processor.rs b/executor/src/witgen/block_processor.rs
index 6894aa1c2..234868d54 100644
--- a/executor/src/witgen/block_processor.rs
+++ b/executor/src/witgen/block_processor.rs
@@ -152,10 +152,7 @@ mod tests {
         f: impl Fn(BlockProcessor, BTreeMap, u64, usize) -> R,
     ) -> R {
         let analyzed = analyze_string(src);
-        let constants = generate(&analyzed)
-            .into_iter()
-            .map(|(n, c)| (n.to_string(), c.into_values().next().unwrap()))
-            .collect::>();
+        let constants = generate(&analyzed).get_only_size().unwrap();
         let fixed_data = FixedData::new(&analyzed, &constants, &[], Default::default(), 0);

         // No submachines
diff --git a/executor/src/witgen/global_constraints.rs b/executor/src/witgen/global_constraints.rs
index 6a7d5fa60..42f95f60a 100644
--- a/executor/src/witgen/global_constraints.rs
+++ b/executor/src/witgen/global_constraints.rs
@@ -436,7 +436,9 @@ namespace Global(2**20);
     [ D ] in [ SHIFTED ];
 ";
         let analyzed = powdr_pil_analyzer::analyze_string::(pil_source);
-        let constants = crate::constant_evaluator::generate(&analyzed);
+        let constants = crate::constant_evaluator::generate(&analyzed)
+            .get_only_size()
+            .unwrap();
         let fixed_polys = (0..constants.len())
             .map(|i| constant_poly_id(i as u64))
             .collect::>();
@@ -444,7 +446,6 @@ namespace Global(2**20);
             .iter()
             .zip(&constants)
             .filter_map(|(&poly_id, (_, values))| {
-                let values = values.values().next().unwrap();
                 process_fixed_column(values).map(|(constraint, _full)| (poly_id, constraint))
             })
             .collect::>();
diff --git a/executor/src/witgen/mod.rs b/executor/src/witgen/mod.rs
index 099840342..78971f34f 100644
--- a/executor/src/witgen/mod.rs
+++ b/executor/src/witgen/mod.rs
@@ -8,7 +8,7 @@ use powdr_ast::analyzed::{
 };
 use powdr_ast::parsed::visitor::ExpressionVisitable;
 use powdr_ast::parsed::{FunctionKind, LambdaExpression};
-use powdr_number::{DegreeType, FieldElement};
+use powdr_number::{DegreeType, FieldElement, WitnessColumns};

 use self::data_structures::column_map::{FixedColumnMap, WitnessColumnMap};
 pub use self::eval_result::{
@@ -87,6 +87,7 @@ impl WitgenCallback {
             .with_external_witness_values(current_witness)
             .with_challenges(stage, challenges)
             .generate()
+            .0
     }
 }

@@ -154,7 +155,7 @@ impl<'a, 'b, T: FieldElement> WitnessGenerator<'a, 'b, T> {
     /// Generates the committed polynomial values
     /// @returns the values (in source order) and the degree of the polynomials.
-    pub fn generate(self) -> Vec<(String, Vec)> {
+    pub fn generate(self) -> WitnessColumns {
         record_start(OUTER_CODE_NAME);
         let fixed = FixedData::new(
             self.analyzed,
@@ -250,7 +251,7 @@ impl<'a, 'b, T: FieldElement> WitnessGenerator<'a, 'b, T> {
                 assert!(!column.is_empty());
                 (name, column)
             })
-            .collect::>();
+            .into();

         log::debug!("Publics:");
         for (name, value) in extract_publics(&witness_cols, self.analyzed) {
@@ -261,10 +262,11 @@ impl<'a, 'b, T: FieldElement> WitnessGenerator<'a, 'b, T> {
 }

 pub fn extract_publics(
-    witness: &[(String, Vec)],
+    witness: &WitnessColumns,
     pil: &Analyzed,
 ) -> Vec<(String, T)> {
     let witness = witness
+        .0
         .iter()
         .map(|(name, col)| (name.clone(), col))
         .collect::>();
diff --git a/number/src/lib.rs b/number/src/lib.rs
index f2e2912b5..8ba1f9eb7 100644
--- a/number/src/lib.rs
+++ b/number/src/lib.rs
@@ -9,9 +9,11 @@ mod goldilocks;
 mod serialize;
 mod traits;

+use std::collections::BTreeMap;
+
+use serde::{Deserialize, Serialize};
 pub use serialize::{
-    buffered_write_file, read_fixed_file, read_polys_csv_file, read_witness_file, write_fixed_file,
-    write_polys_csv_file, write_witness_file, CsvRenderMode,
+    buffered_write_file, read_polys_csv_file, write_polys_csv_file, CsvRenderMode, ReadWrite,
 };

 pub use bn254::Bn254Field;
@@ -31,6 +33,51 @@ pub fn log2_exact(n: BigUint) -> Option {
         .filter(|zeros| n == (BigUint::from(1u32) << zeros))
 }

+#[derive(Serialize, Deserialize)]
+pub struct WitnessColumns(pub Vec<(String, Vec)>);
+
+#[derive(Serialize, Deserialize)]
+pub struct FixedColumns(pub Vec<(String, BTreeMap>)>);
+
+#[derive(Debug)]
+pub struct HasMultipleSizesError;
+
+impl FixedColumns {
+    pub fn get_only_size(&self) -> Result)>, HasMultipleSizesError> {
+        // TODO: This clones the values
+        self.0
+            .iter()
+            .map(|(name, column_by_size)| {
+                if column_by_size.len() != 1 {
+                    return Err(HasMultipleSizesError);
+                }
+                let values = column_by_size.values().next().unwrap().clone();
+                Ok((name.clone(), values))
+            })
+            .collect()
+    }
+
+    pub fn len(&self) -> usize {
+        self.0.len()
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+}
+
+impl>)>, F: Clone> From for FixedColumns {
+    fn from(iter: T) -> Self {
+        Self(iter.collect())
+    }
+}
+
+impl)>, F: Clone> From for WitnessColumns {
+    fn from(iter: T) -> Self {
+        Self(iter.collect())
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
diff --git a/number/src/serialize.rs b/number/src/serialize.rs
index 0e6c8c8d6..debaafcfb 100644
--- a/number/src/serialize.rs
+++ b/number/src/serialize.rs
@@ -1,5 +1,4 @@
 use std::{
-    collections::BTreeMap,
     fs::File,
     io::{self, BufWriter, Read, Write},
     path::Path,
@@ -7,9 +6,10 @@
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
 use csv::{Reader, Writer};
+use serde::{de::DeserializeOwned, Serialize};
 use serde_with::{DeserializeAs, SerializeAs};

-use crate::FieldElement;
+use crate::{FieldElement, FixedColumns, WitnessColumns};

 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum CsvRenderMode {
@@ -103,32 +103,30 @@ pub fn buffered_write_file(
     Ok(result)
 }

-pub fn write_witness_file(
-    path: &Path,
-    polys: &[(String, Vec)],
-) -> Result<(), serde_cbor::Error> {
-    buffered_write_file(path, |writer| serde_cbor::to_writer(writer, &polys))??;
-
-    Ok(())
+pub trait ReadWrite {
+    fn read(file: &mut impl Read) -> Self;
+    fn write(&self, path: &Path) -> Result<(), io::Error>;
 }

-pub fn write_fixed_file(
-    path: &Path,
-    polys: &[(String, BTreeMap>)],
-) -> Result<(), serde_cbor::Error> {
-    buffered_write_file(path, |writer| serde_cbor::to_writer(writer, &polys))??;
-
-    Ok(())
+impl ReadWrite for FixedColumns {
+    fn read(file: &mut impl Read) -> Self {
+        FixedColumns(serde_cbor::from_reader(file).unwrap())
+    }
+    fn write(&self, path: &Path) -> Result<(), io::Error> {
+        buffered_write_file(path, |writer| serde_cbor::to_writer(writer, &self))?.unwrap();
+        Ok(())
+    }
 }

-pub fn read_witness_file(file: &mut impl Read) -> Vec<(String, Vec)> {
-    serde_cbor::from_reader(file).unwrap()
-}
+impl ReadWrite for WitnessColumns {
+    fn read(file: &mut impl Read) -> Self {
+        WitnessColumns(serde_cbor::from_reader(file).unwrap())
+    }

-pub fn read_fixed_file(
-    file: &mut impl Read,
-) -> Vec<(String, BTreeMap>)> {
-    serde_cbor::from_reader(file).unwrap()
+    fn write(&self, path: &Path) -> Result<(), io::Error> {
+        buffered_write_file(path, |writer| serde_cbor::to_writer(writer, &self))?.unwrap();
+        Ok(())
+    }
 }

 // Serde wrappers for serialize/deserialize
diff --git a/pipeline/src/pipeline.rs b/pipeline/src/pipeline.rs
index 82a47fa1d..04bf4ee97 100644
--- a/pipeline/src/pipeline.rs
+++ b/pipeline/src/pipeline.rs
@@ -1,6 +1,5 @@
 use std::{
     borrow::Borrow,
-    collections::BTreeMap,
     fmt::Display,
     fs,
     io::{self, BufReader},
@@ -26,7 +25,7 @@ use powdr_executor::{
     },
 };
 use powdr_number::{
-    write_fixed_file, write_polys_csv_file, write_witness_file, CsvRenderMode, FieldElement,
+    write_polys_csv_file, CsvRenderMode, FieldElement, FixedColumns, ReadWrite, WitnessColumns,
 };
 use powdr_schemas::SerializedAnalyzed;

@@ -35,23 +34,6 @@ use crate::{
     util::{read_fixed_poly_set, read_witness_poly_set, FixedPolySet, WitnessPolySet},
 };

-fn get_only_size(columns: &[(String, BTreeMap>)]) -> Vec<(String, Vec)> {
-    // TODO: This clones the values
-    columns
-        .iter()
-        .map(|(name, column_by_size)| {
-            if column_by_size.len() != 1 {
-                panic!();
-            }
-            let values = column_by_size.values().next().unwrap().clone();
-            (name.clone(), values)
-        })
-        .collect()
-}
-
-type Columns = Vec<(String, Vec)>;
-type VariablySizedColumns = Vec<(String, BTreeMap>)>;
-
 #[derive(Default, Clone)]
 pub struct Artifacts {
     /// The path to a single .asm file.
@@ -82,9 +64,9 @@ pub struct Artifacts {
     /// An optimized .pil file.
     optimized_pil: Option>>,
     /// Fully evaluated fixed columns.
-    fixed_cols: Option>>,
+    fixed_cols: Option>>,
     /// Generated witnesses.
-    witness: Option>>,
+    witness: Option>>,
     /// The proof (if successful).
     proof: Option,
 }
@@ -426,6 +408,7 @@ impl Pipeline {
     /// Sets the witness to the provided value.
     pub fn set_witness(mut self, witness: Vec<(String, Vec)>) -> Self {
+        let witness = witness.into_iter().into();
         if self.output_dir.is_some() {
             // Some future steps (e.g. Pilcom verification) require the witness to be persisted.
             let fixed_cols = self.compute_fixed_cols().unwrap();
@@ -502,29 +485,26 @@ impl Pipeline {
         Ok(())
     }

-    fn maybe_write_constants(
-        &self,
-        constants: &[(String, BTreeMap>)],
-    ) -> Result<(), Vec> {
+    fn maybe_write_constants(&self, constants: &FixedColumns) -> Result<(), Vec> {
         if let Some(path) = self.path_if_should_write(|_| "constants.bin".to_string())? {
-            write_fixed_file(&path, constants).map_err(|e| vec![format!("{}", e)])?;
+            constants.write(&path).map_err(|e| vec![format!("{}", e)])?;
         }
         Ok(())
     }

     fn maybe_write_witness(
         &self,
-        fixed: &[(String, BTreeMap>)],
-        witness: &[(String, Vec)],
+        fixed: &FixedColumns,
+        witness: &WitnessColumns,
     ) -> Result<(), Vec> {
         if let Some(path) = self.path_if_should_write(|_| "commits.bin".to_string())? {
-            write_witness_file(&path, witness).map_err(|e| vec![format!("{}", e)])?;
+            witness.write(&path).map_err(|e| vec![format!("{}", e)])?;
         }

         if self.arguments.export_witness_csv {
             if let Some(path) = self.path_if_should_write(|name| format!("{name}_columns.csv"))? {
-                let fixed = get_only_size(fixed);
-                let columns = fixed.iter().chain(witness.iter()).collect::>();
+                let fixed = fixed.get_only_size().unwrap();
+                let columns = fixed.iter().chain(witness.0.iter()).collect::>();

                 let csv_file = fs::File::create(path).map_err(|e| vec![format!("{}", e)])?;
                 write_polys_csv_file(csv_file, self.arguments.csv_render_mode, &columns);
@@ -806,7 +786,7 @@ impl Pipeline {
         Ok(self.artifact.optimized_pil.as_ref().unwrap().clone())
     }

-    pub fn compute_fixed_cols(&mut self) -> Result>, Vec> {
+    pub fn compute_fixed_cols(&mut self) -> Result>, Vec> {
         if let Some(ref fixed_cols) = self.artifact.fixed_cols {
             return Ok(fixed_cols.clone());
         }
@@ -825,11 +805,11 @@ impl Pipeline {
         Ok(self.artifact.fixed_cols.as_ref().unwrap().clone())
     }

-    pub fn fixed_cols(&self) -> Result>, Vec> {
+    pub fn fixed_cols(&self) -> Result>, Vec> {
         Ok(self.artifact.fixed_cols.as_ref().unwrap().clone())
     }

-    pub fn compute_witness(&mut self) -> Result>, Vec> {
+    pub fn compute_witness(&mut self) -> Result>, Vec> {
         if let Some(ref witness) = self.artifact.witness {
             return Ok(witness.clone());
         }
@@ -848,10 +828,11 @@ impl Pipeline {
             .query_callback
             .clone()
             .unwrap_or_else(|| Arc::new(unused_query_callback()));
-        let fixed_cols_one_size = get_only_size(&fixed_cols);
-        let witness = WitnessGenerator::new(&pil, &fixed_cols_one_size, query_callback.borrow())
-            .with_external_witness_values(&external_witness_values)
-            .generate();
+        let fixed_cols_one_size = fixed_cols.get_only_size().unwrap();
+        let witness: WitnessColumns =
+            WitnessGenerator::new(&pil, &fixed_cols_one_size, query_callback.borrow())
+                .with_external_witness_values(&external_witness_values)
+                .generate();

         self.log(&format!("Took {}", start.elapsed().as_secs_f32()));

@@ -862,7 +843,7 @@ impl Pipeline {
         Ok(self.artifact.witness.as_ref().unwrap().clone())
     }

-    pub fn witness(&self) -> Result>, Vec> {
+    pub fn witness(&self) -> Result>, Vec> {
         Ok(self.artifact.witness.as_ref().unwrap().clone())
     }

@@ -873,7 +854,7 @@ impl Pipeline {
     }

     pub fn witgen_callback(&mut self) -> Result, Vec> {
-        let fixed_cols = Arc::new(get_only_size(&self.compute_fixed_cols()?));
+        let fixed_cols = Arc::new(self.compute_fixed_cols()?.get_only_size().unwrap());
         Ok(WitgenCallback::new(
             self.compute_optimized_pil()?,
             fixed_cols,
@@ -938,7 +919,7 @@ impl Pipeline {
             .as_ref()
             .map(|path| fs::read(path).unwrap());

-        let proof = match backend.prove(&witness, existing_proof, witgen_callback) {
+        let proof = match backend.prove(&witness.0, existing_proof, witgen_callback) {
             Ok(proof) => proof,
             Err(powdr_backend::Error::BackendError(e)) => {
                 return Err(vec![e.to_string()]);
diff --git a/pipeline/src/util.rs b/pipeline/src/util.rs
index e7dbefa90..b5a062a3b 100644
--- a/pipeline/src/util.rs
+++ b/pipeline/src/util.rs
@@ -1,6 +1,6 @@
 use powdr_ast::analyzed::{Analyzed, FunctionValueDefinition, Symbol};
-use powdr_number::{read_fixed_file, read_witness_file, FieldElement};
-use std::{collections::BTreeMap, fs::File, io::BufReader, path::Path};
+use powdr_number::{FieldElement, FixedColumns, ReadWrite, WitnessColumns};
+use std::{fs::File, io::BufReader, path::Path};

 pub trait PolySet {
     const FILE_NAME: &'static str;
@@ -31,16 +31,12 @@ impl PolySet for WitnessPolySet {
     }
 }

-#[allow(clippy::type_complexity)]
-pub fn read_witness_poly_set(dir: &Path) -> Vec<(String, Vec)> {
+pub fn read_witness_poly_set(dir: &Path) -> WitnessColumns {
     let path = dir.join(P::FILE_NAME);
-    read_witness_file(&mut BufReader::new(File::open(path).unwrap()))
+    WitnessColumns::read(&mut BufReader::new(File::open(path).unwrap()))
 }

-#[allow(clippy::type_complexity)]
-pub fn read_fixed_poly_set(
-    dir: &Path,
-) -> Vec<(String, BTreeMap>)> {
+pub fn read_fixed_poly_set(dir: &Path) -> FixedColumns {
     let path = dir.join(P::FILE_NAME);
-    read_fixed_file(&mut BufReader::new(File::open(path).unwrap()))
+    FixedColumns::read(&mut BufReader::new(File::open(path).unwrap()))
 }
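
For orientation only (not part of the patch): the sketch below shows how the `FixedColumns` wrapper and the `ReadWrite` trait introduced above might be used together, based solely on the API visible in this diff. The `F: FieldElement` bound on `ReadWrite`, the generic parameters of `FixedColumns` (which are not legible in this rendering of the patch), and the `store_fixed` helper and `constants.bin` file name are assumptions made for illustration.

```rust
use std::{collections::BTreeMap, io, path::Path};

use powdr_number::{FieldElement, FixedColumns, ReadWrite};

/// Illustrative sketch, not part of the patch: wraps raw single-size columns,
/// persists them via the new ReadWrite trait, and returns the flattened form.
fn store_fixed<F: FieldElement>(
    raw: Vec<(String, Vec<F>)>,
    dir: &Path,
) -> Result<Vec<(String, Vec<F>)>, io::Error> {
    // Key each column by its (single) size, mirroring the `.into()` call sites in the
    // diff; the `usize` key matches the `values.len()` used there (an assumption here).
    let fixed: FixedColumns<F> = raw
        .into_iter()
        .map(|(name, values)| {
            let by_size: BTreeMap<usize, Vec<F>> =
                [(values.len(), values)].into_iter().collect();
            (name, by_size)
        })
        .into();

    // Replacement for the removed write_fixed_file helper.
    fixed.write(&dir.join("constants.bin"))?;

    // Collapse back to one column per name; errors if any column has several sizes.
    Ok(fixed.get_only_size().expect("every column has exactly one size"))
}
```

Under the same assumptions, `WitnessColumns::read` / `witness.write(path)` give the matching round trip for witness columns, which is how `pipeline/src/util.rs` and `maybe_write_witness` are changed in the hunks above.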