Speed up tests #1556

Merged
merged 6 commits into from
Jul 12, 2024
Changes from 4 commits
15 changes: 12 additions & 3 deletions pipeline/src/pipeline.rs
@@ -5,11 +5,13 @@ use std::{
io::{self, BufReader},
marker::Send,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::Instant,
};

use log::Level;
use mktemp::Temp;
use powdr_ast::{
analyzed::Analyzed,
asm_analysis::AnalysisASMFile,
@@ -114,6 +116,10 @@ pub struct Pipeline<T: FieldElement> {
artifact: Artifacts<T>,
/// Output directory for intermediate files. If None, no files are written.
output_dir: Option<PathBuf>,
/// The temporary directory, owned by the pipeline (or any copies of it).
/// This object is not used directly, but keeping it here ensures that the directory
/// is not deleted until the pipeline is dropped.
_tmp_dir: Option<Rc<Temp>>,
Comment on lines +119 to +122
Collaborator (Author):
This is much more convenient and safer; otherwise you have to keep the Temp object around for as long as the pipeline lives.

I think we used to do it like that before; I'm not sure why we stopped. Maybe because we wanted Pipeline to be clone-able? But the Rc does exactly what you'd want.
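
Roughly, the ownership pattern described here looks like the following standalone sketch. It uses a placeholder TempDir type instead of the real mktemp::Temp and powdr Pipeline, purely to show why the Rc keeps the directory alive across clones:

```rust
use std::rc::Rc;

// Placeholder for a temp-dir handle whose Drop removes the directory on disk
// (mktemp::Temp behaves this way; this type exists only for the example).
struct TempDir(&'static str);

impl Drop for TempDir {
    fn drop(&mut self) {
        println!("deleting {}", self.0);
    }
}

#[derive(Clone)]
struct Pipeline {
    // Never read directly; holding the Rc keeps the directory alive until the
    // last clone of the pipeline is dropped.
    _tmp_dir: Option<Rc<TempDir>>,
}

fn main() {
    let p1 = Pipeline {
        _tmp_dir: Some(Rc::new(TempDir("/tmp/powdr-example"))),
    };
    let p2 = p1.clone(); // both pipelines share ownership of the directory
    drop(p1); // directory survives: p2 still holds an Rc to it
    drop(p2); // only now is "deleting /tmp/powdr-example" printed
}
```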

/// The name of the pipeline. Used to name output files.
name: Option<String>,
/// Whether to overwrite existing files. If false, an error is returned if a file
@@ -140,6 +146,7 @@
Pipeline {
artifact: Default::default(),
output_dir: None,
_tmp_dir: None,
log_level: Level::Info,
name: None,
force_overwrite: false,
@@ -190,12 +197,14 @@
/// let proof = pipeline.compute_proof().unwrap();
/// ```
impl<T: FieldElement> Pipeline<T> {
/// Initializes the output directory to a temporary directory.
/// Note that the user is responsible for keeping the temporary directory alive.
pub fn with_tmp_output(self, tmp_dir: &mktemp::Temp) -> Self {
/// Initializes the output directory to a temporary directory which lives as long
/// as the pipeline does.
pub fn with_tmp_output(self) -> Self {
let tmp_dir = Rc::new(mktemp::Temp::new_dir().unwrap());
Pipeline {
output_dir: Some(tmp_dir.to_path_buf()),
force_overwrite: true,
_tmp_dir: Some(tmp_dir),
..self
}
}
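For context, the before/after call shape of this API change, as a sketch based only on the calls visible in this diff (the test file path is hypothetical):

```rust
// Before: the caller created the temp dir and had to keep it alive itself.
// let tmp_dir = mktemp::Temp::new_dir().unwrap();
// let pipeline = Pipeline::<GoldilocksField>::default().with_tmp_output(&tmp_dir);

// After: the pipeline creates and owns the temp dir; no handle to babysit.
let pipeline = Pipeline::<GoldilocksField>::default()
    .with_tmp_output()
    .from_file(resolve_test_file("asm/example.asm")) // hypothetical test file
    .with_prover_inputs(vec![]);
```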
70 changes: 33 additions & 37 deletions pipeline/src/test_util.rs
@@ -67,43 +67,47 @@ pub fn verify_pipeline(
// TODO: Also test Composite variants
let mut pipeline = pipeline.with_backend(backend, None);

let tmp_dir = mktemp::Temp::new_dir().unwrap();
if pipeline.output_dir().is_none() {
pipeline = pipeline.with_tmp_output(&tmp_dir);
pipeline = pipeline.with_tmp_output();
}

pipeline.compute_proof().unwrap();

verify(pipeline.output_dir().as_ref().unwrap())
}

pub fn make_pipeline<T: FieldElement>(file_name: &str, inputs: Vec<T>) -> Pipeline<T> {
let mut pipeline = Pipeline::default()
.with_tmp_output()
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs);
pipeline.compute_witness().unwrap();
pipeline
}

pub fn gen_estark_proof(file_name: &str, inputs: Vec<GoldilocksField>) {
gen_estark_proof_with_backend_variant(file_name, inputs.clone(), BackendVariant::Monolithic);
gen_estark_proof_with_backend_variant(file_name, inputs, BackendVariant::Composite);
let pipeline = make_pipeline(file_name, inputs);
gen_estark_proof_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
gen_estark_proof_with_backend_variant(pipeline, BackendVariant::Composite);
}

pub fn gen_estark_proof_with_backend_variant(
file_name: &str,
inputs: Vec<GoldilocksField>,
pipeline: Pipeline<GoldilocksField>,
backend_variant: BackendVariant,
) {
let tmp_dir = mktemp::Temp::new_dir().unwrap();
let backend = match backend_variant {
BackendVariant::Monolithic => BackendType::EStarkStarky,
BackendVariant::Composite => BackendType::EStarkStarkyComposite,
};
let mut pipeline = Pipeline::default()
.with_tmp_output(&tmp_dir)
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs)
.with_backend(backend, None);
let mut pipeline = pipeline.with_backend(backend, None);

pipeline.clone().compute_proof().unwrap();

// Repeat the proof generation, but with an externally generated verification key

// Verification Key
let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
let output_dir = pipeline.output_dir().as_ref().unwrap();
let vkey_file_path = output_dir.join("verification_key.bin");
buffered_write_file(&vkey_file_path, |writer| {
pipeline.export_verification_key(writer).unwrap()
})
@@ -126,8 +130,9 @@ pub fn gen_estark_proof_with_backend_variant(
}

pub fn test_halo2(file_name: &str, inputs: Vec<Bn254Field>) {
test_halo2_with_backend_variant(file_name, inputs.clone(), BackendVariant::Monolithic);
test_halo2_with_backend_variant(file_name, inputs, BackendVariant::Composite);
let pipeline = make_pipeline(file_name, inputs);
test_halo2_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
test_halo2_with_backend_variant(pipeline, BackendVariant::Composite);
}

/// Whether to compute a monolithic or composite proof.
Expand All @@ -138,8 +143,7 @@ pub enum BackendVariant {

#[cfg(feature = "halo2")]
pub fn test_halo2_with_backend_variant(
file_name: &str,
inputs: Vec<Bn254Field>,
pipeline: Pipeline<Bn254Field>,
backend_variant: BackendVariant,
) {
use std::env;
@@ -150,9 +154,8 @@
};

// Generate a mock proof (fast and has good error messages)
Pipeline::default()
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs.clone())
pipeline
.clone()
.with_backend(backend, None)
.compute_proof()
.unwrap();
@@ -163,31 +166,25 @@
.map(|v| v == "true")
.unwrap_or(false);
if is_nightly_test {
gen_halo2_proof(file_name, inputs, backend_variant);
gen_halo2_proof(pipeline, backend_variant);
}
}

#[cfg(not(feature = "halo2"))]
pub fn test_halo2_with_backend_variant(
_file_name: &str,
_inputs: Vec<Bn254Field>,
_pipeline: Pipeline<Bn254Field>,
backend_variant: BackendVariant,
) {
}

#[cfg(feature = "halo2")]
pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: BackendVariant) {
pub fn gen_halo2_proof(pipeline: Pipeline<Bn254Field>, backend: BackendVariant) {
let backend = match backend {
BackendVariant::Monolithic => BackendType::Halo2,
BackendVariant::Composite => BackendType::Halo2Composite,
};

let tmp_dir = mktemp::Temp::new_dir().unwrap();
let mut pipeline = Pipeline::default()
.with_tmp_output(&tmp_dir)
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs)
.with_backend(backend, None);
let mut pipeline = pipeline.clone().with_backend(backend, None);

// Generate a proof with the setup and verification key generated on the fly
pipeline.clone().compute_proof().unwrap();
@@ -196,7 +193,8 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: Backen
let pil = pipeline.compute_optimized_pil().unwrap();

// Setup
let setup_file_path = tmp_dir.as_path().join("params.bin");
let output_dir = pipeline.output_dir().clone().unwrap();
let setup_file_path = output_dir.join("params.bin");
buffered_write_file(&setup_file_path, |writer| {
powdr_backend::BackendType::Halo2
.factory::<Bn254Field>()
@@ -207,7 +205,7 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: Backen
let mut pipeline = pipeline.with_setup_file(Some(setup_file_path));

// Verification Key
let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
let vkey_file_path = output_dir.join("verification_key.bin");
buffered_write_file(&vkey_file_path, |writer| {
pipeline.export_verification_key(writer).unwrap()
})
@@ -230,13 +228,12 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: Backen
}

#[cfg(not(feature = "halo2"))]
pub fn gen_halo2_proof(_file_name: &str, _inputs: Vec<Bn254Field>, _backend: BackendVariant) {}
pub fn gen_halo2_proof(_pipeline: Pipeline<Bn254Field>, _backend: BackendVariant) {}

#[cfg(feature = "plonky3")]
pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {
let tmp_dir = mktemp::Temp::new_dir().unwrap();
let mut pipeline = Pipeline::default()
.with_tmp_output(&tmp_dir)
.with_tmp_output()
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs)
.with_backend(powdr_backend::BackendType::Plonky3, None);
@@ -323,9 +320,8 @@ pub fn assert_proofs_fail_for_invalid_witnesses_pilcom(
file_name: &str,
witness: &[(String, Vec<u64>)],
) {
let tmp_dir = mktemp::Temp::new_dir().unwrap();
let pipeline = Pipeline::<GoldilocksField>::default()
.with_tmp_output(&tmp_dir)
.with_tmp_output()
.from_file(resolve_test_file(file_name))
.set_witness(convert_witness(witness));

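The speed-up pattern these helpers now share, in short: make_pipeline computes the witness once, and the resulting pipeline (including its artifacts and owned temp dir) is cloned for each backend variant instead of re-running the whole front end. A sketch of the call pattern taken from this diff (the test file name is hypothetical, and it is assumed that cloning the pipeline preserves the already-computed witness):

```rust
let pipeline = make_pipeline::<GoldilocksField>("asm/example.asm", vec![]); // witness computed once
gen_estark_proof_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
gen_estark_proof_with_backend_variant(pipeline, BackendVariant::Composite); // reuses the same witness
```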
8 changes: 4 additions & 4 deletions pipeline/tests/asm.rs
@@ -2,8 +2,8 @@ use powdr_backend::BackendType;
use powdr_number::{Bn254Field, FieldElement, GoldilocksField};
use powdr_pipeline::{
test_util::{
gen_estark_proof, gen_estark_proof_with_backend_variant, resolve_test_file, test_halo2,
test_halo2_with_backend_variant, verify_test_file, BackendVariant,
gen_estark_proof, gen_estark_proof_with_backend_variant, make_pipeline, resolve_test_file,
test_halo2, test_halo2_with_backend_variant, verify_test_file, BackendVariant,
},
util::{read_poly_set, FixedPolySet, WitnessPolySet},
Pipeline,
@@ -227,8 +227,8 @@ fn vm_to_block_different_length() {
let f = "asm/vm_to_block_different_length.asm";
// Because machines have different lengths, this can only be proven
// with a composite proof.
test_halo2_with_backend_variant(f, vec![], BackendVariant::Composite);
gen_estark_proof_with_backend_variant(f, vec![], BackendVariant::Composite);
test_halo2_with_backend_variant(make_pipeline(f, vec![]), BackendVariant::Composite);
gen_estark_proof_with_backend_variant(make_pipeline(f, vec![]), BackendVariant::Composite);
}

#[test]
8 changes: 4 additions & 4 deletions pipeline/tests/pil.rs
@@ -5,8 +5,8 @@ use powdr_pipeline::test_util::{
assert_proofs_fail_for_invalid_witnesses, assert_proofs_fail_for_invalid_witnesses_estark,
assert_proofs_fail_for_invalid_witnesses_halo2,
assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof,
gen_estark_proof_with_backend_variant, test_halo2, test_halo2_with_backend_variant,
test_plonky3, verify_test_file, BackendVariant,
gen_estark_proof_with_backend_variant, make_pipeline, test_halo2,
test_halo2_with_backend_variant, test_plonky3, verify_test_file, BackendVariant,
};

use test_log::test;
@@ -313,8 +313,8 @@ fn different_degrees() {
let f = "pil/different_degrees.pil";
// Because machines have different lengths, this can only be proven
// with a composite proof.
test_halo2_with_backend_variant(f, vec![], BackendVariant::Composite);
gen_estark_proof_with_backend_variant(f, vec![], BackendVariant::Composite);
test_halo2_with_backend_variant(make_pipeline(f, vec![]), BackendVariant::Composite);
gen_estark_proof_with_backend_variant(make_pipeline(f, vec![]), BackendVariant::Composite);
}

#[test]
8 changes: 4 additions & 4 deletions pipeline/tests/powdr_std.rs
@@ -6,8 +6,8 @@ use powdr_pil_analyzer::evaluator::Value;
use powdr_pipeline::{
test_util::{
evaluate_function, evaluate_integer_function, execute_test_file, gen_estark_proof,
gen_halo2_proof, resolve_test_file, std_analyzed, test_halo2, verify_test_file,
BackendVariant,
gen_halo2_proof, make_pipeline, resolve_test_file, std_analyzed, test_halo2,
verify_test_file, BackendVariant,
},
Pipeline,
};
@@ -21,8 +21,8 @@ fn poseidon_bn254_test() {
// `test_halo2` only does a mock proof in the PR tests.
// This makes sure we test the whole proof generation for one example
// file even in the PR tests.
gen_halo2_proof(f, Default::default(), BackendVariant::Monolithic);
gen_halo2_proof(f, Default::default(), BackendVariant::Composite);
gen_halo2_proof(make_pipeline(f, vec![]), BackendVariant::Monolithic);
gen_halo2_proof(make_pipeline(f, vec![]), BackendVariant::Composite);
}

#[test]