Skip to content

Commit

Permalink
Speed up tests (#1556)
Browse files Browse the repository at this point in the history
Recent PRs (#1538, #1534) have significantly slowed down the CI, because
they run every test twice, once with the "plain" backend and once with
the `CompositeBackend`.

This PR makes sure that at least the shared steps before proving (analyzing
& optimizing PIL, fixed column computation, witness computation) are done
only once, by making a "prepared" pipeline once and cloning it to test
different backends.

This leads to a slight speed-up in the tests.
  • Loading branch information
georgwiese committed Jul 12, 2024
1 parent 6bd82ff commit 3801937
Show file tree
Hide file tree
Showing 5 changed files with 72 additions and 53 deletions.
15 changes: 12 additions & 3 deletions pipeline/src/pipeline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,13 @@ use std::{
io::{self, BufReader},
marker::Send,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::Instant,
};

use log::Level;
use mktemp::Temp;
use powdr_ast::{
analyzed::Analyzed,
asm_analysis::AnalysisASMFile,
Expand Down Expand Up @@ -114,6 +116,10 @@ pub struct Pipeline<T: FieldElement> {
artifact: Artifacts<T>,
/// Output directory for intermediate files. If None, no files are written.
output_dir: Option<PathBuf>,
/// The temporary directory, owned by the pipeline (or any copies of it).
/// This object is not used directly, but keeping it here ensures that the directory
/// is not deleted until the pipeline is dropped.
_tmp_dir: Option<Rc<Temp>>,
/// The name of the pipeline. Used to name output files.
name: Option<String>,
/// Whether to overwrite existing files. If false, an error is returned if a file
Expand All @@ -140,6 +146,7 @@ where
Pipeline {
artifact: Default::default(),
output_dir: None,
_tmp_dir: None,
log_level: Level::Info,
name: None,
force_overwrite: false,
Expand Down Expand Up @@ -190,12 +197,14 @@ where
/// let proof = pipeline.compute_proof().unwrap();
/// ```
impl<T: FieldElement> Pipeline<T> {
/// Initializes the output directory to a temporary directory.
/// Note that the user is responsible for keeping the temporary directory alive.
pub fn with_tmp_output(self, tmp_dir: &mktemp::Temp) -> Self {
    /// Initializes the output directory to a temporary directory which lives as long
    /// as the pipeline does.
pub fn with_tmp_output(self) -> Self {
let tmp_dir = Rc::new(mktemp::Temp::new_dir().unwrap());
Pipeline {
output_dir: Some(tmp_dir.to_path_buf()),
force_overwrite: true,
_tmp_dir: Some(tmp_dir),
..self
}
}
Expand Down
76 changes: 38 additions & 38 deletions pipeline/src/test_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,43 +67,50 @@ pub fn verify_pipeline(
// TODO: Also test Composite variants
let mut pipeline = pipeline.with_backend(backend, None);

let tmp_dir = mktemp::Temp::new_dir().unwrap();
if pipeline.output_dir().is_none() {
pipeline = pipeline.with_tmp_output(&tmp_dir);
pipeline = pipeline.with_tmp_output();
}

pipeline.compute_proof().unwrap();

verify(pipeline.output_dir().as_ref().unwrap())
}

/// Makes a new pipeline for the given file and inputs. All steps until witness generation are
/// already computed, so that the test can branch off from there, without having to re-compute
/// these steps.
pub fn make_prepared_pipeline<T: FieldElement>(file_name: &str, inputs: Vec<T>) -> Pipeline<T> {
let mut pipeline = Pipeline::default()
.with_tmp_output()
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs);
pipeline.compute_witness().unwrap();
pipeline
}

pub fn gen_estark_proof(file_name: &str, inputs: Vec<GoldilocksField>) {
gen_estark_proof_with_backend_variant(file_name, inputs.clone(), BackendVariant::Monolithic);
gen_estark_proof_with_backend_variant(file_name, inputs, BackendVariant::Composite);
let pipeline = make_prepared_pipeline(file_name, inputs);
gen_estark_proof_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
gen_estark_proof_with_backend_variant(pipeline, BackendVariant::Composite);
}

pub fn gen_estark_proof_with_backend_variant(
file_name: &str,
inputs: Vec<GoldilocksField>,
pipeline: Pipeline<GoldilocksField>,
backend_variant: BackendVariant,
) {
let tmp_dir = mktemp::Temp::new_dir().unwrap();
let backend = match backend_variant {
BackendVariant::Monolithic => BackendType::EStarkStarky,
BackendVariant::Composite => BackendType::EStarkStarkyComposite,
};
let mut pipeline = Pipeline::default()
.with_tmp_output(&tmp_dir)
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs)
.with_backend(backend, None);
let mut pipeline = pipeline.with_backend(backend, None);

pipeline.clone().compute_proof().unwrap();

// Repeat the proof generation, but with an externally generated verification key

// Verification Key
let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
let output_dir = pipeline.output_dir().as_ref().unwrap();
let vkey_file_path = output_dir.join("verification_key.bin");
buffered_write_file(&vkey_file_path, |writer| {
pipeline.export_verification_key(writer).unwrap()
})
Expand All @@ -126,8 +133,9 @@ pub fn gen_estark_proof_with_backend_variant(
}

pub fn test_halo2(file_name: &str, inputs: Vec<Bn254Field>) {
test_halo2_with_backend_variant(file_name, inputs.clone(), BackendVariant::Monolithic);
test_halo2_with_backend_variant(file_name, inputs, BackendVariant::Composite);
let pipeline = make_prepared_pipeline(file_name, inputs);
test_halo2_with_backend_variant(pipeline.clone(), BackendVariant::Monolithic);
test_halo2_with_backend_variant(pipeline, BackendVariant::Composite);
}

/// Whether to compute a monolithic or composite proof.
Expand All @@ -138,8 +146,7 @@ pub enum BackendVariant {

#[cfg(feature = "halo2")]
pub fn test_halo2_with_backend_variant(
file_name: &str,
inputs: Vec<Bn254Field>,
pipeline: Pipeline<Bn254Field>,
backend_variant: BackendVariant,
) {
use std::env;
Expand All @@ -150,9 +157,8 @@ pub fn test_halo2_with_backend_variant(
};

// Generate a mock proof (fast and has good error messages)
Pipeline::default()
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs.clone())
pipeline
.clone()
.with_backend(backend, None)
.compute_proof()
.unwrap();
Expand All @@ -163,31 +169,25 @@ pub fn test_halo2_with_backend_variant(
.map(|v| v == "true")
.unwrap_or(false);
if is_nightly_test {
gen_halo2_proof(file_name, inputs, backend_variant);
gen_halo2_proof(pipeline, backend_variant);
}
}

#[cfg(not(feature = "halo2"))]
pub fn test_halo2_with_backend_variant(
_file_name: &str,
_inputs: Vec<Bn254Field>,
_pipeline: Pipeline<Bn254Field>,
backend_variant: BackendVariant,
) {
}

#[cfg(feature = "halo2")]
pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: BackendVariant) {
pub fn gen_halo2_proof(pipeline: Pipeline<Bn254Field>, backend: BackendVariant) {
let backend = match backend {
BackendVariant::Monolithic => BackendType::Halo2,
BackendVariant::Composite => BackendType::Halo2Composite,
};

let tmp_dir = mktemp::Temp::new_dir().unwrap();
let mut pipeline = Pipeline::default()
.with_tmp_output(&tmp_dir)
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs)
.with_backend(backend, None);
let mut pipeline = pipeline.clone().with_backend(backend, None);

// Generate a proof with the setup and verification key generated on the fly
pipeline.clone().compute_proof().unwrap();
Expand All @@ -196,7 +196,8 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: Backen
let pil = pipeline.compute_optimized_pil().unwrap();

// Setup
let setup_file_path = tmp_dir.as_path().join("params.bin");
let output_dir = pipeline.output_dir().clone().unwrap();
let setup_file_path = output_dir.join("params.bin");
buffered_write_file(&setup_file_path, |writer| {
powdr_backend::BackendType::Halo2
.factory::<Bn254Field>()
Expand All @@ -207,7 +208,7 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: Backen
let mut pipeline = pipeline.with_setup_file(Some(setup_file_path));

// Verification Key
let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
let vkey_file_path = output_dir.join("verification_key.bin");
buffered_write_file(&vkey_file_path, |writer| {
pipeline.export_verification_key(writer).unwrap()
})
Expand All @@ -230,13 +231,12 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>, backend: Backen
}

#[cfg(not(feature = "halo2"))]
pub fn gen_halo2_proof(_file_name: &str, _inputs: Vec<Bn254Field>, _backend: BackendVariant) {}
pub fn gen_halo2_proof(_pipeline: Pipeline<Bn254Field>, _backend: BackendVariant) {}

#[cfg(feature = "plonky3")]
pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {
let tmp_dir = mktemp::Temp::new_dir().unwrap();
let mut pipeline = Pipeline::default()
.with_tmp_output(&tmp_dir)
.with_tmp_output()
.from_file(resolve_test_file(file_name))
.with_prover_inputs(inputs)
.with_backend(powdr_backend::BackendType::Plonky3, None);
Expand All @@ -256,7 +256,8 @@ pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {

if pipeline.optimized_pil().unwrap().constant_count() > 0 {
// Export verification Key
let vkey_file_path = tmp_dir.as_path().join("verification_key.bin");
let output_dir = pipeline.output_dir().as_ref().unwrap();
let vkey_file_path = output_dir.join("verification_key.bin");
buffered_write_file(&vkey_file_path, |writer| {
pipeline.export_verification_key(writer).unwrap()
})
Expand Down Expand Up @@ -323,9 +324,8 @@ pub fn assert_proofs_fail_for_invalid_witnesses_pilcom(
file_name: &str,
witness: &[(String, Vec<u64>)],
) {
let tmp_dir = mktemp::Temp::new_dir().unwrap();
let pipeline = Pipeline::<GoldilocksField>::default()
.with_tmp_output(&tmp_dir)
.with_tmp_output()
.from_file(resolve_test_file(file_name))
.set_witness(convert_witness(witness));

Expand Down
12 changes: 8 additions & 4 deletions pipeline/tests/asm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,9 @@ use powdr_backend::BackendType;
use powdr_number::{Bn254Field, FieldElement, GoldilocksField};
use powdr_pipeline::{
test_util::{
gen_estark_proof, gen_estark_proof_with_backend_variant, resolve_test_file, test_halo2,
test_halo2_with_backend_variant, verify_test_file, BackendVariant,
gen_estark_proof, gen_estark_proof_with_backend_variant, make_prepared_pipeline,
resolve_test_file, test_halo2, test_halo2_with_backend_variant, verify_test_file,
BackendVariant,
},
util::{read_poly_set, FixedPolySet, WitnessPolySet},
Pipeline,
Expand Down Expand Up @@ -227,8 +228,11 @@ fn vm_to_block_different_length() {
let f = "asm/vm_to_block_different_length.asm";
// Because machines have different lengths, this can only be proven
// with a composite proof.
test_halo2_with_backend_variant(f, vec![], BackendVariant::Composite);
gen_estark_proof_with_backend_variant(f, vec![], BackendVariant::Composite);
test_halo2_with_backend_variant(make_prepared_pipeline(f, vec![]), BackendVariant::Composite);
gen_estark_proof_with_backend_variant(
make_prepared_pipeline(f, vec![]),
BackendVariant::Composite,
);
}

#[test]
Expand Down
11 changes: 7 additions & 4 deletions pipeline/tests/pil.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ use powdr_pipeline::test_util::{
assert_proofs_fail_for_invalid_witnesses, assert_proofs_fail_for_invalid_witnesses_estark,
assert_proofs_fail_for_invalid_witnesses_halo2,
assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof,
gen_estark_proof_with_backend_variant, test_halo2, test_halo2_with_backend_variant,
test_plonky3, verify_test_file, BackendVariant,
gen_estark_proof_with_backend_variant, make_prepared_pipeline, test_halo2,
test_halo2_with_backend_variant, test_plonky3, verify_test_file, BackendVariant,
};

use test_log::test;
Expand Down Expand Up @@ -313,8 +313,11 @@ fn different_degrees() {
let f = "pil/different_degrees.pil";
// Because machines have different lengths, this can only be proven
// with a composite proof.
test_halo2_with_backend_variant(f, vec![], BackendVariant::Composite);
gen_estark_proof_with_backend_variant(f, vec![], BackendVariant::Composite);
test_halo2_with_backend_variant(make_prepared_pipeline(f, vec![]), BackendVariant::Composite);
gen_estark_proof_with_backend_variant(
make_prepared_pipeline(f, vec![]),
BackendVariant::Composite,
);
}

#[test]
Expand Down
11 changes: 7 additions & 4 deletions pipeline/tests/powdr_std.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ use powdr_pil_analyzer::evaluator::Value;
use powdr_pipeline::{
test_util::{
evaluate_function, evaluate_integer_function, execute_test_file, gen_estark_proof,
gen_halo2_proof, resolve_test_file, std_analyzed, test_halo2, verify_test_file,
BackendVariant,
gen_halo2_proof, make_prepared_pipeline, resolve_test_file, std_analyzed, test_halo2,
verify_test_file, BackendVariant,
},
Pipeline,
};
Expand All @@ -21,8 +21,11 @@ fn poseidon_bn254_test() {
// `test_halo2` only does a mock proof in the PR tests.
// This makes sure we test the whole proof generation for one example
// file even in the PR tests.
gen_halo2_proof(f, Default::default(), BackendVariant::Monolithic);
gen_halo2_proof(f, Default::default(), BackendVariant::Composite);
gen_halo2_proof(
make_prepared_pipeline(f, vec![]),
BackendVariant::Monolithic,
);
gen_halo2_proof(make_prepared_pipeline(f, vec![]), BackendVariant::Composite);
}

#[test]
Expand Down

0 comments on commit 3801937

Please sign in to comment.