Change column map to work efficiently with slices of column IDs. #1515

Open · wants to merge 7 commits into base: main
15 changes: 5 additions & 10 deletions executor/src/witgen/block_processor.rs
@@ -114,9 +114,9 @@ impl<'a, 'b, 'c, T: FieldElement, Q: QueryCallback<T>> BlockProcessor<'a, 'b, 'c

#[cfg(test)]
mod tests {
use std::collections::BTreeMap;
use std::{collections::BTreeMap, iter::repeat};

use powdr_ast::analyzed::{PolyID, PolynomialType};
use powdr_ast::analyzed::PolyID;
use powdr_number::{FieldElement, GoldilocksField};
use powdr_pil_analyzer::analyze_string;

@@ -162,16 +162,11 @@ mod tests {
let mut fixed_lookup = FixedLookup::new(fixed_data.global_range_constraints().clone());
let mut machines = [];

let columns = (0..fixed_data.witness_cols.len())
.map(move |i| PolyID {
id: i as u64,
ptype: PolynomialType::Committed,
})
.collect();
let columns = fixed_data.witness_cols.keys().collect();
let basic_row = Row::fresh(&fixed_data, fixed_data.witness_cols.keys());
let data = FinalizableData::with_initial_rows_in_progress(
&columns,
(0..fixed_data.degree)
.map(|i| Row::fresh(&fixed_data, RowIndex::from_degree(i, fixed_data.degree))),
repeat(basic_row).take(fixed_data.degree as usize),
);

let mut mutable_state = MutableState {
60 changes: 46 additions & 14 deletions executor/src/witgen/data_structures/column_map.rs
@@ -1,6 +1,6 @@
use std::{
marker::PhantomData,
ops::{Index, IndexMut},
ops::{Index, IndexMut, Range},
};

use powdr_ast::analyzed::{PolyID, PolynomialType};
@@ -30,49 +30,63 @@ pub type FixedColumnMap<V> = ColumnMap<V, Fixed>;
/// A Map indexed by polynomial ID, for a specific polynomial type (e.g. fixed or witness).
/// For performance reasons, it uses a Vec<V> internally and assumes that the polynomial IDs
/// are contiguous.
/// If the IDs are not contiguous, accessing the columns between the minimal and maximal column ID
/// will not lead to an error.
#[derive(Clone)]
pub struct ColumnMap<V, T: PolynomialTypeTrait> {
values: Vec<V>,
/// The range of column IDs in the vector.
column_id_range: Range<usize>,
_ptype: PhantomData<T>,
}

impl<V: Clone, T: PolynomialTypeTrait> ColumnMap<V, T> {
/// Create a new ColumnMap with the given initial value and size.
pub fn new(initial_value: V, size: usize) -> Self {
pub fn new(initial_value: V, column_id_range: Range<usize>) -> Self {
ColumnMap {
values: vec![initial_value; size],
values: vec![initial_value; column_id_range.len()],
column_id_range,
_ptype: PhantomData,
}
}
}

impl<V, T: PolynomialTypeTrait> ColumnMap<V, T> {
pub fn from(values: impl Iterator<Item = V>) -> Self {
/// Creates a new ColumnMap from an iterator over values, corresponding
/// to the keys in the given range, in the iteration order.
pub fn from(column_id_range: Range<usize>, values: impl Iterator<Item = V>) -> Self {
let values: Vec<_> = values.collect();
assert_eq!(values.len(), column_id_range.len());
ColumnMap {
values: values.collect(),
values,
column_id_range,
_ptype: PhantomData,
}
}

/// Creates a ColumnMap from an iterator over PolyIDs and values.
pub fn from_indexed(items: impl Iterator<Item = (PolyID, V)>, len: usize) -> Self
pub fn from_indexed(
column_id_range: Range<usize>,
items: impl Iterator<Item = (PolyID, V)>,
) -> Self
where
V: Default,
{
let mut values: Vec<V> = (0..len).map(|_| V::default()).collect();
let mut values: Vec<V> = (0..column_id_range.len()).map(|_| V::default()).collect();
for (poly, value) in items {
values[poly.id as usize] = value;
values[poly.id as usize - column_id_range.start] = value;
debug_assert_eq!(poly.ptype, T::P_TYPE);
}

ColumnMap {
values,
column_id_range,
_ptype: PhantomData,
}
}

pub fn keys(&self) -> impl Iterator<Item = PolyID> {
(0..self.values.len()).map(move |i| PolyID {
self.column_id_range.clone().map(|i| PolyID {
id: i as u64,
ptype: T::P_TYPE,
})
@@ -86,35 +100,53 @@ impl<V, T: PolynomialTypeTrait> ColumnMap<V, T> {
self.keys().zip(self.values)
}

pub fn iter_mut(&mut self) -> impl Iterator<Item = (PolyID, &mut V)> {
self.keys().zip(self.values.iter_mut())
}

/// Returns an iterator over the values, in the order of the keys.
pub fn values(&self) -> impl Iterator<Item = &V> {
self.values.iter()
}

/// Returns an iterator over the values, in the order of the keys.
pub fn values_into_iter(self) -> impl Iterator<Item = V> {
self.values.into_iter()
}

/// Returns a mutating iterator over the values, in the order of the keys.
pub fn values_iter_mut(&mut self) -> impl Iterator<Item = &mut V> {
self.values.iter_mut()
}

pub fn len(&self) -> usize {
self.values.len()
pub fn column_id_range(&self) -> Range<usize> {
self.column_id_range.clone()
}

#[inline]
pub fn get(&self, poly_id: &PolyID) -> Option<&V> {
debug_assert!(poly_id.ptype == T::P_TYPE);
if self.column_id_range.contains(&(poly_id.id as usize)) {
Some(&self.values[poly_id.id as usize - self.column_id_range.start])
} else {
None
}
}
}

impl<V, T: PolynomialTypeTrait> Default for ColumnMap<V, T> {
fn default() -> Self {
ColumnMap {
values: Vec::new(),
column_id_range: Default::default(),
_ptype: PhantomData,
}
}
}

impl<V: PartialEq, T: PolynomialTypeTrait> PartialEq for ColumnMap<V, T> {
fn eq(&self, other: &Self) -> bool {
self.values == other.values
self.column_id_range == other.column_id_range && self.values == other.values
}
}

@@ -124,14 +156,14 @@ impl<V, T: PolynomialTypeTrait> Index<&PolyID> for ColumnMap<V, T> {
#[inline]
fn index(&self, poly_id: &PolyID) -> &Self::Output {
debug_assert!(poly_id.ptype == T::P_TYPE);
&self.values[poly_id.id as usize]
&self.values[poly_id.id as usize - self.column_id_range.start]
}
}

impl<V, T: PolynomialTypeTrait> IndexMut<&PolyID> for ColumnMap<V, T> {
#[inline]
fn index_mut(&mut self, poly_id: &PolyID) -> &mut Self::Output {
debug_assert!(poly_id.ptype == T::P_TYPE);
&mut self.values[poly_id.id as usize]
&mut self.values[poly_id.id as usize - self.column_id_range.start]
}
}
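
Note: the core change to `ColumnMap` above is that it now records the `column_id_range` it covers and subtracts `column_id_range.start` on every access, so a map no longer has to start at column ID 0 or allocate slots for columns it does not own. Below is a minimal, self-contained sketch of that indexing scheme; `RangeMap`, the plain `usize` IDs and the `main` driver are made up for illustration and are not powdr's actual `ColumnMap`/`PolyID` types.

```rust
use std::ops::Range;

/// Minimal stand-in for the range-based map: values are stored densely, but
/// slot 0 corresponds to `column_id_range.start`, not to column ID 0.
struct RangeMap<V> {
    values: Vec<V>,
    column_id_range: Range<usize>,
}

impl<V: Clone> RangeMap<V> {
    fn new(initial: V, column_id_range: Range<usize>) -> Self {
        Self {
            values: vec![initial; column_id_range.len()],
            column_id_range,
        }
    }

    /// Checked access: IDs outside the covered range yield `None`.
    fn get(&self, id: usize) -> Option<&V> {
        self.column_id_range
            .contains(&id)
            .then(|| &self.values[id - self.column_id_range.start])
    }
}

fn main() {
    // A map covering only columns 5, 6 and 7 allocates three slots, not eight.
    let map = RangeMap::new(0u64, 5..8);
    assert_eq!(map.values.len(), 3);
    assert!(map.get(6).is_some());
    assert!(map.get(2).is_none());
}
```

As the updated doc comment notes, IDs that fall inside the range but do not correspond to an actual column are still accepted silently; only IDs outside the range are rejected.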
6 changes: 4 additions & 2 deletions executor/src/witgen/generator.rs
@@ -111,6 +111,7 @@ impl<'a, T: FieldElement> Generator<'a, T> {
witnesses: HashSet<PolyID>,
latch: Option<Expression<T>>,
) -> Self {
// TODO restrict columns to those in witnesses
let data = FinalizableData::new(&witnesses);
Self {
connecting_identities: connecting_identities.clone(),
@@ -165,14 +166,15 @@ impl<'a, T: FieldElement> Generator<'a, T> {
// Note that using `BlockProcessor` instead of `VmProcessor` is more convenient here because
// it does not assert that the row is "complete" afterwards (i.e., that all identities
// are satisfied assuming 0 for unknown values).
let row = Row::fresh(self.fixed_data, self.witnesses.iter().cloned());
let data = FinalizableData::with_initial_rows_in_progress(
&self.witnesses,
[
Row::fresh(
row.clone().with_external_witness_values(
self.fixed_data,
RowIndex::from_i64(-1, self.fixed_data.degree),
),
Row::fresh(
row.with_external_witness_values(
self.fixed_data,
RowIndex::from_i64(0, self.fixed_data.degree),
),
2 changes: 1 addition & 1 deletion executor/src/witgen/global_constraints.rs
@@ -126,8 +126,8 @@ pub fn set_global_constraints<'a, T: FieldElement>(
}
}
let fixed_constraints = FixedColumnMap::from_indexed(
fixed_data.fixed_cols.column_id_range(),
known_constraints.iter().map(|(p, c)| (*p, Some(c.clone()))),
fixed_data.fixed_cols.len(),
);

let mut retained_identities = vec![];
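
Note: `from_indexed` previously took a `len` and indexed with the raw `poly.id`, which is why this call passed `fixed_data.fixed_cols.len()`; it now takes the ID range and writes each value at `id - range.start`. A rough sketch of that construction, using a bare `Vec` and `usize` IDs instead of the actual `FixedColumnMap`/`PolyID` (the helper and the range `10..13` are illustrative only):

```rust
use std::ops::Range;

/// Sketch of the `from_indexed`-style construction: each (id, value) pair is
/// written to slot `id - range.start`, so the IDs no longer have to start at 0.
fn from_indexed<V: Default>(
    range: Range<usize>,
    items: impl Iterator<Item = (usize, V)>,
) -> Vec<V> {
    let mut values: Vec<V> = (0..range.len()).map(|_| V::default()).collect();
    for (id, value) in items {
        values[id - range.start] = value;
    }
    values
}

fn main() {
    // Constraints known only for columns 11 and 12, inside the ID range 10..13.
    let constraints = from_indexed(
        10..13,
        [(11, Some("x < 256")), (12, Some("x < 4"))].into_iter(),
    );
    assert_eq!(constraints, vec![None, Some("x < 256"), Some("x < 4")]);
}
```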
19 changes: 17 additions & 2 deletions executor/src/witgen/machines/block_machine.rs
@@ -2,6 +2,7 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::fmt::Display;
use std::iter;


use super::{EvalResult, FixedData, FixedLookup};

use crate::witgen::block_processor::BlockProcessor;
@@ -107,6 +108,8 @@ pub struct BlockMachine<'a, T: FieldElement> {
connection_type: ConnectionType,
/// The internal identities
identities: Vec<&'a Identity<T>>,
/// A prototypical row with global range constraints set, but uninitialized otherwise.
basic_row: Row<T>,
/// The data of the machine.
data: FinalizableData<T>,
/// The index of the first row that has not been finalized yet.
@@ -152,9 +155,16 @@ impl<'a, T: FieldElement> BlockMachine<'a, T> {
// In `take_witness_col_values()`, this block will be removed and its values will be used to
// construct the "default" block used to fill up unused rows.
let start_index = RowIndex::from_i64(-(block_size as i64), fixed_data.degree);
// compute the min and max of the witness col ids

let basic_row = Row::fresh(fixed_data, witness_cols.iter().cloned());
let data = FinalizableData::with_initial_rows_in_progress(
witness_cols,
(0..block_size).map(|i| Row::fresh(fixed_data, start_index + i)),
(0..block_size).map(|i| {
basic_row
.clone()
.with_external_witness_values(fixed_data, start_index + i)
}),
);
Some(BlockMachine {
name,
Expand All @@ -163,6 +173,7 @@ impl<'a, T: FieldElement> BlockMachine<'a, T> {
connecting_identities: connecting_identities.clone(),
connection_type: is_permutation,
identities: identities.to_vec(),
basic_row,
data,
first_in_progress_row: block_size,
witness_cols: witness_cols.clone(),
@@ -560,7 +571,11 @@ impl<'a, T: FieldElement> BlockMachine<'a, T> {
// and the first row of the next block.
let block = FinalizableData::with_initial_rows_in_progress(
&self.witness_cols,
(0..(self.block_size + 2)).map(|i| Row::fresh(self.fixed_data, row_offset + i)),
(0..(self.block_size + 2)).map(|i| {
self.basic_row
.clone()
.with_external_witness_values(self.fixed_data, row_offset + i)
}),
);
let mut processor = BlockProcessor::new(
row_offset,
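
Note: the new `basic_row` field caches one freshly constructed row, with the global range constraints already applied, and every row of a block then starts from a clone of it instead of a separate `Row::fresh` call. A simplified sketch of that prototype-row pattern; the `Row`, `fresh` and `with_row_specific_values` names below are stand-ins for illustration, not powdr's actual `Row::fresh` / `with_external_witness_values` API.

```rust
/// Stand-in row type: cloning it replaces re-running the per-column setup.
#[derive(Clone)]
struct Row {
    cells: Vec<Option<u64>>, // one cell per witness column
}

impl Row {
    /// Built once per machine; imagine this also applies the global range
    /// constraints to every column.
    fn fresh(num_columns: usize) -> Self {
        Row {
            cells: vec![None; num_columns],
        }
    }

    /// Stand-in for patching row-specific data (such as external witness
    /// values) into a cloned prototype.
    fn with_row_specific_values(mut self, row_index: usize) -> Self {
        self.cells[0] = Some(row_index as u64);
        self
    }
}

fn main() {
    let block_size = 4;
    let basic_row = Row::fresh(8); // the prototype, constructed once
    let block: Vec<Row> = (0..block_size)
        .map(|i| basic_row.clone().with_row_specific_values(i))
        .collect();
    assert_eq!(block.len(), block_size);
}
```

The intent appears to be that cloning a prepared row is cheaper than redoing the per-column constraint setup for each of the `block_size + 2` rows of a block.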
16 changes: 10 additions & 6 deletions executor/src/witgen/mod.rs
@@ -296,7 +296,9 @@ impl<'a, T: FieldElement> FixedData<'a, T> {
.collect::<BTreeMap<_, _>>();

let witness_cols =
WitnessColumnMap::from(analyzed.committed_polys_in_source_order().iter().flat_map(
WitnessColumnMap::from(
0..analyzed.commitment_count(),
analyzed.committed_polys_in_source_order().iter().flat_map(
|(poly, value)| {
poly.array_elements()
.map(|(name, poly_id)| {
@@ -333,13 +335,15 @@ impl<'a, T: FieldElement> FixedData<'a, T> {
);
}

let fixed_cols =
FixedColumnMap::from(fixed_col_values.iter().map(|(n, v)| FixedColumn::new(n, v)));
let fixed_cols = FixedColumnMap::from(
0..fixed_col_values.len(),
fixed_col_values.iter().map(|(n, v)| FixedColumn::new(n, v)),
);

// The global range constraints are not set yet.
let global_range_constraints = GlobalConstraints {
witness_constraints: WitnessColumnMap::new(None, witness_cols.len()),
fixed_constraints: FixedColumnMap::new(None, fixed_cols.len()),
witness_constraints: WitnessColumnMap::new(None, witness_cols.column_id_range()),
fixed_constraints: FixedColumnMap::new(None, fixed_cols.column_id_range()),
};

FixedData {
@@ -382,7 +386,7 @@ impl<'a, T: FieldElement> FixedData<'a, T> {
}

fn witness_map_with<V: Clone>(&self, initial_value: V) -> WitnessColumnMap<V> {
WitnessColumnMap::new(initial_value, self.witness_cols.len())
WitnessColumnMap::new(initial_value, self.witness_cols.column_id_range())
}

fn column_name(&self, poly_id: &PolyID) -> &str {
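
Note: the constructors in this file now receive the column ID range explicitly (`0..analyzed.commitment_count()`, `0..fixed_col_values.len()`, `column_id_range()`) instead of a bare length, and the updated `from` asserts that the number of provided values matches the size of that range. A small sketch of that contract; `from_range` and the IDs `0..3` are illustrative only, not the actual `WitnessColumnMap`/`FixedColumnMap` API.

```rust
use std::ops::Range;

/// Sketch of the new constructor contract: the ID range is passed explicitly,
/// and the number of provided values must match the size of that range.
fn from_range<V>(
    column_id_range: Range<usize>,
    values: impl Iterator<Item = V>,
) -> (Range<usize>, Vec<V>) {
    let values: Vec<V> = values.collect();
    assert_eq!(values.len(), column_id_range.len());
    (column_id_range, values)
}

fn main() {
    // One value per committed polynomial; the IDs 0..3 are assumed here.
    let (range, names) = from_range(0..3, ["a", "b", "c"].into_iter());
    assert_eq!((range.start, range.end), (0, 3));
    assert_eq!(names, vec!["a", "b", "c"]);
}
```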
1 change: 1 addition & 0 deletions executor/src/witgen/processor.rs
@@ -96,6 +96,7 @@ impl<'a, 'b, 'c, T: FieldElement, Q: QueryCallback<T>> Processor<'a, 'b, 'c, T,
witness_cols: &'c HashSet<PolyID>,
) -> Self {
let is_relevant_witness = WitnessColumnMap::from(
fixed_data.witness_cols.column_id_range(),
fixed_data
.witness_cols
.keys()