Commitment Scheme evaluation per size
spapinistarkware committed Mar 24, 2024
1 parent b658169 commit fc7ccc6
Showing 8 changed files with 258 additions and 142 deletions.
56 changes: 28 additions & 28 deletions src/core/commitment_scheme/prover.rs
@@ -5,7 +5,6 @@
//! the unique decoding regime. This is enough for a STARK proof though, where we only want to imply
//! the existence of such polynomials, and are ok with having a small decoding list.

use std::iter::zip;
use std::ops::Deref;

use itertools::Itertools;
@@ -17,20 +16,21 @@ use super::super::circle::CirclePoint;
use super::super::fields::m31::BaseField;
use super::super::fields::qm31::SecureField;
use super::super::fri::{FriConfig, FriProof, FriProver};
use super::super::oods::get_pair_oods_quotient;
use super::super::poly::circle::CanonicCoset;
use super::super::poly::BitReversedOrder;
use super::super::proof_of_work::{ProofOfWork, ProofOfWorkProof};
use super::super::prover::{
LOG_BLOWUP_FACTOR, LOG_LAST_LAYER_DEGREE_BOUND, N_QUERIES, PROOF_OF_WORK_BITS,
};
use super::super::ColumnVec;
use super::quotients::{compute_fri_quotients, PointSample};
use super::utils::TreeVec;
use crate::commitment_scheme::blake2_hash::{Blake2sHash, Blake2sHasher};
use crate::commitment_scheme::merkle_input::{MerkleTreeColumnLayout, MerkleTreeInput};
use crate::commitment_scheme::mixed_degree_decommitment::MixedDecommitment;
use crate::commitment_scheme::mixed_degree_merkle_tree::MixedDegreeMerkleTree;
use crate::core::channel::Channel;
use crate::core::poly::circle::SecureEvaluation;

type MerkleHasher = Blake2sHasher;
type ProofChannel = Blake2sChannel;
@@ -76,39 +76,39 @@ impl CommitmentSchemeProver {
channel: &mut ProofChannel,
) -> CommitmentSchemeProof {
// Evaluate polynomials on open points.
- let proved_values =
-     self.polynomials()
-         .zip_cols(&prove_points)
-         .map_cols(|(poly, points)| {
-             points
-                 .iter()
-                 .map(|point| poly.eval_at_point(*point))
-                 .collect_vec()
-         });
- channel.mix_felts(&proved_values.clone().flatten_cols());
-
- // Compute oods quotients for boundary constraints on prove_points.
- let quotients = self
-     .evaluations()
-     .zip_cols(&proved_values)
-     .zip_cols(&prove_points)
-     .map_cols(|((evaluation, values), points)| {
-         zip(points, values)
-             .map(|(&point, &value)| {
-                 get_pair_oods_quotient(point, value, evaluation).bit_reverse()
-             })
-             .collect_vec()
-     });
+ let openings = self
+     .polynomials()
+     .zip_cols(&prove_points)
+     .map_cols(|(poly, points)| {
+         points
+             .iter()
+             .map(|&point| PointSample {
+                 point,
+                 value: poly.eval_at_point(point),
+             })
+             .collect_vec()
+     });
+ let proved_values = openings
+     .as_cols_ref()
+     .map_cols(|x| x.iter().map(|o| o.value).collect());
+ channel.mix_felts(&proved_values.clone().flatten_cols());
+
+ // Compute oods quotients for boundary constraints on prove_points.
+ let columns = self.evaluations().flatten();
+ let quotients =
+     compute_fri_quotients(&columns[..], &openings.flatten(), channel.draw_felt());
+
+ // TODO(spapini): Conversion to CircleEvaluation can be removed when FRI supports
+ // SecureColumn.
+ let quotients = quotients
+     .into_iter()
+     .map(SecureEvaluation::to_cpu)
+     .collect_vec();

// Run FRI commitment phase on the oods quotients.
let fri_config = FriConfig::new(LOG_LAST_LAYER_DEGREE_BOUND, LOG_BLOWUP_FACTOR, N_QUERIES);
- // TODO(spapini): Remove rev() when we start accumulating by size.
- // This is only done because fri demands descending sizes.
- let fri_prover = FriProver::<CPUBackend, MerkleHasher>::commit(
-     channel,
-     fri_config,
-     &quotients.flatten_cols_rev(),
- );
+ let fri_prover =
+     FriProver::<CPUBackend, MerkleHasher>::commit(channel, fri_config, &quotients);

// Proof of work.
let proof_of_work = ProofOfWork::new(PROOF_OF_WORK_BITS).prove(channel);
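The new flow can be exercised on its own in a few lines. A minimal sketch, mirroring the test added to quotients.rs below; it assumes the crate-internal CPUCirclePoly, CanonicCoset, SECURE_FIELD_CIRCLE_GEN, PointSample and compute_fri_quotients items that appear in this commit, plus the m31!/qm31! macros:

    // Sample one committed column at an out-of-domain point and build the
    // per-size FRI quotient columns that the prover now hands to FriProver::commit.
    let poly = CPUCirclePoly::new((0..1 << 7).map(|i| m31!(i)).collect());
    let eval = poly.evaluate(CanonicCoset::new(8).circle_domain());
    let point = SECURE_FIELD_CIRCLE_GEN;
    let samples = vec![PointSample { point, value: poly.eval_at_point(point) }];
    let quotients = compute_fri_quotients(&[&eval], &[samples], qm31!(1, 2, 3, 4));
    // One SecureEvaluation per distinct domain size, ordered largest first.
    assert_eq!(quotients.len(), 1);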
165 changes: 164 additions & 1 deletion src/core/commitment_scheme/quotients.rs
@@ -1,10 +1,20 @@
use std::cmp::Reverse;
use std::collections::BTreeMap;

use itertools::{izip, multiunzip, Itertools};

use crate::core::backend::cpu::quotients::accumulate_row_quotients;
use crate::core::backend::Backend;
use crate::core::circle::CirclePoint;
use crate::core::fields::m31::BaseField;
use crate::core::fields::qm31::SecureField;
use crate::core::fields::secure_column::SecureColumn;
use crate::core::poly::circle::{CircleDomain, CircleEvaluation};
use crate::core::fri::SparseCircleEvaluation;
use crate::core::poly::circle::{CanonicCoset, CircleDomain, CircleEvaluation, SecureEvaluation};
use crate::core::poly::BitReversedOrder;
use crate::core::prover::VerificationError;
use crate::core::queries::SparseSubCircleDomain;
use crate::core::utils::bit_reverse_index;

pub trait QuotientOps: Backend {
/// Accumulates the quotients of the columns at the given domain.
@@ -28,3 +38,156 @@ pub struct ColumnSampleBatch {
/// The sampled column indices and their values at the point.
pub column_indices_and_values: Vec<(usize, SecureField)>,
}
impl ColumnSampleBatch {
/// Groups column openings by opening point.
/// # Arguments
/// openings: For each column, a vector of openings.
pub fn new(openings: &[&Vec<PointSample>]) -> Vec<Self> {
openings
.iter()
.enumerate()
.flat_map(|(column_index, openings)| {
openings.iter().map(move |opening| (column_index, opening))
})
.group_by(|(_, opening)| opening.point)
.into_iter()
.map(|(point, column_openings)| Self {
point,
column_indices_and_values: column_openings
.map(|(column_index, opening)| (column_index, opening.value))
.collect(),
})
.collect()
}
}
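For intuition, a hedged sketch of the grouping; the point and the two claimed values are illustrative placeholders, reusing SECURE_FIELD_CIRCLE_GEN and the qm31! macro from the test below:

    // Two columns sampled at the same point fold into a single batch:
    // { point: p, column_indices_and_values: [(0, v0), (1, v1)] }.
    let p = SECURE_FIELD_CIRCLE_GEN;
    let (v0, v1) = (qm31!(1, 0, 0, 0), qm31!(2, 0, 0, 0));
    let col0 = vec![PointSample { point: p, value: v0 }];
    let col1 = vec![PointSample { point: p, value: v1 }];
    let batches = ColumnSampleBatch::new(&[&col0, &col1]);
    assert_eq!(batches.len(), 1);
    assert_eq!(batches[0].column_indices_and_values, vec![(0, v0), (1, v1)]);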

pub struct PointSample {
pub point: CirclePoint<SecureField>,
pub value: SecureField,
}

pub fn compute_fri_quotients<B: QuotientOps>(
columns: &[&CircleEvaluation<B, BaseField, BitReversedOrder>],
samples: &[Vec<PointSample>],
random_coeff: SecureField,
) -> Vec<SecureEvaluation<B>> {
izip!(columns, samples)
.group_by(|(c, _)| c.domain.log_size())
.into_iter()
.sorted_by_key(|(log_size, _)| Reverse(*log_size))
.map(|(log_size, tuples)| {
let (columns, openings): (Vec<_>, Vec<_>) = multiunzip(tuples);
let domain = CanonicCoset::new(log_size).circle_domain();
// TODO: slice.
let batched_openings = ColumnSampleBatch::new(&openings);
let values = B::accumulate_quotients(domain, &columns, random_coeff, &batched_openings);
SecureEvaluation { domain, values }
})
.collect()
}
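Each group of equally sized columns is accumulated into a single secure-field quotient column. Schematically, and only as univariate-style intuition (the exact vanishing term for circle-domain points lives in the backend's accumulate_row_quotients), a batch sampled at point p with claimed values v_j contributes per row x:

    q(x) = sum_j alpha^j * (f_j(x) - v_j) / V_p(x)

where V_p vanishes at p and alpha is the random_coeff parameter (drawn from the channel by the prover). q stays low degree when every f_j(p) equals v_j, and with high probability over alpha only then, which is what the test at the bottom of this file checks.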

pub fn fri_answers(
column_log_sizes: Vec<u32>,
openings: &[Vec<PointSample>],
random_coeff: SecureField,
query_domain_per_log_size: BTreeMap<u32, SparseSubCircleDomain>,
queried_values_per_column: &[Vec<BaseField>],
) -> Result<Vec<SparseCircleEvaluation<SecureField>>, VerificationError> {
izip!(column_log_sizes, openings, queried_values_per_column)
.group_by(|(c, ..)| *c)
.into_iter()
.sorted_by_key(|(log_size, _)| Reverse(*log_size))
.map(|(log_size, tuples)| {
let (_, openings, queried_valued_per_column): (Vec<_>, Vec<_>, Vec<_>) =
multiunzip(tuples);
fri_answers_for_log_size(
log_size,
&openings,
random_coeff,
&query_domain_per_log_size[&log_size],
&queried_valued_per_column,
)
})
.collect()
}
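The verifier-side fri_answers mirrors the prover: the per-column inputs are grouped by domain log size and the groups are handled largest first. A small self-contained sketch of that grouping pattern on plain integers, using only the Itertools and Reverse imports already at the top of this file:

    // Adjacent equal log sizes form one group each; groups are visited as 8 then 7,
    // the same descending order fri_answers and compute_fri_quotients use.
    let column_log_sizes = vec![8u32, 8, 7, 7, 7];
    let groups = column_log_sizes
        .iter()
        .group_by(|log_size| **log_size)
        .into_iter()
        .map(|(log_size, group)| (log_size, group.count()))
        .sorted_by_key(|(log_size, _)| Reverse(*log_size))
        .collect_vec();
    assert_eq!(groups, vec![(8, 2), (7, 3)]);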

pub fn fri_answers_for_log_size(
log_size: u32,
openings: &[&Vec<PointSample>],
random_coeff: SecureField,
query_domain: &SparseSubCircleDomain,
queried_values_per_column: &[&Vec<BaseField>],
) -> Result<SparseCircleEvaluation<SecureField>, VerificationError> {
let commitment_domain = CanonicCoset::new(log_size).circle_domain();
let batched_openings = ColumnSampleBatch::new(openings);
for x in queried_values_per_column {
if x.len() != query_domain.flatten().len() {
return Err(VerificationError::InvalidStructure);
}
}
let mut queried_values_per_column = queried_values_per_column
.iter()
.map(|q| q.iter())
.collect_vec();

let res = SparseCircleEvaluation::new(
query_domain
.iter()
.map(|subdomain| {
let domain = subdomain.to_circle_domain(&commitment_domain);
let column_evals = queried_values_per_column
.iter_mut()
.map(|q| {
CircleEvaluation::new(domain, q.take(domain.size()).copied().collect_vec())
})
.collect_vec();
// TODO(spapini): bit reverse iterator.
let values = (0..domain.size())
.map(|row| {
let domain_point = domain.at(bit_reverse_index(row, log_size));
accumulate_row_quotients(
&batched_openings,
&column_evals.iter().collect_vec(),
row,
random_coeff,
domain_point,
)
})
.collect();
CircleEvaluation::new(domain, values)
})
.collect(),
);
if !queried_values_per_column.iter().all(|x| x.is_empty()) {
return Err(VerificationError::InvalidStructure);
}
Ok(res)
}
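fri_answers_for_log_size carves domain.size() values out of each column's flat query answers and then insists that nothing is left over. The same consume-and-check pattern on plain integers, as an illustrative sketch with made-up chunk sizes:

    // Consume the flat answers in per-subdomain chunks, then require emptiness,
    // mirroring the InvalidStructure checks above.
    let queried = vec![1, 2, 3, 4, 5, 6];
    let mut rest = queried.iter();
    let chunks: Vec<Vec<i32>> = [2usize, 4]
        .iter()
        .map(|&n| rest.by_ref().take(n).copied().collect())
        .collect();
    assert_eq!(chunks, vec![vec![1, 2], vec![3, 4, 5, 6]]);
    assert!(rest.next().is_none()); // leftover values would mean a malformed proof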

#[cfg(test)]
mod tests {
use crate::core::backend::cpu::{CPUCircleEvaluation, CPUCirclePoly};
use crate::core::circle::SECURE_FIELD_CIRCLE_GEN;
use crate::core::commitment_scheme::quotients::{compute_fri_quotients, PointSample};
use crate::core::poly::circle::CanonicCoset;
use crate::{m31, qm31};

#[test]
fn test_quotients_are_low_degree() {
const LOG_SIZE: u32 = 7;
let polynomial = CPUCirclePoly::new((0..1 << LOG_SIZE).map(|i| m31!(i)).collect());
let eval_domain = CanonicCoset::new(LOG_SIZE + 1).circle_domain();
let eval = polynomial.evaluate(eval_domain);
let point = SECURE_FIELD_CIRCLE_GEN;
let value = polynomial.eval_at_point(point);
let coeff = qm31!(1, 2, 3, 4);
let quot_eval =
compute_fri_quotients(&[&eval], &[vec![PointSample { point, value }]], coeff)
.pop()
.unwrap();
let quot_poly_base_field =
CPUCircleEvaluation::new(eval_domain, quot_eval.values.columns[0].clone())
.interpolate();
assert!(quot_poly_base_field.is_in_fft_space(LOG_SIZE));
}
}
