diff --git a/crates/prover/src/core/backend/cpu/quotients.rs b/crates/prover/src/core/backend/cpu/quotients.rs index 17cc007e1..f157b76ca 100644 --- a/crates/prover/src/core/backend/cpu/quotients.rs +++ b/crates/prover/src/core/backend/cpu/quotients.rs @@ -1,18 +1,18 @@ -use itertools::{izip, zip_eq}; +use itertools::{izip, zip_eq, Itertools}; use num_traits::{One, Zero}; use super::CpuBackend; use crate::core::circle::CirclePoint; use crate::core::constraints::complex_conjugate_line_coeffs; use crate::core::fields::cm31::CM31; -use crate::core::fields::m31::BaseField; +use crate::core::fields::m31::{BaseField, M31}; use crate::core::fields::qm31::SecureField; use crate::core::fields::secure_column::SecureColumnByCoords; use crate::core::fields::FieldExpOps; use crate::core::pcs::quotients::{ColumnSampleBatch, PointSample, QuotientOps}; use crate::core::poly::circle::{CircleDomain, CircleEvaluation, SecureEvaluation}; use crate::core::poly::BitReversedOrder; -use crate::core::utils::{bit_reverse, bit_reverse_index}; +use crate::core::utils::bit_reverse_index; impl QuotientOps for CpuBackend { fn accumulate_quotients( @@ -23,15 +23,15 @@ impl QuotientOps for CpuBackend { _log_blowup_factor: u32, ) -> SecureEvaluation { let mut values = unsafe { SecureColumnByCoords::uninitialized(domain.size()) }; - let quotient_constants = quotient_constants(sample_batches, random_coeff, domain); + let quotient_constants = quotient_constants(sample_batches, random_coeff); for row in 0..domain.size() { let domain_point = domain.at(bit_reverse_index(row, domain.log_size())); + let query_values_at_row = columns.iter().map(|col| col[row]).collect_vec(); let row_value = accumulate_row_quotients( sample_batches, - columns, + &query_values_at_row, "ient_constants, - row, domain_point, ); values.set(row, row_value); @@ -42,23 +42,22 @@ impl QuotientOps for CpuBackend { pub fn accumulate_row_quotients( sample_batches: &[ColumnSampleBatch], - columns: &[&CircleEvaluation], + queried_values_at_row: &[BaseField], quotient_constants: &QuotientConstants, - row: usize, domain_point: CirclePoint, ) -> SecureField { + let denominator_inverses = denominator_inverses(sample_batches, domain_point); let mut row_accumulator = SecureField::zero(); - for (sample_batch, line_coeffs, batch_coeff, denominator_inverses) in izip!( + for (sample_batch, line_coeffs, batch_coeff, denominator_inverse) in izip!( sample_batches, "ient_constants.line_coeffs, "ient_constants.batch_random_coeffs, - "ient_constants.denominator_inverses + denominator_inverses ) { let mut numerator = SecureField::zero(); for ((column_index, _), (a, b, c)) in zip_eq(&sample_batch.columns_and_values, line_coeffs) { - let column = &columns[*column_index]; - let value = column[row] * *c; + let value = queried_values_at_row[*column_index] * *c; // The numerator is a line equation passing through // (sample_point.y, sample_value), (conj(sample_point), conj(sample_value)) // evaluated at (domain_point.y, value). 
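// Illustrative sketch (not part of the diff): the shape of the reworked per-row
// accumulation above. Field types are replaced by f64 stand-ins, and the numerator is
// simplified to the `value * c` terms only (the real code also subtracts the linear term
// `a * domain_point.y + b` from `complex_conjugate_line_coeffs`). One denominator inverse
// is computed per sample batch at the single queried domain point, instead of per row of
// the whole domain as before.
fn accumulate_row_sketch(
    batches: &[(Vec<(usize, f64)>, f64)], // ((column index, line coeff c) pairs, batch coeff)
    queried_values_at_row: &[f64],
    denominator_inverses: &[f64], // one per sample batch, at this row's domain point
) -> f64 {
    let mut row_accumulator = 0.0;
    for ((columns_and_coeffs, batch_coeff), denominator_inverse) in
        batches.iter().zip(denominator_inverses)
    {
        let numerator: f64 = columns_and_coeffs
            .iter()
            .map(|(column_index, c)| queried_values_at_row[*column_index] * c)
            .sum();
        // Horner-style batching: previous batches are shifted by this batch's random coeff.
        row_accumulator = row_accumulator * batch_coeff + numerator * denominator_inverse;
    }
    row_accumulator
}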
@@ -69,8 +68,7 @@ pub fn accumulate_row_quotients( numerator += value - linear_term; } - row_accumulator = - row_accumulator * *batch_coeff + numerator.mul_cm31(denominator_inverses[row]); + row_accumulator = row_accumulator * *batch_coeff + numerator.mul_cm31(denominator_inverse); } row_accumulator } @@ -118,9 +116,10 @@ pub fn batch_random_coeffs( fn denominator_inverses( sample_batches: &[ColumnSampleBatch], - domain: CircleDomain, -) -> Vec> { - let mut flat_denominators = Vec::with_capacity(sample_batches.len() * domain.size()); + domain_point: CirclePoint, +) -> Vec { + let mut denominators = Vec::new(); + // We want a P to be on a line that passes through a point Pr + uPi in QM31^2, and its conjugate // Pr - uPi. Thus, Pr - P is parallel to Pi. Or, (Pr - P).x * Pi.y - (Pr - P).y * Pi.x = 0. for sample_batch in sample_batches { @@ -129,36 +128,22 @@ fn denominator_inverses( let pry = sample_batch.point.y.0; let pix = sample_batch.point.x.1; let piy = sample_batch.point.y.1; - for row in 0..domain.size() { - let domain_point = domain.at(row); - flat_denominators.push((prx - domain_point.x) * piy - (pry - domain_point.y) * pix); - } + denominators.push((prx - domain_point.x) * piy - (pry - domain_point.y) * pix); } - let mut flat_denominator_inverses = vec![CM31::zero(); flat_denominators.len()]; - CM31::batch_inverse(&flat_denominators, &mut flat_denominator_inverses); + let mut denominator_inverses = vec![CM31::zero(); denominators.len()]; + CM31::batch_inverse(&denominators, &mut denominator_inverses); - flat_denominator_inverses - .chunks_mut(domain.size()) - .map(|denominator_inverses| { - bit_reverse(denominator_inverses); - denominator_inverses.to_vec() - }) - .collect() + denominator_inverses } pub fn quotient_constants( sample_batches: &[ColumnSampleBatch], random_coeff: SecureField, - domain: CircleDomain, ) -> QuotientConstants { - let line_coeffs = column_line_coeffs(sample_batches, random_coeff); - let batch_random_coeffs = batch_random_coeffs(sample_batches, random_coeff); - let denominator_inverses = denominator_inverses(sample_batches, domain); QuotientConstants { - line_coeffs, - batch_random_coeffs, - denominator_inverses, + line_coeffs: column_line_coeffs(sample_batches, random_coeff), + batch_random_coeffs: batch_random_coeffs(sample_batches, random_coeff), } } @@ -170,8 +155,6 @@ pub struct QuotientConstants { /// The random coefficients used to linearly combine the batched quotients For more details see /// [self::batch_random_coeffs]. pub batch_random_coeffs: Vec, - /// The inverses of the denominators of the quotients. 
- pub denominator_inverses: Vec>, } #[cfg(test)] diff --git a/crates/prover/src/core/fri.rs b/crates/prover/src/core/fri.rs index 528d457dc..03dac47da 100644 --- a/crates/prover/src/core/fri.rs +++ b/crates/prover/src/core/fri.rs @@ -1,30 +1,31 @@ use std::cmp::Reverse; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use std::fmt::Debug; use std::iter::zip; use std::ops::RangeInclusive; -use itertools::Itertools; +use itertools::{zip_eq, Itertools}; use num_traits::Zero; use serde::{Deserialize, Serialize}; use thiserror::Error; -use tracing::{span, Level}; +use tracing::instrument; -use super::backend::CpuBackend; +use super::backend::{Col, CpuBackend}; use super::channel::{Channel, MerkleChannel}; use super::fields::m31::BaseField; -use super::fields::qm31::SecureField; +use super::fields::qm31::{SecureField, QM31}; use super::fields::secure_column::{SecureColumnByCoords, SECURE_EXTENSION_DEGREE}; use super::fields::FieldOps; -use super::poly::circle::{CircleEvaluation, PolyOps, SecureEvaluation}; +use super::poly::circle::{CircleDomain, PolyOps, SecureEvaluation}; use super::poly::line::{LineEvaluation, LinePoly}; use super::poly::twiddles::TwiddleTree; use super::poly::BitReversedOrder; -// TODO(andrew): Create fri/ directory, move queries.rs there and split this file up. -use super::queries::{Queries, SparseSubCircleDomain}; +use super::queries::Queries; +use super::ColumnVec; use crate::core::circle::Coset; use crate::core::fft::ibutterfly; use crate::core::fields::FieldExpOps; +use crate::core::poly::circle::CanonicCoset; use crate::core::poly::line::LineDomain; use crate::core::utils::bit_reverse_index; use crate::core::vcs::ops::{MerkleHasher, MerkleOps}; @@ -123,25 +124,25 @@ pub trait FriOps: FieldOps + PolyOps + Sized + FieldOps eval: &SecureEvaluation, ) -> (SecureEvaluation, SecureField); } + /// A FRI prover that applies the FRI protocol to prove a set of polynomials are of low degree. -pub struct FriProver, MC: MerkleChannel> { +pub struct FriProver<'a, B: FriOps + MerkleOps, MC: MerkleChannel> { config: FriConfig, - inner_layers: Vec>, + first_layer: FriFirstLayerProver<'a, B, MC::H>, + inner_layers: Vec>, last_layer_poly: LinePoly, - /// Unique sizes of committed columns sorted in descending order. - column_log_sizes: Vec, } -impl, MC: MerkleChannel> FriProver { - /// Commits to multiple [CircleEvaluation]s. +impl<'a, B: FriOps + MerkleOps, MC: MerkleChannel> FriProver<'a, B, MC> { + /// Commits to multiple circle polynomials. /// /// `columns` must be provided in descending order by size. /// - /// Mixed degree STARKs involve polynomials evaluated on multiple domains of different size. - /// Combining evaluations on different sized domains into an evaluation of a single polynomial - /// on a single domain for the purpose of commitment is inefficient. Instead, commit to multiple - /// polynomials so combining of evaluations can be taken care of efficiently at the appropriate - /// FRI layer. All evaluations must be taken over canonic [`CircleDomain`]s. + /// This is a batched commitment that handles multiple mixed-degree polynomials, each + /// evaluated over domains of varying sizes. Instead of combining these evaluations into + /// a single polynomial on a unified domain for commitment, this function commits to each + /// polynomial on its respective domain. The evaluations are then efficiently merged in the + /// FRI layer corresponding to the size of a polynomial's domain. 
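// Illustrative sketch (not part of the diff): with CIRCLE_TO_LINE_FOLD_STEP = 1 and
// FOLD_STEP = 1, a column committed on a domain of log size `n` folds into a line
// evaluation of log size `n - 1` and is merged exactly when the running inner-layer
// evaluation has that size. The helper below prints that schedule for mixed-degree
// columns sorted in descending order; names are ad hoc, not the crate's API.
fn merge_schedule(column_log_sizes: &[u32], last_layer_log_size: u32) {
    let first_inner_layer_log_size = column_log_sizes[0] - 1;
    for &log_size in column_log_sizes {
        let folded_log_size = log_size - 1;
        assert!(folded_log_size >= last_layer_log_size);
        // Inner layer i has log size `first_inner_layer_log_size - i`.
        let inner_layer = first_inner_layer_log_size - folded_log_size;
        println!("column of log size {log_size} is merged into inner layer {inner_layer}");
    }
}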
/// /// # Panics /// @@ -150,37 +151,46 @@ impl, MC: MerkleChannel> FriProver { /// * An evaluation is not from a sufficiently low degree circle polynomial. /// * An evaluation's domain is smaller than the last layer. /// * An evaluation's domain is not a canonic circle domain. - /// - /// [`CircleDomain`]: super::poly::circle::CircleDomain - // TODO(andrew): Add docs for all evaluations needing to be from canonic domains. + #[instrument(skip_all)] pub fn commit( channel: &mut MC::C, config: FriConfig, - columns: &[SecureEvaluation], + columns: &'a [SecureEvaluation], twiddles: &TwiddleTree, ) -> Self { - let _span = span!(Level::INFO, "FRI commitment").entered(); assert!(!columns.is_empty(), "no columns"); assert!(columns.is_sorted_by_key(|e| Reverse(e.len())), "not sorted"); assert!(columns.iter().all(|e| e.domain.is_canonic()), "not canonic"); + + let first_layer = Self::commit_first_layer(channel, columns); let (inner_layers, last_layer_evaluation) = Self::commit_inner_layers(channel, config, columns, twiddles); let last_layer_poly = Self::commit_last_layer(channel, config, last_layer_evaluation); - let column_log_sizes = columns - .iter() - .map(|e| e.domain.log_size()) - .dedup() - .collect(); Self { config, + first_layer, inner_layers, last_layer_poly, - column_log_sizes, } } - /// Builds and commits to the inner FRI layers (all layers except the last layer). + /// Commits to the first FRI layer. + /// + /// The first layer commits to all input circle polynomial columns (possibly of mixed degree) + /// involved in FRI. + /// + /// All `columns` must be provided in descending order by size. + fn commit_first_layer( + channel: &mut MC::C, + columns: &'a [SecureEvaluation], + ) -> FriFirstLayerProver<'a, B, MC::H> { + let layer = FriFirstLayerProver::new(columns); + MC::mix_root(channel, layer.merkle_tree.root()); + layer + } + + /// Builds and commits to the inner FRI layers (all layers except the first and last). /// /// All `columns` must be provided in descending order by size. /// @@ -190,34 +200,33 @@ impl, MC: MerkleChannel> FriProver { config: FriConfig, columns: &[SecureEvaluation], twiddles: &TwiddleTree, - ) -> (Vec>, LineEvaluation) { - // Returns the length of the [LineEvaluation] a [CircleEvaluation] gets folded into. - let folded_len = - |e: &SecureEvaluation| e.len() >> CIRCLE_TO_LINE_FOLD_STEP; + ) -> (Vec>, LineEvaluation) { + /// Returns the size of the line evaluation a circle evaluation gets folded into. + fn folded_size(v: &SecureEvaluation) -> usize { + v.len() >> CIRCLE_TO_LINE_FOLD_STEP + } - let first_layer_size = folded_len(&columns[0]); - let first_layer_domain = LineDomain::new(Coset::half_odds(first_layer_size.ilog2())); - let mut layer_evaluation = LineEvaluation::new_zero(first_layer_domain); + let circle_poly_folding_alpha = channel.draw_felt(); + let first_inner_layer_log_size = folded_size(&columns[0]).ilog2(); + let first_inner_layer_domain = + LineDomain::new(Coset::half_odds(first_inner_layer_log_size)); + let mut layer_evaluation = LineEvaluation::new_zero(first_inner_layer_domain); let mut columns = columns.iter().peekable(); - let mut layers = Vec::new(); - // Circle polynomials can all be folded with the same alpha. - let circle_poly_alpha = channel.draw_felt(); - while layer_evaluation.len() > config.last_layer_domain_size() { - // Check for any columns (circle poly evaluations) that should be combined. 
- while let Some(column) = columns.next_if(|c| folded_len(c) == layer_evaluation.len()) { + // Check for circle polys in the first layer that should be combined in this layer. + while let Some(column) = columns.next_if(|c| folded_size(c) == layer_evaluation.len()) { B::fold_circle_into_line( &mut layer_evaluation, column, - circle_poly_alpha, + circle_poly_folding_alpha, twiddles, ); } - let layer = FriLayerProver::new(layer_evaluation); + let layer = FriInnerLayerProver::new(layer_evaluation); MC::mix_root(channel, layer.merkle_tree.root()); let folding_alpha = channel.draw_felt(); let folded_layer_evaluation = B::fold_line(&layer.evaluation, folding_alpha, twiddles); @@ -262,41 +271,47 @@ impl, MC: MerkleChannel> FriProver { last_layer_poly } - /// Generates a FRI proof and returns it with the opening positions for the committed columns. + /// Returns a FRI proof and the query positions. /// - /// Returned column opening positions are mapped by their log size. - pub fn decommit( - self, - channel: &mut MC::C, - ) -> (FriProof, BTreeMap) { - let max_column_log_size = self.column_log_sizes[0]; + /// Returned query positions are mapped by column commitment domain log size. + pub fn decommit(self, channel: &mut MC::C) -> (FriProof, BTreeMap>) { + let max_column_log_size = self.first_layer.max_column_log_size(); let queries = Queries::generate(channel, max_column_log_size, self.config.n_queries); - let positions = get_opening_positions(&queries, &self.column_log_sizes); + let column_log_sizes = self.first_layer.column_log_sizes(); + let query_positions_by_log_size = + get_query_positions_by_log_size(&queries, column_log_sizes); let proof = self.decommit_on_queries(&queries); - (proof, positions) + (proof, query_positions_by_log_size) } /// # Panics /// /// Panics if the queries were sampled on the wrong domain size. fn decommit_on_queries(self, queries: &Queries) -> FriProof { - let max_column_log_size = self.column_log_sizes[0]; - assert_eq!(queries.log_domain_size, max_column_log_size); - let first_layer_queries = queries.fold(CIRCLE_TO_LINE_FOLD_STEP); - let inner_layers = self - .inner_layers + let Self { + config: _, + first_layer, + inner_layers, + last_layer_poly, + } = self; + + let first_layer_proof = first_layer.decommit(queries); + + let inner_layer_proofs = inner_layers .into_iter() - .scan(first_layer_queries, |layer_queries, layer| { - let layer_proof = layer.decommit(layer_queries); - *layer_queries = layer_queries.fold(FOLD_STEP); - Some(layer_proof) - }) + .scan( + queries.fold(CIRCLE_TO_LINE_FOLD_STEP), + |layer_queries, layer| { + let layer_proof = layer.decommit(layer_queries); + *layer_queries = layer_queries.fold(FOLD_STEP); + Some(layer_proof) + }, + ) .collect(); - let last_layer_poly = self.last_layer_poly; - FriProof { - inner_layers, + first_layer: first_layer_proof, + inner_layers: inner_layer_proofs, last_layer_poly, } } @@ -304,17 +319,16 @@ impl, MC: MerkleChannel> FriProver { pub struct FriVerifier { config: FriConfig, - /// Alpha used to fold all circle polynomials to univariate polynomials. - circle_poly_alpha: SecureField, - /// Domain size queries should be sampled from. - expected_query_log_domain_size: u32, - /// The list of degree bounds of all committed circle polynomials. - column_bounds: Vec, - inner_layers: Vec>, + // TODO(andrew): The first layer currently commits to all input polynomials. Consider allowing + // flexibility to only commit to input polynomials on a per-log-size basis. 
This allows + // flexibility for cases where committing to the first layer, for a specific log size, isn't + // necessary. FRI would simply return more query positions for the "uncommitted" log sizes. + first_layer: FriFirstLayerVerifier, + inner_layers: Vec>, last_layer_domain: LineDomain, last_layer_poly: LinePoly, /// The queries used for decommitment. Initialized when calling - /// [`FriVerifier::column_opening_positions`]. + /// [`FriVerifier::sample_query_positions()`]. queries: Option, } @@ -343,12 +357,23 @@ impl FriVerifier { ) -> Result { assert!(column_bounds.is_sorted_by_key(|b| Reverse(*b))); + MC::mix_root(channel, proof.first_layer.commitment); + let max_column_bound = column_bounds[0]; - let expected_query_log_domain_size = - max_column_bound.log_degree_bound + config.log_blowup_factor; + let column_commitment_domains = column_bounds + .iter() + .map(|bound| { + let commitment_domain_log_size = bound.log_degree_bound + config.log_blowup_factor; + CanonicCoset::new(commitment_domain_log_size).circle_domain() + }) + .collect(); - // Circle polynomials can all be folded with the same alpha. - let circle_poly_alpha = channel.draw_felt(); + let first_layer = FriFirstLayerVerifier { + column_bounds, + column_commitment_domains, + proof: proof.first_layer, + folding_alpha: channel.draw_felt(), + }; let mut inner_layers = Vec::new(); let mut layer_bound = max_column_bound.fold_to_line(); @@ -359,12 +384,10 @@ impl FriVerifier { for (layer_index, proof) in proof.inner_layers.into_iter().enumerate() { MC::mix_root(channel, proof.commitment); - let folding_alpha = channel.draw_felt(); - - inner_layers.push(FriLayerVerifier { + inner_layers.push(FriInnerLayerVerifier { degree_bound: layer_bound, domain: layer_domain, - folding_alpha, + folding_alpha: channel.draw_felt(), layer_index, proof, }); @@ -390,9 +413,7 @@ impl FriVerifier { Ok(Self { config, - circle_poly_alpha, - column_bounds, - expected_query_log_domain_size, + first_layer, inner_layers, last_layer_domain, last_layer_poly, @@ -402,7 +423,7 @@ impl FriVerifier { /// Verifies the decommitment stage of FRI. /// - /// The decommitment values need to be provided in the same order as their commitment. + /// The query evals need to be provided in the same order as their commitment. /// /// # Panics /// @@ -413,54 +434,68 @@ impl FriVerifier { // TODO(andrew): Finish docs. pub fn decommit( mut self, - decommitted_values: Vec, + first_layer_query_evals: ColumnVec>, ) -> Result<(), FriVerificationError> { let queries = self.queries.take().expect("queries not sampled"); - self.decommit_on_queries(&queries, decommitted_values) + self.decommit_on_queries(&queries, first_layer_query_evals) } fn decommit_on_queries( self, queries: &Queries, - decommitted_values: Vec, + first_layer_query_evals: ColumnVec>, ) -> Result<(), FriVerificationError> { - assert_eq!(queries.log_domain_size, self.expected_query_log_domain_size); - assert_eq!(decommitted_values.len(), self.column_bounds.len()); - + let (inner_layer_queries, first_layer_folded_evals) = + self.decommit_first_layer(queries, first_layer_query_evals)?; let (last_layer_queries, last_layer_query_evals) = - self.decommit_inner_layers(queries, decommitted_values)?; - + self.decommit_inner_layers(&inner_layer_queries, first_layer_folded_evals)?; self.decommit_last_layer(last_layer_queries, last_layer_query_evals) } + /// Verifies the first layer decommitment. + /// + /// Returns the queries and first layer folded column evaluations needed for + /// verifying the remaining layers. 
+ fn decommit_first_layer( + &self, + queries: &Queries, + first_layer_query_evals: ColumnVec>, + ) -> Result<(Queries, ColumnVec>), FriVerificationError> { + self.first_layer + .verify_and_fold(queries, first_layer_query_evals) + } + /// Verifies all inner layer decommitments. /// /// Returns the queries and query evaluations needed for verifying the last FRI layer. fn decommit_inner_layers( &self, queries: &Queries, - decommitted_values: Vec, + first_layer_folded_evals: ColumnVec>, ) -> Result<(Queries, Vec), FriVerificationError> { - let circle_poly_alpha = self.circle_poly_alpha; - let circle_poly_alpha_sq = circle_poly_alpha * circle_poly_alpha; + let first_layer_fold_alpha = self.first_layer.folding_alpha; + let first_layer_fold_alpha_pow_fold_factor = first_layer_fold_alpha.square(); - let mut decommitted_values = decommitted_values.into_iter(); - let mut column_bounds = self.column_bounds.iter().copied().peekable(); - let mut layer_queries = queries.fold(CIRCLE_TO_LINE_FOLD_STEP); + let mut layer_queries = queries.clone(); let mut layer_query_evals = vec![SecureField::zero(); layer_queries.len()]; + let mut first_layer_folded_evals = first_layer_folded_evals.into_iter(); + let mut first_layer_column_bounds = self.first_layer.column_bounds.iter().peekable(); for layer in self.inner_layers.iter() { - // Check for column evals that need to folded into this layer. - while column_bounds + // Check for evals committed in the first layer that need to be folded into this layer. + while first_layer_column_bounds .next_if(|b| b.fold_to_line() == layer.degree_bound) .is_some() { - let sparse_evaluation = decommitted_values.next().unwrap(); - let folded_evals = sparse_evaluation.fold(circle_poly_alpha); - assert_eq!(folded_evals.len(), layer_query_evals.len()); - - for (layer_eval, folded_eval) in zip(&mut layer_query_evals, folded_evals) { - *layer_eval = *layer_eval * circle_poly_alpha_sq + folded_eval; + let folded_column_evals = first_layer_folded_evals.next().unwrap(); + + for (curr_layer_eval, folded_column_eval) in + zip_eq(&mut layer_query_evals, folded_column_evals) + { + // TODO(andrew): As Ilya pointed out using the first layer's folding + // alpha here might not be sound. Investigate. + *curr_layer_eval *= first_layer_fold_alpha_pow_fold_factor; + *curr_layer_eval += folded_column_eval; } } @@ -469,8 +504,8 @@ impl FriVerifier { } // Check all values have been consumed. - assert!(column_bounds.is_empty()); - assert!(decommitted_values.is_empty()); + assert!(first_layer_column_bounds.is_empty()); + assert!(first_layer_folded_evals.is_empty()); Ok((layer_queries, layer_query_evals)) } @@ -498,60 +533,55 @@ impl FriVerifier { Ok(()) } - /// Samples queries and returns the opening positions for each unique column size. - /// - /// The order of the opening positions corresponds to the order of the column commitment. - pub fn column_query_positions( - &mut self, - channel: &mut MC::C, - ) -> BTreeMap { + /// Samples and returns query positions mapped by column log size. 
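// Illustrative sketch (not part of the diff): how `decommit_inner_layers` mixes a folded
// first-layer column into the running evaluation at a query position. The running value is
// scaled by the first layer's folding alpha raised to the fold factor (alpha^2 here) before
// the folded column evaluation is added; the TODO above notes that reusing the first
// layer's alpha for every merge point still needs a soundness review. f64 stands in for
// SecureField.
fn mix_folded_column(layer_eval: f64, folded_column_eval: f64, first_layer_alpha: f64) -> f64 {
    layer_eval * first_layer_alpha.powi(2) + folded_column_eval
}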
+ pub fn sample_query_positions(&mut self, channel: &mut MC::C) -> BTreeMap> { let column_log_sizes = self - .column_bounds + .first_layer + .column_commitment_domains .iter() - .dedup() - .map(|b| b.log_degree_bound + self.config.log_blowup_factor) - .collect_vec(); - let queries = Queries::generate(channel, column_log_sizes[0], self.config.n_queries); - let positions = get_opening_positions(&queries, &column_log_sizes); + .map(|domain| domain.log_size()) + .collect::>(); + let max_column_log_size = *column_log_sizes.iter().max().unwrap(); + let queries = Queries::generate(channel, max_column_log_size, self.config.n_queries); + let query_positions_by_log_size = + get_query_positions_by_log_size(&queries, column_log_sizes); self.queries = Some(queries); - positions + query_positions_by_log_size } } -/// Returns the column opening positions needed for verification. +/// Returns the column query positions mapped by sample domain log size. /// -/// The column log sizes must be unique and in descending order. Returned -/// column opening positions are mapped by their log size. -fn get_opening_positions( +/// The column log sizes must be unique and in descending order. +/// Returned column query positions are mapped by their log size. +fn get_query_positions_by_log_size( queries: &Queries, - column_log_sizes: &[u32], -) -> BTreeMap { - let mut prev_log_size = column_log_sizes[0]; - assert!(prev_log_size == queries.log_domain_size); - let mut prev_queries = queries.clone(); - let mut positions = BTreeMap::new(); - positions.insert(prev_log_size, prev_queries.opening_positions(FOLD_STEP)); - for log_size in column_log_sizes.iter().skip(1) { - let n_folds = prev_log_size - log_size; - let queries = prev_queries.fold(n_folds); - positions.insert(*log_size, queries.opening_positions(FOLD_STEP)); - prev_log_size = *log_size; - prev_queries = queries; - } - positions + column_log_sizes: BTreeSet, +) -> BTreeMap> { + column_log_sizes + .into_iter() + .map(|column_log_size| { + let column_queries = queries.fold(queries.log_domain_size - column_log_size); + (column_log_size, column_queries.positions) + }) + .collect() } #[derive(Clone, Copy, Debug, Error)] pub enum FriVerificationError { #[error("proof contains an invalid number of FRI layers")] InvalidNumFriLayers, - #[error("queries do not resolve to their commitment in layer {layer}")] + #[error("evaluations are invalid in the first layer")] + FirstLayerEvaluationsInvalid, + #[error("queries do not resolve to their commitment in the first layer")] + FirstLayerCommitmentInvalid { error: MerkleVerificationError }, + #[error("queries do not resolve to their commitment in inner layer {inner_layer}")] InnerLayerCommitmentInvalid { - layer: usize, + inner_layer: usize, error: MerkleVerificationError, }, - #[error("evaluations are invalid in layer {layer}")] - InnerLayerEvaluationsInvalid { layer: usize }, + #[error("evaluations are invalid in inner layer {inner_layer}")] + InnerLayerEvaluationsInvalid { inner_layer: usize }, #[error("degree of last layer is invalid")] LastLayerDegreeInvalid, #[error("evaluations in the last layer are invalid")] @@ -609,6 +639,7 @@ impl LinePolyDegreeBound { /// A FRI proof. #[derive(Debug, Serialize, Deserialize)] pub struct FriProof { + pub first_layer: FriLayerProof, pub inner_layers: Vec>, pub last_layer_poly: LinePoly, } @@ -620,19 +651,116 @@ pub const FOLD_STEP: u32 = 1; /// Number of folds when folding a circle polynomial to univariate polynomial. 
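// Illustrative sketch (not part of the diff): what `get_query_positions_by_log_size`
// produces. Queries sampled at the maximum column log size are reused for every smaller
// column by folding them down; folding by `n` is modeled as a right shift by `n` with
// deduplication, which is an assumption about the semantics of `Queries::fold`.
use std::collections::{BTreeMap, BTreeSet};

fn query_positions_by_log_size(
    positions_at_max_log_size: &[usize],
    max_log_size: u32,
    column_log_sizes: &BTreeSet<u32>,
) -> BTreeMap<u32, Vec<usize>> {
    column_log_sizes
        .iter()
        .map(|&log_size| {
            let mut folded: Vec<usize> = positions_at_max_log_size
                .iter()
                .map(|&p| p >> (max_log_size - log_size))
                .collect();
            folded.dedup(); // positions are sorted, so consecutive dedup suffices
            (log_size, folded)
        })
        .collect()
}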
pub const CIRCLE_TO_LINE_FOLD_STEP: u32 = 1; -/// Stores a subset of evaluations in a fri layer with their corresponding merkle decommitments. -/// -/// The subset corresponds to the set of evaluations needed by a FRI verifier. +/// Proof of an individual FRI layer. #[derive(Debug, Serialize, Deserialize)] pub struct FriLayerProof { - /// The subset stored corresponds to the set of evaluations the verifier doesn't have but needs - /// to fold and verify the merkle decommitment. - pub evals_subset: Vec, + /// Values that the verifier needs but cannot deduce from previous computations, in the + /// order they are needed. This complements the values that were queried. These must be + /// supplied directly to the verifier. + pub fri_witness: Vec, pub decommitment: MerkleDecommitment, pub commitment: H::Hash, } -struct FriLayerVerifier { +struct FriFirstLayerVerifier { + /// The list of degree bounds of all circle polynomials commited in the first layer. + column_bounds: Vec, + /// The commitment domain all the circle polynomials in the first layer. + column_commitment_domains: Vec, + folding_alpha: SecureField, + proof: FriLayerProof, +} + +impl FriFirstLayerVerifier { + /// Verifies the layer's merkle decommitment and returns the the folded queries and query evals. + /// + /// # Errors + /// + /// An `Err` will be returned if: + /// * The proof doesn't store enough evaluations. + /// * The merkle decommitment is invalid. + /// + /// # Panics + /// + /// Panics if: + /// * The queries are sampled on the wrong domain. + /// * There are an invalid number of provided column evals. + fn verify_and_fold( + &self, + queries: &Queries, + query_evals_by_column: ColumnVec>, + ) -> Result<(Queries, ColumnVec>), FriVerificationError> { + // Columns are provided in descending order by size. + let max_column_log_size = self.column_commitment_domains[0].log_size(); + assert_eq!(queries.log_domain_size, max_column_log_size); + + let mut fri_witness = self.proof.fri_witness.iter().copied(); + let mut decommitment_positions_by_log_size = BTreeMap::new(); + let mut all_column_decommitment_values = Vec::new(); + let mut folded_evals_by_column = Vec::new(); + + for (&column_domain, column_query_evals) in + zip_eq(&self.column_commitment_domains, query_evals_by_column) + { + let column_queries = queries.fold(queries.log_domain_size - column_domain.log_size()); + + let (column_decommitment_positions, sparse_evaluation) = + compute_decommitment_positions_and_rebuild_evals( + &column_queries, + &column_query_evals, + &mut fri_witness, + CIRCLE_TO_LINE_FOLD_STEP, + ) + .map_err(|InsufficientWitnessError| { + FriVerificationError::FirstLayerEvaluationsInvalid + })?; + + // Columns of the same size have the same decommitment positions. + decommitment_positions_by_log_size + .insert(column_domain.log_size(), column_decommitment_positions); + + // Prepare values in the structure needed for merkle decommitment. + let column_decommitment_values: SecureColumnByCoords = sparse_evaluation + .subset_evals + .iter() + .flatten() + .copied() + .collect(); + + all_column_decommitment_values.extend(column_decommitment_values.columns); + + let folded_evals = sparse_evaluation.fold_circle(self.folding_alpha, column_domain); + folded_evals_by_column.push(folded_evals); + } + + // Check all proof evals have been consumed. 
+ if !fri_witness.is_empty() { + return Err(FriVerificationError::FirstLayerEvaluationsInvalid); + } + + let merkle_verifier = MerkleVerifier::new( + self.proof.commitment, + self.column_commitment_domains + .iter() + .flat_map(|column_domain| [column_domain.log_size(); SECURE_EXTENSION_DEGREE]) + .collect(), + ); + + merkle_verifier + .verify( + &decommitment_positions_by_log_size, + all_column_decommitment_values, + self.proof.decommitment.clone(), + ) + .map_err(|error| FriVerificationError::FirstLayerCommitmentInvalid { error })?; + + let folded_queries = queries.fold(CIRCLE_TO_LINE_FOLD_STEP); + + Ok((folded_queries, folded_evals_by_column)) + } +} + +struct FriInnerLayerVerifier { degree_bound: LinePolyDegreeBound, domain: LineDomain, folding_alpha: SecureField, @@ -640,267 +768,331 @@ struct FriLayerVerifier { proof: FriLayerProof, } -impl FriLayerVerifier { +impl FriInnerLayerVerifier { /// Verifies the layer's merkle decommitment and returns the the folded queries and query evals. /// /// # Errors /// /// An `Err` will be returned if: - /// * The proof doesn't store enough evaluations. + /// * The proof doesn't store the correct number of evaluations. /// * The merkle decommitment is invalid. /// /// # Panics /// - /// Panics if the number of queries doesn't match the number of evals. + /// Panics if: + /// * The number of queries doesn't match the number of evals. + /// * The queries are sampled on the wrong domain. fn verify_and_fold( &self, queries: Queries, evals_at_queries: Vec, ) -> Result<(Queries, Vec), FriVerificationError> { - let decommitment = self.proof.decommitment.clone(); - let commitment = self.proof.commitment; + assert_eq!(queries.log_domain_size, self.domain.log_size()); - // Extract the evals needed for decommitment and folding. - let sparse_evaluation = self.extract_evaluation(&queries, &evals_at_queries)?; + let mut fri_witness = self.proof.fri_witness.iter().copied(); - // TODO: When leaf values are removed from the decommitment, also remove this block. - let actual_decommitment_evals: SecureColumnByCoords = sparse_evaluation - .subline_evals - .iter() - .flat_map(|e| e.values.into_iter()) - .collect(); + let (decommitment_positions, sparse_evaluation) = + compute_decommitment_positions_and_rebuild_evals( + &queries, + &evals_at_queries, + &mut fri_witness, + FOLD_STEP, + ) + .map_err(|InsufficientWitnessError| { + FriVerificationError::InnerLayerEvaluationsInvalid { + inner_layer: self.layer_index, + } + })?; - let folded_queries = queries.fold(FOLD_STEP); + // Check all proof evals have been consumed. + if !fri_witness.is_empty() { + return Err(FriVerificationError::InnerLayerEvaluationsInvalid { + inner_layer: self.layer_index, + }); + } - // Positions of all the decommitment evals. 
- let decommitment_positions = folded_queries + let decommitment_values: SecureColumnByCoords = sparse_evaluation + .subset_evals .iter() - .flat_map(|folded_query| { - let start = folded_query << FOLD_STEP; - let end = start + (1 << FOLD_STEP); - start..end - }) - .collect::>(); + .flatten() + .copied() + .collect(); let merkle_verifier = MerkleVerifier::new( - commitment, + self.proof.commitment, vec![self.domain.log_size(); SECURE_EXTENSION_DEGREE], ); + merkle_verifier .verify( - [(self.domain.log_size(), decommitment_positions)] - .into_iter() - .collect(), - actual_decommitment_evals.columns.to_vec(), - decommitment, + &BTreeMap::from_iter([(self.domain.log_size(), decommitment_positions)]), + decommitment_values.columns.to_vec(), + self.proof.decommitment.clone(), ) .map_err(|e| FriVerificationError::InnerLayerCommitmentInvalid { - layer: self.layer_index, + inner_layer: self.layer_index, error: e, })?; - let evals_at_folded_queries = sparse_evaluation.fold(self.folding_alpha); + let folded_queries = queries.fold(FOLD_STEP); + let folded_evals = sparse_evaluation.fold_line(self.folding_alpha, self.domain); - Ok((folded_queries, evals_at_folded_queries)) + Ok((folded_queries, folded_evals)) } +} - /// Returns the evaluations needed for decommitment. - /// - /// # Errors - /// - /// Returns an `Err` if the proof doesn't store enough evaluations. - /// - /// # Panics - /// - /// Panics if the number of queries doesn't match the number of evals. - fn extract_evaluation( - &self, - queries: &Queries, - evals_at_queries: &[SecureField], - ) -> Result { - // Evals provided by the verifier. - let mut evals_at_queries = evals_at_queries.iter().copied(); - - // Evals stored in the proof. - let mut proof_evals = self.proof.evals_subset.iter().copied(); - - let mut all_subline_evals = Vec::new(); - - // Group queries by the subline they reside in. - for subline_queries in queries.group_by(|a, b| a >> FOLD_STEP == b >> FOLD_STEP) { - let subline_start = (subline_queries[0] >> FOLD_STEP) << FOLD_STEP; - let subline_end = subline_start + (1 << FOLD_STEP); - - let mut subline_evals = Vec::new(); - let mut subline_queries = subline_queries.iter().peekable(); - - // Insert the evals. - for eval_position in subline_start..subline_end { - let eval = match subline_queries.next_if_eq(&&eval_position) { - Some(_) => evals_at_queries.next().unwrap(), - None => proof_evals.next().ok_or( - FriVerificationError::InnerLayerEvaluationsInvalid { - layer: self.layer_index, - }, - )?, - }; - - subline_evals.push(eval); - } +/// Commitment to the first FRI layer. +/// +/// The first layer commits to all circle polynomials (possibly of mixed degree) involved in FRI. +struct FriFirstLayerProver<'a, B: FriOps + MerkleOps, H: MerkleHasher> { + columns: &'a [SecureEvaluation], + merkle_tree: MerkleProver, +} - // Construct the domain. - // TODO(andrew): Create a constructor for LineDomain. 
- let subline_initial_index = bit_reverse_index(subline_start, self.domain.log_size()); - let subline_initial = self.domain.coset().index_at(subline_initial_index); - let subline_domain = LineDomain::new(Coset::new(subline_initial, FOLD_STEP)); +impl<'a, B: FriOps + MerkleOps, H: MerkleHasher> FriFirstLayerProver<'a, B, H> { + fn new(columns: &'a [SecureEvaluation]) -> Self { + let coordinate_columns = extract_coordinate_columns(columns); + let merkle_tree = MerkleProver::commit(coordinate_columns); - all_subline_evals.push(LineEvaluation::new( - subline_domain, - subline_evals.into_iter().collect(), - )); + FriFirstLayerProver { + columns, + merkle_tree, } + } - // Check all proof evals have been consumed. - if !proof_evals.is_empty() { - return Err(FriVerificationError::InnerLayerEvaluationsInvalid { - layer: self.layer_index, - }); + /// Returns the sizes of all circle polynomial commitment domains. + fn column_log_sizes(&self) -> BTreeSet { + self.columns.iter().map(|e| e.domain.log_size()).collect() + } + + fn max_column_log_size(&self) -> u32 { + *self.column_log_sizes().iter().max().unwrap() + } + + fn decommit(self, queries: &Queries) -> FriLayerProof { + let max_column_log_size = *self.column_log_sizes().iter().max().unwrap(); + assert_eq!(queries.log_domain_size, max_column_log_size); + + let mut fri_witness = Vec::new(); + let mut decommitment_positions_by_log_size = BTreeMap::new(); + + for column in self.columns { + let column_log_size = column.domain.log_size(); + let column_queries = queries.fold(queries.log_domain_size - column_log_size); + + let (column_decommitment_positions, column_witness) = + compute_decommitment_positions_and_witness_evals( + column, + &column_queries.positions, + CIRCLE_TO_LINE_FOLD_STEP, + ); + + decommitment_positions_by_log_size + .insert(column_log_size, column_decommitment_positions); + fri_witness.extend(column_witness); + } + + let (_evals, decommitment) = self.merkle_tree.decommit( + &decommitment_positions_by_log_size, + extract_coordinate_columns(self.columns), + ); + + let commitment = self.merkle_tree.root(); + + FriLayerProof { + fri_witness, + decommitment, + commitment, } + } +} - Ok(SparseLineEvaluation::new(all_subline_evals)) +/// Extracts all base field coordinate columns from each secure column. +fn extract_coordinate_columns( + columns: &[SecureEvaluation], +) -> Vec<&Col> { + let mut coordinate_columns = Vec::new(); + + for secure_column in columns { + for coordinate_column in secure_column.columns.iter() { + coordinate_columns.push(coordinate_column); + } } + + coordinate_columns } /// A FRI layer comprises of a merkle tree that commits to evaluations of a polynomial. /// /// The polynomial evaluations are viewed as evaluation of a polynomial on multiple distinct cosets /// of size two. Each leaf of the merkle tree commits to a single coset evaluation. -// TODO(andrew): Support different step sizes. -struct FriLayerProver, H: MerkleHasher> { +// TODO(andrew): Support different step sizes and update docs. +// TODO(andrew): The docs are wrong. Each leaf of the merkle tree commits to a single +// QM31 value. This is inefficient and should be changed. 
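// Illustrative sketch (not part of the diff): the first-layer merkle tree commits to the
// SECURE_EXTENSION_DEGREE (= 4) base-field coordinate columns of every secure column, as
// done by `extract_coordinate_columns` above. Consequently the verifier-side
// `MerkleVerifier::new` call repeats each commitment-domain log size four times.
fn merkle_column_log_sizes(column_domain_log_sizes: &[u32]) -> Vec<u32> {
    const SECURE_EXTENSION_DEGREE: usize = 4;
    column_domain_log_sizes
        .iter()
        .flat_map(|&log_size| [log_size; SECURE_EXTENSION_DEGREE])
        .collect()
}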
+struct FriInnerLayerProver, H: MerkleHasher> { evaluation: LineEvaluation, merkle_tree: MerkleProver, } -impl, H: MerkleHasher> FriLayerProver { +impl, H: MerkleHasher> FriInnerLayerProver { fn new(evaluation: LineEvaluation) -> Self { let merkle_tree = MerkleProver::commit(evaluation.values.columns.iter().collect_vec()); - #[allow(unreachable_code)] - FriLayerProver { + FriInnerLayerProver { evaluation, merkle_tree, } } - /// Generates a decommitment of the subline evaluations at the specified positions. fn decommit(self, queries: &Queries) -> FriLayerProof { - let mut decommit_positions = Vec::new(); - let mut evals_subset = Vec::new(); - - // Group queries by the subline they reside in. - // TODO(andrew): Explain what a "subline" is at the top of the module. - for query_group in queries.group_by(|a, b| a >> FOLD_STEP == b >> FOLD_STEP) { - let subline_start = (query_group[0] >> FOLD_STEP) << FOLD_STEP; - let subline_end = subline_start + (1 << FOLD_STEP); - - let mut subline_queries = query_group.iter().peekable(); - - for eval_position in subline_start..subline_end { - // Add decommitment position. - decommit_positions.push(eval_position); - - // Skip evals the verifier can calculate. - if subline_queries.next_if_eq(&&eval_position).is_some() { - continue; - } - - let eval = self.evaluation.values.at(eval_position); - evals_subset.push(eval); - } - } - - let commitment = self.merkle_tree.root(); - // TODO(andrew): Use _evals. + let (decommitment_positions, fri_witness) = + compute_decommitment_positions_and_witness_evals( + &self.evaluation.values, + queries, + FOLD_STEP, + ); + + let layer_log_size = self.evaluation.domain().log_size(); let (_evals, decommitment) = self.merkle_tree.decommit( - &[(self.evaluation.len().ilog2(), decommit_positions)] - .into_iter() - .collect(), + &BTreeMap::from_iter([(layer_log_size, decommitment_positions)]), self.evaluation.values.columns.iter().collect_vec(), ); + let commitment = self.merkle_tree.root(); + FriLayerProof { - evals_subset, + fri_witness, decommitment, commitment, } } } -/// Holds a foldable subset of circle polynomial evaluations. -#[derive(Debug, Clone)] -pub struct SparseCircleEvaluation { - subcircle_evals: Vec>, -} +/// Returns a column's merkle tree decommitment positions and the evals the verifier can't +/// deduce from previous computations but requires for decommitment and folding. +fn compute_decommitment_positions_and_witness_evals( + column: &SecureColumnByCoords, + query_positions: &[usize], + fold_step: u32, +) -> (Vec, Vec) { + let mut decommitment_positions = Vec::new(); + let mut witness_evals = Vec::new(); + + // Group queries by the folding coset they reside in. + for subset_queries in query_positions.group_by(|a, b| a >> fold_step == b >> fold_step) { + let subset_start = (subset_queries[0] >> fold_step) << fold_step; + let subset_decommitment_positions = subset_start..subset_start + (1 << fold_step); + let mut subset_queries_iter = subset_queries.iter().peekable(); + + for position in subset_decommitment_positions { + // Add decommitment position. + decommitment_positions.push(position); + + // Skip evals the verifier can calculate. + if subset_queries_iter.next_if_eq(&&position).is_some() { + continue; + } -impl SparseCircleEvaluation { - /// # Panics - /// - /// Panics if the evaluation domain sizes don't equal the folding factor. 
- pub fn new( - subcircle_evals: Vec>, - ) -> Self { - let folding_factor = 1 << CIRCLE_TO_LINE_FOLD_STEP; - assert!(subcircle_evals.iter().all(|e| e.len() == folding_factor)); - Self { subcircle_evals } + let eval = column.at(position); + witness_evals.push(eval); + } } - fn fold(self, alpha: SecureField) -> Vec { - self.subcircle_evals - .into_iter() - .map(|e| { - let buffer_domain = LineDomain::new(e.domain.half_coset); - let mut buffer = LineEvaluation::new_zero(buffer_domain); - fold_circle_into_line( - &mut buffer, - &SecureEvaluation::new(e.domain, e.values.into_iter().collect()), - alpha, - ); - buffer.values.at(0) - }) - .collect() - } + (decommitment_positions, witness_evals) } -impl<'a> IntoIterator for &'a mut SparseCircleEvaluation { - type Item = &'a mut CircleEvaluation; - type IntoIter = - std::slice::IterMut<'a, CircleEvaluation>; +/// Returns a column's merkle tree decommitment positions and re-builds the evaluations needed by +/// the verifier for folding and decommitment. +/// +/// # Panics +/// +/// Panics if the number of queries doesn't match the number of query evals. +fn compute_decommitment_positions_and_rebuild_evals( + queries: &Queries, + query_evals: &[QM31], + mut witness_evals: impl Iterator, + fold_step: u32, +) -> Result<(Vec, SparseEvaluation), InsufficientWitnessError> { + let mut query_evals = query_evals.iter().copied(); + + let mut decommitment_positions = Vec::new(); + let mut subset_evals = Vec::new(); + let mut subset_domain_index_initials = Vec::new(); + + // Group queries by the subset they reside in. + for subset_queries in queries.group_by(|a, b| a >> fold_step == b >> fold_step) { + let subset_start = (subset_queries[0] >> fold_step) << fold_step; + let subset_decommitment_positions = subset_start..subset_start + (1 << fold_step); + decommitment_positions.extend(subset_decommitment_positions.clone()); + + let mut subset_queries_iter = subset_queries.iter().copied().peekable(); + + let subset_eval = subset_decommitment_positions + .map(|position| match subset_queries_iter.next_if_eq(&position) { + Some(_) => Ok(query_evals.next().unwrap()), + None => witness_evals.next().ok_or(InsufficientWitnessError), + }) + .collect::>()?; - fn into_iter(self) -> Self::IntoIter { - self.subcircle_evals.iter_mut() + subset_evals.push(subset_eval); + subset_domain_index_initials.push(bit_reverse_index(subset_start, queries.log_domain_size)); } + + let sparse_evaluation = SparseEvaluation::new(subset_evals, subset_domain_index_initials); + + Ok((decommitment_positions, sparse_evaluation)) } -/// Holds a small foldable subset of univariate SecureField polynomial evaluations. -/// Evaluation is held at the CPU backend. -#[derive(Debug, Clone)] -struct SparseLineEvaluation { - subline_evals: Vec>, +#[derive(Debug)] +struct InsufficientWitnessError; + +/// Foldable subsets of evaluations on a [`CirclePoly`] or [`LinePoly`]. +/// +/// [`CirclePoly`]: crate::core::poly::circle::CirclePoly +struct SparseEvaluation { + // TODO(andrew): Perhaps subset isn't the right word. Coset, Subgroup? + subset_evals: Vec>, + subset_domain_initial_indexes: Vec, } -impl SparseLineEvaluation { +impl SparseEvaluation { /// # Panics /// - /// Panics if the evaluation domain sizes don't equal the folding factor. 
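// Illustrative sketch (not part of the diff): which positions the prover decommits and
// which evaluations it must ship in `fri_witness`, mirroring
// `compute_decommitment_positions_and_witness_evals` above. Every position in a folding
// coset touched by a query is decommitted; the ones the verifier did not query are the
// witness. E.g. for queries [4, 6, 7] and fold_step = 1 this returns positions
// [4, 5, 6, 7] and witness position [5].
fn decommitment_and_witness_positions(
    query_positions: &[usize],
    fold_step: u32,
) -> (Vec<usize>, Vec<usize>) {
    use std::collections::BTreeSet;
    let queried: BTreeSet<usize> = query_positions.iter().copied().collect();
    let cosets: BTreeSet<usize> = query_positions.iter().map(|&q| q >> fold_step).collect();
    let mut decommitment_positions = Vec::new();
    let mut witness_positions = Vec::new();
    for coset in cosets {
        let start = coset << fold_step;
        for position in start..start + (1usize << fold_step) {
            decommitment_positions.push(position);
            if !queried.contains(&position) {
                witness_positions.push(position);
            }
        }
    }
    (decommitment_positions, witness_positions)
}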
- fn new(subline_evals: Vec>) -> Self { - let folding_factor = 1 << FOLD_STEP; - assert!(subline_evals.iter().all(|e| e.len() == folding_factor)); - Self { subline_evals } + /// Panics if a subset size doesn't equal `2^FOLD_STEP` or there aren't the same number of + /// domain indexes as subsets. + fn new(subset_evals: Vec>, subset_domain_initial_indexes: Vec) -> Self { + let fold_factor = 1 << FOLD_STEP; + assert!(subset_evals.iter().all(|e| e.len() == fold_factor)); + assert_eq!(subset_evals.len(), subset_domain_initial_indexes.len()); + Self { + subset_evals, + subset_domain_initial_indexes, + } } - fn fold(self, alpha: SecureField) -> Vec { - self.subline_evals - .into_iter() - .map(|e| fold_line(&e, alpha).values.at(0)) + fn fold_line(self, fold_alpha: SecureField, source_domain: LineDomain) -> Vec { + zip(self.subset_evals, self.subset_domain_initial_indexes) + .map(|(eval, domain_initial_index)| { + let fold_domain_initial = source_domain.coset().index_at(domain_initial_index); + let fold_domain = LineDomain::new(Coset::new(fold_domain_initial, FOLD_STEP)); + let eval = LineEvaluation::new(fold_domain, eval.into_iter().collect()); + fold_line(&eval, fold_alpha).values.at(0) + }) + .collect() + } + + fn fold_circle(self, fold_alpha: SecureField, source_domain: CircleDomain) -> Vec { + zip(self.subset_evals, self.subset_domain_initial_indexes) + .map(|(eval, domain_initial_index)| { + let fold_domain_initial = source_domain.index_at(domain_initial_index); + let fold_domain = CircleDomain::new(Coset::new( + fold_domain_initial, + CIRCLE_TO_LINE_FOLD_STEP - 1, + )); + let eval = SecureEvaluation::new(fold_domain, eval.into_iter().collect()); + let mut buffer = LineEvaluation::new_zero(LineDomain::new(fold_domain.half_coset)); + fold_circle_into_line(&mut buffer, &eval, fold_alpha); + buffer.values.at(0) + }) .collect() } } @@ -968,13 +1160,14 @@ pub fn fold_circle_into_line( #[cfg(test)] mod tests { + use std::assert_matches::assert_matches; use std::iter::zip; use itertools::Itertools; use num_traits::{One, Zero}; - use super::{get_opening_positions, FriVerificationError, SparseCircleEvaluation}; - use crate::core::backend::cpu::{CpuCircleEvaluation, CpuCirclePoly}; + use super::FriVerificationError; + use crate::core::backend::cpu::CpuCirclePoly; use crate::core::backend::{ColumnOps, CpuBackend}; use crate::core::circle::{CirclePointIndex, Coset}; use crate::core::fields::m31::BaseField; @@ -986,28 +1179,23 @@ mod tests { }; use crate::core::poly::circle::{CircleDomain, PolyOps, SecureEvaluation}; use crate::core::poly::line::{LineDomain, LineEvaluation, LinePoly}; - use crate::core::poly::{BitReversedOrder, NaturalOrder}; - use crate::core::queries::{Queries, SparseSubCircleDomain}; + use crate::core::poly::BitReversedOrder; + use crate::core::queries::Queries; use crate::core::test_utils::test_channel; - use crate::core::utils::bit_reverse_index; use crate::core::vcs::blake2_merkle::Blake2sMerkleChannel; /// Default blowup factor used for tests. const LOG_BLOWUP_FACTOR: u32 = 2; - type FriProver = super::FriProver; + type FriProver<'a> = super::FriProver<'a, CpuBackend, Blake2sMerkleChannel>; type FriVerifier = super::FriVerifier; #[test] fn fold_line_works() { const DEGREE: usize = 8; // Coefficients are bit-reversed. 
- let even_coeffs: [SecureField; DEGREE / 2] = [1, 2, 1, 3] - .map(BaseField::from_u32_unchecked) - .map(SecureField::from); - let odd_coeffs: [SecureField; DEGREE / 2] = [3, 5, 4, 1] - .map(BaseField::from_u32_unchecked) - .map(SecureField::from); + let even_coeffs: [SecureField; DEGREE / 2] = [1, 2, 1, 3].map(SecureField::from); + let odd_coeffs: [SecureField; DEGREE / 2] = [3, 5, 4, 1].map(SecureField::from); let poly = LinePoly::new([even_coeffs, odd_coeffs].concat()); let even_poly = LinePoly::new(even_coeffs.to_vec()); let odd_poly = LinePoly::new(odd_coeffs.to_vec()); @@ -1055,48 +1243,38 @@ mod tests { const LOG_EXPECTED_BLOWUP_FACTOR: u32 = LOG_BLOWUP_FACTOR; const LOG_INVALID_BLOWUP_FACTOR: u32 = LOG_BLOWUP_FACTOR - 1; let config = FriConfig::new(2, LOG_EXPECTED_BLOWUP_FACTOR, 3); - let evaluation = polynomial_evaluation(6, LOG_INVALID_BLOWUP_FACTOR); + let column = &[polynomial_evaluation(6, LOG_INVALID_BLOWUP_FACTOR)]; + let twiddles = CpuBackend::precompute_twiddles(column[0].domain.half_coset); - FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + FriProver::commit(&mut test_channel(), config, column, &twiddles); } #[test] #[should_panic = "not canonic"] - fn committing_evaluation_from_invalid_domain_fails() { + fn committing_column_from_invalid_domain_fails() { let invalid_domain = CircleDomain::new(Coset::new(CirclePointIndex::generator(), 3)); assert!(!invalid_domain.is_canonic(), "must be an invalid domain"); - let evaluation = SecureEvaluation::new( + let config = FriConfig::new(2, 2, 3); + let column = SecureEvaluation::new( invalid_domain, - vec![SecureField::one(); 1 << 4].into_iter().collect(), + [SecureField::one(); 1 << 4].into_iter().collect(), ); + let twiddles = CpuBackend::precompute_twiddles(column.domain.half_coset); + let columns = &[column]; - FriProver::commit( - &mut test_channel(), - FriConfig::new(2, 2, 3), - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + FriProver::commit(&mut test_channel(), config, columns, &twiddles); } #[test] fn valid_proof_passes_verification() -> Result<(), FriVerificationError> { - const LOG_DEGREE: u32 = 3; - let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); - let log_domain_size = evaluation.domain.log_size(); - let queries = Queries::from_positions(vec![5], log_domain_size); + const LOG_DEGREE: u32 = 4; + let column = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(column.domain.half_coset); + let queries = Queries::from_positions(vec![5], column.domain.log_size()); let config = FriConfig::new(1, LOG_BLOWUP_FACTOR, queries.len()); - let decommitment_value = query_polynomial(&evaluation, &queries); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let decommitment_value = query_polynomial(&column, &queries); + let columns = &[column]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let proof = prover.decommit_on_queries(&queries); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let verifier = FriVerifier::commit(&mut test_channel(), config, proof, bound).unwrap(); @@ -1109,17 +1287,13 @@ mod tests { { const LOG_DEGREE: u32 = 3; const LAST_LAYER_LOG_BOUND: u32 = 0; - let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); - let 
log_domain_size = evaluation.domain.log_size(); - let queries = Queries::from_positions(vec![5], log_domain_size); + let column = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(column.domain.half_coset); + let queries = Queries::from_positions(vec![5], column.domain.log_size()); let config = FriConfig::new(LAST_LAYER_LOG_BOUND, LOG_BLOWUP_FACTOR, queries.len()); - let decommitment_value = query_polynomial(&evaluation, &queries); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let decommitment_value = query_polynomial(&column, &queries); + let columns = &[column]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let proof = prover.decommit_on_queries(&queries); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let verifier = FriVerifier::commit(&mut test_channel(), config, proof, bound).unwrap(); @@ -1130,62 +1304,56 @@ mod tests { #[test] fn valid_mixed_degree_proof_passes_verification() -> Result<(), FriVerificationError> { const LOG_DEGREES: [u32; 3] = [6, 5, 4]; - let evaluations = LOG_DEGREES.map(|log_d| polynomial_evaluation(log_d, LOG_BLOWUP_FACTOR)); - let log_domain_size = evaluations[0].domain.log_size(); + let columns = LOG_DEGREES.map(|log_d| polynomial_evaluation(log_d, LOG_BLOWUP_FACTOR)); + let twiddles = CpuBackend::precompute_twiddles(columns[0].domain.half_coset); + let log_domain_size = columns[0].domain.log_size(); let queries = Queries::from_positions(vec![7, 70], log_domain_size); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, queries.len()); - let prover = FriProver::commit( - &mut test_channel(), - config, - &evaluations, - &CpuBackend::precompute_twiddles(evaluations[0].domain.half_coset), - ); - let decommitment_values = evaluations.map(|p| query_polynomial(&p, &queries)).to_vec(); + let prover = FriProver::commit(&mut test_channel(), config, &columns, &twiddles); let proof = prover.decommit_on_queries(&queries); + let query_evals = columns.map(|p| query_polynomial(&p, &queries)).to_vec(); let bounds = LOG_DEGREES.map(CirclePolyDegreeBound::new).to_vec(); let verifier = FriVerifier::commit(&mut test_channel(), config, proof, bounds).unwrap(); - verifier.decommit_on_queries(&queries, decommitment_values) + verifier.decommit_on_queries(&queries, query_evals) } #[test] - fn valid_mixed_degree_end_to_end_proof_passes_verification() -> Result<(), FriVerificationError> - { + fn mixed_degree_proof_with_queries_sampled_from_channel_passes_verification( + ) -> Result<(), FriVerificationError> { const LOG_DEGREES: [u32; 3] = [6, 5, 4]; - let evaluations = LOG_DEGREES.map(|log_d| polynomial_evaluation(log_d, LOG_BLOWUP_FACTOR)); + let columns = LOG_DEGREES.map(|log_d| polynomial_evaluation(log_d, LOG_BLOWUP_FACTOR)); + let twiddles = CpuBackend::precompute_twiddles(columns[0].domain.half_coset); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, 3); - let prover = FriProver::commit( - &mut test_channel(), - config, - &evaluations, - &CpuBackend::precompute_twiddles(evaluations[0].domain.half_coset), - ); - let (proof, prover_opening_positions) = prover.decommit(&mut test_channel()); - let decommitment_values = zip(&evaluations, prover_opening_positions.values().rev()) - .map(|(poly, positions)| open_polynomial(poly, positions)) - .collect(); + let prover = FriProver::commit(&mut test_channel(), config, &columns, &twiddles); + let (proof, 
prover_query_positions_by_log_size) = prover.decommit(&mut test_channel()); + let query_evals_by_column = columns.map(|eval| { + let query_positions = &prover_query_positions_by_log_size[&eval.domain.log_size()]; + query_polynomial_at_positions(&eval, query_positions) + }); let bounds = LOG_DEGREES.map(CirclePolyDegreeBound::new).to_vec(); let mut verifier = FriVerifier::commit(&mut test_channel(), config, proof, bounds).unwrap(); - let verifier_opening_positions = verifier.column_query_positions(&mut test_channel()); + let verifier_query_positions_by_log_size = + verifier.sample_query_positions(&mut test_channel()); - assert_eq!(prover_opening_positions, verifier_opening_positions); - verifier.decommit(decommitment_values) + assert_eq!( + prover_query_positions_by_log_size, + verifier_query_positions_by_log_size + ); + verifier.decommit(query_evals_by_column.to_vec()) } #[test] fn proof_with_removed_layer_fails_verification() { const LOG_DEGREE: u32 = 6; let evaluation = polynomial_evaluation(6, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![1], log_domain_size); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, queries.len()); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let proof = prover.decommit_on_queries(&queries); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; // Set verifier's config to expect one extra layer than prover config. @@ -1204,15 +1372,12 @@ mod tests { fn proof_with_added_layer_fails_verification() { const LOG_DEGREE: u32 = 6; let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![1], log_domain_size); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, queries.len()); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let proof = prover.decommit_on_queries(&queries); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; // Set verifier's config to expect one less layer than prover config. 
@@ -1231,56 +1396,50 @@ mod tests { fn proof_with_invalid_inner_layer_evaluation_fails_verification() { const LOG_DEGREE: u32 = 6; let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![5], log_domain_size); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, queries.len()); let decommitment_value = query_polynomial(&evaluation, &queries); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let mut proof = prover.decommit_on_queries(&queries); // Remove an evaluation from the second layer's proof. - proof.inner_layers[1].evals_subset.pop(); + proof.inner_layers[1].fri_witness.pop(); let verifier = FriVerifier::commit(&mut test_channel(), config, proof, bound).unwrap(); let verification_result = verifier.decommit_on_queries(&queries, vec![decommitment_value]); - assert!(matches!( + assert_matches!( verification_result, - Err(FriVerificationError::InnerLayerEvaluationsInvalid { layer: 1 }) - )); + Err(FriVerificationError::InnerLayerEvaluationsInvalid { inner_layer: 1 }) + ); } #[test] fn proof_with_invalid_inner_layer_decommitment_fails_verification() { const LOG_DEGREE: u32 = 6; let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![5], log_domain_size); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, queries.len()); let decommitment_value = query_polynomial(&evaluation, &queries); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let mut proof = prover.decommit_on_queries(&queries); // Modify the committed values in the second layer. - proof.inner_layers[1].evals_subset[0] += BaseField::one(); + proof.inner_layers[1].fri_witness[0] += BaseField::one(); let verifier = FriVerifier::commit(&mut test_channel(), config, proof, bound).unwrap(); let verification_result = verifier.decommit_on_queries(&queries, vec![decommitment_value]); - assert!(matches!( + assert_matches!( verification_result, - Err(FriVerificationError::InnerLayerCommitmentInvalid { layer: 1, .. }) - )); + Err(FriVerificationError::InnerLayerCommitmentInvalid { inner_layer: 1, .. 
}) + ); } #[test] @@ -1288,15 +1447,12 @@ mod tests { const LOG_DEGREE: u32 = 6; const LOG_MAX_LAST_LAYER_DEGREE: u32 = 2; let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![1, 7, 8], log_domain_size); let config = FriConfig::new(LOG_MAX_LAST_LAYER_DEGREE, LOG_BLOWUP_FACTOR, queries.len()); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let mut proof = prover.decommit_on_queries(&queries); let bad_last_layer_coeffs = vec![One::one(); 1 << (LOG_MAX_LAST_LAYER_DEGREE + 1)]; @@ -1314,16 +1470,13 @@ mod tests { fn proof_with_invalid_last_layer_fails_verification() { const LOG_DEGREE: u32 = 6; let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![1, 7, 8], log_domain_size); let config = FriConfig::new(2, LOG_BLOWUP_FACTOR, queries.len()); let decommitment_value = query_polynomial(&evaluation, &queries); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let mut proof = prover.decommit_on_queries(&queries); // Compromise the last layer polynomial's first coefficient. @@ -1332,10 +1485,10 @@ mod tests { let verification_result = verifier.decommit_on_queries(&queries, vec![decommitment_value]); - assert!(matches!( + assert_matches!( verification_result, Err(FriVerificationError::LastLayerEvaluationsInvalid) - )); + ); } #[test] @@ -1343,16 +1496,13 @@ mod tests { fn decommit_queries_on_invalid_domain_fails_verification() { const LOG_DEGREE: u32 = 3; let evaluation = polynomial_evaluation(LOG_DEGREE, LOG_BLOWUP_FACTOR); + let twiddles = CpuBackend::precompute_twiddles(evaluation.domain.half_coset); let log_domain_size = evaluation.domain.log_size(); let queries = Queries::from_positions(vec![5], log_domain_size); let config = FriConfig::new(1, LOG_BLOWUP_FACTOR, queries.len()); let decommitment_value = query_polynomial(&evaluation, &queries); - let prover = FriProver::commit( - &mut test_channel(), - config, - &[evaluation.clone()], - &CpuBackend::precompute_twiddles(evaluation.domain.half_coset), - ); + let columns = &[evaluation]; + let prover = FriProver::commit(&mut test_channel(), config, columns, &twiddles); let proof = prover.decommit_on_queries(&queries); let bound = vec![CirclePolyDegreeBound::new(LOG_DEGREE)]; let verifier = FriVerifier::commit(&mut test_channel(), config, proof, bound).unwrap(); @@ -1384,40 +1534,18 @@ mod tests { (degree + 1).ilog2() } - // TODO: Remove after SubcircleDomain integration. 
fn query_polynomial( polynomial: &SecureEvaluation, queries: &Queries, - ) -> SparseCircleEvaluation { - let polynomial_log_size = polynomial.domain.log_size(); - let positions = - get_opening_positions(queries, &[queries.log_domain_size, polynomial_log_size]); - open_polynomial(polynomial, &positions[&polynomial_log_size]) + ) -> Vec { + let queries = queries.fold(queries.log_domain_size - polynomial.domain.log_size()); + query_polynomial_at_positions(polynomial, &queries.positions) } - fn open_polynomial( + fn query_polynomial_at_positions( polynomial: &SecureEvaluation, - positions: &SparseSubCircleDomain, - ) -> SparseCircleEvaluation { - let coset_evals = positions - .iter() - .map(|position| { - let coset_domain = position.to_circle_domain(&polynomial.domain); - let evals = coset_domain - .iter_indices() - .map(|p| { - polynomial.at(bit_reverse_index( - polynomial.domain.find(p).unwrap(), - polynomial.domain.log_size(), - )) - }) - .collect(); - let coset_eval = - CpuCircleEvaluation::::new(coset_domain, evals); - coset_eval.bit_reverse() - }) - .collect(); - - SparseCircleEvaluation::new(coset_evals) + query_positions: &[usize], + ) -> Vec { + query_positions.iter().map(|p| polynomial.at(*p)).collect() } } diff --git a/crates/prover/src/core/pcs/prover.rs b/crates/prover/src/core/pcs/prover.rs index ed2f67376..ef27f706d 100644 --- a/crates/prover/src/core/pcs/prover.rs +++ b/crates/prover/src/core/pcs/prover.rs @@ -125,16 +125,13 @@ impl<'a, B: BackendForChannel, MC: MerkleChannel> CommitmentSchemeProver<'a, channel.mix_u64(proof_of_work); // FRI decommitment phase. - let (fri_proof, fri_query_domains) = fri_prover.decommit(channel); + let (fri_proof, query_positions_per_log_size) = fri_prover.decommit(channel); // Decommit the FRI queries on the merkle trees. - let decommitment_results = self.trees.as_ref().map(|tree| { - let queries = fri_query_domains - .iter() - .map(|(&log_size, domain)| (log_size, domain.flatten())) - .collect(); - tree.decommit(&queries) - }); + let decommitment_results = self + .trees + .as_ref() + .map(|tree| tree.decommit(&query_positions_per_log_size)); let queried_values = decommitment_results.as_ref().map(|(v, _)| v.clone()); let decommitments = decommitment_results.map(|(_, d)| d); diff --git a/crates/prover/src/core/pcs/quotients.rs b/crates/prover/src/core/pcs/quotients.rs index 1a41e8303..aca0901c6 100644 --- a/crates/prover/src/core/pcs/quotients.rs +++ b/crates/prover/src/core/pcs/quotients.rs @@ -9,14 +9,13 @@ use crate::core::backend::cpu::quotients::{accumulate_row_quotients, quotient_co use crate::core::circle::CirclePoint; use crate::core::fields::m31::BaseField; use crate::core::fields::qm31::SecureField; -use crate::core::fri::SparseCircleEvaluation; use crate::core::poly::circle::{ CanonicCoset, CircleDomain, CircleEvaluation, PolyOps, SecureEvaluation, }; use crate::core::poly::BitReversedOrder; use crate::core::prover::VerificationError; -use crate::core::queries::SparseSubCircleDomain; use crate::core::utils::bit_reverse_index; +use crate::core::ColumnVec; pub trait QuotientOps: PolyOps { /// Accumulates the quotients of the columns at the given domain. 
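The reworked `query_polynomial` test helper above first folds the queries from the sampling domain down to the polynomial's own domain and then reads the evaluation directly at each folded position. A minimal sketch of the folding semantics, assuming each fold halves the domain so a position maps to `position >> n_folds` and that inputs are sorted so duplicates end up adjacent; these free functions are illustrative, not the crate's API:

```rust
/// Map query positions from a larger domain onto one that has been folded
/// `n_folds` times; equal results are deduplicated (inputs are sorted).
fn fold_positions(positions: &[usize], n_folds: u32) -> Vec<usize> {
    let mut folded: Vec<usize> = positions.iter().map(|&p| p >> n_folds).collect();
    folded.dedup();
    folded
}

/// Read the values of a (bit-reversed) evaluation at the given positions.
fn values_at_positions<F: Copy>(evals: &[F], positions: &[usize]) -> Vec<F> {
    positions.iter().map(|&p| evals[p]).collect()
}
```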
@@ -104,22 +103,22 @@ pub fn fri_answers( column_log_sizes: Vec, samples: &[Vec], random_coeff: SecureField, - query_domain_per_log_size: BTreeMap, + query_positions_per_log_size: &BTreeMap>, queried_values_per_column: &[Vec], -) -> Result, VerificationError> { +) -> Result>, VerificationError> { izip!(column_log_sizes, samples, queried_values_per_column) .sorted_by_key(|(log_size, ..)| Reverse(*log_size)) .group_by(|(log_size, ..)| *log_size) .into_iter() .map(|(log_size, tuples)| { - let (_, samples, queried_valued_per_column): (Vec<_>, Vec<_>, Vec<_>) = + let (_, samples, queried_values_per_column): (Vec<_>, Vec<_>, Vec<_>) = multiunzip(tuples); fri_answers_for_log_size( log_size, &samples, random_coeff, - &query_domain_per_log_size[&log_size], - &queried_valued_per_column, + &query_positions_per_log_size[&log_size], + &queried_values_per_column, ) }) .collect() @@ -129,59 +128,37 @@ pub fn fri_answers_for_log_size( log_size: u32, samples: &[&Vec], random_coeff: SecureField, - query_domain: &SparseSubCircleDomain, + query_positions: &[usize], queried_values_per_column: &[&Vec], -) -> Result { - let commitment_domain = CanonicCoset::new(log_size).circle_domain(); - let sample_batches = ColumnSampleBatch::new_vec(samples); +) -> Result, VerificationError> { for queried_values in queried_values_per_column { - if queried_values.len() != query_domain.flatten().len() { + if queried_values.len() != query_positions.len() { return Err(VerificationError::InvalidStructure( "Insufficient number of queried values".to_string(), )); } } - let mut queried_values_per_column = queried_values_per_column - .iter() - .map(|q| q.iter()) - .collect_vec(); - - let mut evals = Vec::new(); - for subdomain in query_domain.iter() { - let domain = subdomain.to_circle_domain(&commitment_domain); - let quotient_constants = quotient_constants(&sample_batches, random_coeff, domain); - let mut column_evals = Vec::new(); - for queried_values in queried_values_per_column.iter_mut() { - let eval = CircleEvaluation::new( - domain, - queried_values.take(domain.size()).copied().collect_vec(), - ); - column_evals.push(eval); - } - let mut values = Vec::new(); - for row in 0..domain.size() { - let domain_point = domain.at(bit_reverse_index(row, log_size)); - let value = accumulate_row_quotients( - &sample_batches, - &column_evals.iter().collect_vec(), - "ient_constants, - row, - domain_point, - ); - values.push(value); - } - let eval = CircleEvaluation::new(domain, values); - evals.push(eval); - } + let sample_batches = ColumnSampleBatch::new_vec(samples); + let quotient_constants = quotient_constants(&sample_batches, random_coeff); + let commitment_domain = CanonicCoset::new(log_size).circle_domain(); + let mut quotient_evals_at_queries = Vec::new(); - let res = SparseCircleEvaluation::new(evals); - if !queried_values_per_column.iter().all(|x| x.is_empty()) { - return Err(VerificationError::InvalidStructure( - "Too many queried values".to_string(), + for (row, &query_position) in query_positions.iter().enumerate() { + let domain_point = commitment_domain.at(bit_reverse_index(query_position, log_size)); + let queried_values_at_row = queried_values_per_column + .iter() + .map(|col| col[row]) + .collect_vec(); + quotient_evals_at_queries.push(accumulate_row_quotients( + &sample_batches, + &queried_values_at_row, + "ient_constants, + domain_point, )); } - Ok(res) + + Ok(quotient_evals_at_queries) } #[cfg(test)] diff --git a/crates/prover/src/core/pcs/verifier.rs b/crates/prover/src/core/pcs/verifier.rs index 7f740aaac..200fe98d5 
100644 --- a/crates/prover/src/core/pcs/verifier.rs +++ b/crates/prover/src/core/pcs/verifier.rs @@ -83,8 +83,8 @@ impl CommitmentSchemeVerifier { return Err(VerificationError::ProofOfWork); } - // Get FRI query domains. - let fri_query_domains = fri_verifier.column_query_positions(channel); + // Get FRI query positions. + let query_positions_per_log_size = fri_verifier.sample_query_positions(channel); // Verify merkle decommitments. self.trees @@ -92,11 +92,7 @@ impl CommitmentSchemeVerifier { .zip_eq(proof.decommitments) .zip_eq(proof.queried_values.clone()) .map(|((tree, decommitment), queried_values)| { - let queries = fri_query_domains - .iter() - .map(|(&log_size, domain)| (log_size, domain.flatten())) - .collect(); - tree.verify(queries, queried_values, decommitment) + tree.verify(&query_positions_per_log_size, queried_values, decommitment) }) .0 .into_iter() @@ -116,11 +112,12 @@ impl CommitmentSchemeVerifier { self.column_log_sizes().flatten().into_iter().collect(), &samples, random_coeff, - fri_query_domains, + &query_positions_per_log_size, &proof.queried_values.flatten(), )?; fri_verifier.decommit(fri_answers)?; + Ok(()) } } diff --git a/crates/prover/src/core/poly/line.rs b/crates/prover/src/core/poly/line.rs index 62b24ac97..2bf640c63 100644 --- a/crates/prover/src/core/poly/line.rs +++ b/crates/prover/src/core/poly/line.rs @@ -110,9 +110,10 @@ type LineDomainIterator = /// A univariate polynomial defined on a [LineDomain]. #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] pub struct LinePoly { - /// Coefficients of the polynomial in [line_ifft] algorithm's basis. + /// Coefficients of the polynomial in [`line_ifft`] algorithm's basis. /// /// The coefficients are stored in bit-reversed order. + #[allow(rustdoc::private_intra_doc_links)] coeffs: Vec, /// The number of coefficients stored as `log2(len(coeffs))`. log_size: u32, diff --git a/crates/prover/src/core/prover/mod.rs b/crates/prover/src/core/prover/mod.rs index 30e7cbd54..20e70c533 100644 --- a/crates/prover/src/core/prover/mod.rs +++ b/crates/prover/src/core/prover/mod.rs @@ -201,6 +201,7 @@ impl StarkProof { } = commitment_scheme_proof; let FriProof { + first_layer, inner_layers, last_layer_poly, } = fri_proof; @@ -209,20 +210,24 @@ impl StarkProof { let mut inner_layers_hashes_size = 0; for FriLayerProof { - evals_subset, + fri_witness, decommitment, commitment, } in inner_layers { - inner_layers_samples_size += evals_subset.size_estimate(); + inner_layers_samples_size += fri_witness.size_estimate(); inner_layers_hashes_size += decommitment.size_estimate() + commitment.size_estimate(); } StarkProofSizeBreakdown { oods_samples: sampled_values.size_estimate(), queries_values: queried_values.size_estimate(), - fri_samples: last_layer_poly.size_estimate() + inner_layers_samples_size, - fri_decommitments: inner_layers_hashes_size, + fri_samples: last_layer_poly.size_estimate() + + inner_layers_samples_size + + first_layer.fri_witness.size_estimate(), + fri_decommitments: inner_layers_hashes_size + + first_layer.decommitment.size_estimate() + + first_layer.commitment.size_estimate(), trace_decommitments: commitments.size_estimate() + decommitments.size_estimate(), } } @@ -237,6 +242,7 @@ impl Deref for StarkProof { } /// Size estimate (in bytes) for different parts of the proof. 
+#[derive(Debug)] pub struct StarkProofSizeBreakdown { pub oods_samples: usize, pub queries_values: usize, @@ -292,21 +298,22 @@ impl SizeEstimate for MerkleDecommitment { impl SizeEstimate for FriLayerProof { fn size_estimate(&self) -> usize { let Self { - evals_subset, + fri_witness, decommitment, commitment, } = self; - evals_subset.size_estimate() + decommitment.size_estimate() + commitment.size_estimate() + fri_witness.size_estimate() + decommitment.size_estimate() + commitment.size_estimate() } } impl SizeEstimate for FriProof { fn size_estimate(&self) -> usize { let Self { + first_layer, inner_layers, last_layer_poly, } = self; - inner_layers.size_estimate() + last_layer_poly.size_estimate() + first_layer.size_estimate() + inner_layers.size_estimate() + last_layer_poly.size_estimate() } } diff --git a/crates/prover/src/core/queries.rs b/crates/prover/src/core/queries.rs index 934edfd75..cd9546e6a 100644 --- a/crates/prover/src/core/queries.rs +++ b/crates/prover/src/core/queries.rs @@ -4,16 +4,15 @@ use std::ops::Deref; use itertools::Itertools; use super::channel::Channel; -use super::circle::Coset; -use super::poly::circle::CircleDomain; -use super::utils::bit_reverse_index; pub const UPPER_BOUND_QUERY_BYTES: usize = 4; -/// An ordered set of query indices over a bit reversed [CircleDomain]. +/// An ordered set of query positions. #[derive(Debug, Clone)] pub struct Queries { + /// Query positions sorted in ascending order. pub positions: Vec, + /// Size of the domain from which the queries were sampled. pub log_domain_size: u32, } @@ -40,17 +39,6 @@ impl Queries { } } - // TODO docs - #[allow(clippy::missing_safety_doc)] - pub fn from_positions(positions: Vec, log_domain_size: u32) -> Self { - assert!(positions.is_sorted()); - assert!(positions.iter().all(|p| *p < (1 << log_domain_size))); - Self { - positions, - log_domain_size, - } - } - /// Calculates the matching query indices in a folded domain (i.e each domain point is doubled) /// given `self` (the queries of the original domain) and the number of folds between domains. pub fn fold(&self, n_folds: u32) -> Self { @@ -61,18 +49,13 @@ impl Queries { } } - pub fn opening_positions(&self, fri_step_size: u32) -> SparseSubCircleDomain { - assert!(fri_step_size > 0); - SparseSubCircleDomain { - domains: self - .iter() - .map(|q| SubCircleDomain { - coset_index: q >> fri_step_size, - log_size: fri_step_size, - }) - .dedup() - .collect(), - large_domain_log_size: self.log_domain_size, + #[cfg(test)] + pub fn from_positions(positions: Vec, log_domain_size: u32) -> Self { + assert!(positions.is_sorted()); + assert!(positions.iter().all(|p| *p < (1 << log_domain_size))); + Self { + positions, + log_domain_size, } } } @@ -85,51 +68,6 @@ impl Deref for Queries { } } -#[derive(Debug, Eq, PartialEq)] -pub struct SparseSubCircleDomain { - pub domains: Vec, - pub large_domain_log_size: u32, -} - -impl SparseSubCircleDomain { - pub fn flatten(&self) -> Vec { - self.iter() - .flat_map(|sub_circle_domain| sub_circle_domain.to_decommitment_positions()) - .collect() - } -} - -impl Deref for SparseSubCircleDomain { - type Target = Vec; - - fn deref(&self) -> &Self::Target { - &self.domains - } -} - -/// Represents a circle domain relative to a larger circle domain. The `initial_index` is the bit -/// reversed query index in the larger domain. 
-#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct SubCircleDomain { - pub coset_index: usize, - pub log_size: u32, -} - -impl SubCircleDomain { - /// Calculates the decommitment positions needed for each query given the fri step size. - pub fn to_decommitment_positions(&self) -> Vec { - (self.coset_index << self.log_size..(self.coset_index + 1) << self.log_size).collect() - } - - /// Returns the represented [CircleDomain]. - pub fn to_circle_domain(&self, query_domain: &CircleDomain) -> CircleDomain { - let query = bit_reverse_index(self.coset_index << self.log_size, query_domain.log_size()); - let initial_index = query_domain.index_at(query); - let half_coset = Coset::new(initial_index, self.log_size - 1); - CircleDomain::new(half_coset) - } -} - #[cfg(test)] mod tests { use crate::core::channel::Blake2sChannel; @@ -183,55 +121,4 @@ mod tests { ); } } - - #[test] - pub fn test_conjugate_queries() { - let channel = &mut Blake2sChannel::default(); - let log_domain_size = 7; - let domain = CanonicCoset::new(log_domain_size).circle_domain(); - let mut values = domain.iter().collect::>(); - bit_reverse(&mut values); - - // Test random queries one by one because the conjugate queries are sorted. - for _ in 0..100 { - let query = Queries::generate(channel, log_domain_size, 1); - let conjugate_query = query[0] ^ 1; - let query_and_conjugate = query.opening_positions(1).flatten(); - let mut expected_query_and_conjugate = vec![query[0], conjugate_query]; - expected_query_and_conjugate.sort(); - assert_eq!(query_and_conjugate, expected_query_and_conjugate); - assert_eq!(values[query[0]], values[conjugate_query].conjugate()); - } - } - - #[test] - pub fn test_decommitment_positions() { - let channel = &mut Blake2sChannel::default(); - let log_domain_size = 31; - let n_queries = 100; - let fri_step_size = 3; - - let queries = Queries::generate(channel, log_domain_size, n_queries); - let queries_with_added_positions = queries.opening_positions(fri_step_size).flatten(); - - assert!(queries_with_added_positions.is_sorted()); - assert_eq!( - queries_with_added_positions.len(), - n_queries * (1 << fri_step_size) - ); - } - - #[test] - pub fn test_dedup_decommitment_positions() { - let log_domain_size = 7; - - // Generate all possible queries. 
- let queries = Queries { - positions: (0..1 << log_domain_size).collect(), - log_domain_size, - }; - let queries_with_conjugates = queries.opening_positions(log_domain_size - 2).flatten(); - - assert_eq!(*queries, *queries_with_conjugates); - } } diff --git a/crates/prover/src/core/vcs/blake2_merkle.rs b/crates/prover/src/core/vcs/blake2_merkle.rs index 293ed4ab3..8401716fa 100644 --- a/crates/prover/src/core/vcs/blake2_merkle.rs +++ b/crates/prover/src/core/vcs/blake2_merkle.rs @@ -69,7 +69,7 @@ mod tests { fn test_merkle_success() { let (queries, decommitment, values, verifier) = prepare_merkle::(); - verifier.verify(queries, values, decommitment).unwrap(); + verifier.verify(&queries, values, decommitment).unwrap(); } #[test] @@ -78,7 +78,7 @@ mod tests { decommitment.hash_witness[4] = Blake2sHash::default(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::RootMismatch ); } @@ -89,7 +89,7 @@ mod tests { values[3][2] = BaseField::zero(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::RootMismatch ); } @@ -100,7 +100,7 @@ mod tests { decommitment.hash_witness.pop(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::WitnessTooShort ); } @@ -111,7 +111,7 @@ mod tests { decommitment.hash_witness.push(Blake2sHash::default()); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::WitnessTooLong ); } @@ -122,7 +122,7 @@ mod tests { values[3].push(BaseField::zero()); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::ColumnValuesTooLong ); } @@ -133,7 +133,7 @@ mod tests { values[3].pop(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::ColumnValuesTooShort ); } diff --git a/crates/prover/src/core/vcs/poseidon252_merkle.rs b/crates/prover/src/core/vcs/poseidon252_merkle.rs index 75632876a..5ffba1ea6 100644 --- a/crates/prover/src/core/vcs/poseidon252_merkle.rs +++ b/crates/prover/src/core/vcs/poseidon252_merkle.rs @@ -95,7 +95,7 @@ mod tests { #[test] fn test_merkle_success() { let (queries, decommitment, values, verifier) = prepare_merkle::(); - verifier.verify(queries, values, decommitment).unwrap(); + verifier.verify(&queries, values, decommitment).unwrap(); } #[test] @@ -105,7 +105,7 @@ mod tests { decommitment.hash_witness[4] = FieldElement252::default(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::RootMismatch ); } @@ -117,7 +117,7 @@ mod tests { values[3][2] = BaseField::zero(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::RootMismatch ); } @@ -129,7 +129,7 @@ mod tests { decommitment.hash_witness.pop(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::WitnessTooShort 
); } @@ -141,7 +141,7 @@ mod tests { decommitment.hash_witness.push(FieldElement252::default()); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::WitnessTooLong ); } @@ -153,7 +153,7 @@ mod tests { values[3].push(BaseField::zero()); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::ColumnValuesTooLong ); } @@ -165,7 +165,7 @@ mod tests { values[3].pop(); assert_eq!( - verifier.verify(queries, values, decommitment).unwrap_err(), + verifier.verify(&queries, values, decommitment).unwrap_err(), MerkleVerificationError::ColumnValuesTooShort ); } diff --git a/crates/prover/src/core/vcs/verifier.rs b/crates/prover/src/core/vcs/verifier.rs index 372631e8b..163fed2f1 100644 --- a/crates/prover/src/core/vcs/verifier.rs +++ b/crates/prover/src/core/vcs/verifier.rs @@ -52,7 +52,7 @@ impl MerkleVerifier { /// Returns `Ok(())` if the decommitment is successfully verified. pub fn verify( &self, - queries_per_log_size: BTreeMap>, + queries_per_log_size: &BTreeMap>, queried_values: ColumnVec>, decommitment: MerkleDecommitment, ) -> Result<(), MerkleVerificationError> {
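With `MerkleVerifier::verify` taking `queries_per_log_size` by reference, the verifier can sample the query positions once and hand the same map to each commitment tree's decommitment check and to the FRI answer computation without cloning it. A minimal sketch of that sharing pattern, assuming the map type `BTreeMap<u32, Vec<usize>>`; the functions below are stand-ins, not the crate's API:

```rust
use std::collections::BTreeMap;

// `check_tree` stands in for a call such as
// `tree.verify(&query_positions_per_log_size, queried_values, decommitment)`.
// Because the map is taken by reference, it is sampled once and shared.
fn check_tree(_positions: &BTreeMap<u32, Vec<usize>>) -> Result<(), &'static str> {
    Ok(())
}

fn verify_all_trees(
    positions: &BTreeMap<u32, Vec<usize>>,
    n_trees: usize,
) -> Result<(), &'static str> {
    (0..n_trees).try_for_each(|_| check_tree(positions))
}
```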