Allow duplicate query positions #224

Merged · 1 commit · Oct 23, 2023
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -8,6 +8,7 @@
* Added support for proven security estimation in `no_std` context (#218).
* [BREAKING] refactored `verify()` function to take `AcceptableOptions` as a parameter (#219).
* Increased min version of `rustc` to 1.73 (#221).
* Allowed duplicate query positions (#224).

## 0.6.5 (2023-08-09) - math crate only
* Added conditional support for serde on field elements (#209)
8 changes: 8 additions & 0 deletions air/src/proof/mod.rs
@@ -52,6 +52,9 @@ const MAX_PROXIMITY_PARAMETER: u64 = 1000;
pub struct StarkProof {
/// Basic metadata about the execution of the computation described by this proof.
pub context: Context,
/// Number of unique queries made by the verifier. This will be different from the
/// context.options.num_queries if the same position in the domain was queried more than once.
pub num_unique_queries: u8,
/// Commitments made by the prover during the commit phase of the protocol.
pub commitments: Commitments,
/// Decommitments of extended execution trace values (for all trace segments) at position
@@ -129,6 +132,7 @@ impl StarkProof {
pub fn to_bytes(&self) -> Vec<u8> {
let mut result = Vec::new();
self.context.write_into(&mut result);
result.push(self.num_unique_queries);
self.commitments.write_into(&mut result);
self.trace_queries.write_into(&mut result);
self.constraint_queries.write_into(&mut result);
@@ -148,6 +152,9 @@ impl StarkProof {
// parse the context
let context = Context::read_from(&mut source)?;

// parse the number of unique queries made by the verifier
let num_unique_queries = source.read_u8()?;

// parse the commitments
let commitments = Commitments::read_from(&mut source)?;

@@ -161,6 +168,7 @@
// parse the rest of the proof
let proof = StarkProof {
context,
num_unique_queries,
commitments,
trace_queries,
constraint_queries: Queries::read_from(&mut source)?,
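
The new field is written as a single byte immediately after the context, and read back from the same offset in from_bytes. A toy round-trip sketch of that ordering, using simplified, illustrative helpers rather than the crate's actual serialization API:

// Illustrative sketch only: a one-byte count written right after the context
// and read back from the same offset during deserialization.
fn encode(context: &[u8], num_unique_queries: u8, rest: &[u8]) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(context.len() + 1 + rest.len());
    bytes.extend_from_slice(context); // context first
    bytes.push(num_unique_queries);   // then the unique-query count
    bytes.extend_from_slice(rest);    // then commitments, queries, etc.
    bytes
}

fn decode(bytes: &[u8], context_len: usize) -> (&[u8], u8, &[u8]) {
    let (context, tail) = bytes.split_at(context_len);
    (context, tail[0], &tail[1..])
}

fn main() {
    let proof = encode(&[1, 2, 3], 42, &[9, 9]);
    let (context, n, rest) = decode(&proof, 3);
    assert_eq!((context, n, rest), (&[1u8, 2, 3][..], 42, &[9u8, 9][..]));
}
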
11 changes: 3 additions & 8 deletions crypto/src/random/default.rs
@@ -153,12 +153,12 @@ impl<B: StarkField, H: ElementHasher<BaseField = B>> RandomCoin for DefaultRando
Err(RandomCoinError::FailedToDrawFieldElement(1000))
}

/// Returns a vector of unique integers selected from the range [0, domain_size) after reseeding
/// Returns a vector of integers selected from the range [0, domain_size) after reseeding
/// the PRNG with the specified `nonce` by setting the new seed to hash(`seed` || `nonce`).
///
/// # Errors
/// Returns an error if the specified number of unique integers could not be generated
/// after 1000 calls to the PRNG.
/// Returns an error if the specified number of integers could not be generated after 1000
/// calls to the PRNG.
///
/// # Panics
/// Panics if:
@@ -182,10 +182,8 @@ impl<B: StarkField, H: ElementHasher<BaseField = B>> RandomCoin for DefaultRando
///
/// assert_eq!(num_values, values.len());
///
/// let mut value_set = HashSet::new();
/// for value in values {
/// assert!(value < domain_size);
/// assert!(value_set.insert(value));
/// }
/// ```
fn draw_integers(
@@ -214,9 +212,6 @@ impl<B: StarkField, H: ElementHasher<BaseField = B>> RandomCoin for DefaultRando
// into the specified domain
let value = (u64::from_le_bytes(bytes) & v_mask) as usize;

if values.contains(&value) {
continue;
}
values.push(value);
if values.len() == num_values {
break;
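
With the contains check removed, draw_integers keeps every masked draw, so the returned vector may contain repeats. A rough sketch of the simplified loop under stated assumptions (the next_u64 closure, the function name, and the omission of the 1000-call error path are all simplifications, not the crate's API):

// Sketch of the simplified draw loop: every masked value is kept, duplicates included.
fn draw_positions(mut next_u64: impl FnMut() -> u64, num_values: usize, domain_size: usize) -> Vec<usize> {
    assert!(domain_size.is_power_of_two(), "domain_size must be a power of two");
    let v_mask = (domain_size - 1) as u64; // maps a 64-bit draw into [0, domain_size)
    let mut values = Vec::with_capacity(num_values);
    while values.len() < num_values {
        let value = (next_u64() & v_mask) as usize;
        values.push(value); // no `values.contains(&value)` check anymore
    }
    values
}

fn main() {
    // A deliberately colliding "PRNG" to show that repeats are now allowed.
    let mut i = 0u64;
    let positions = draw_positions(|| { i += 1; i % 4 }, 6, 8);
    assert_eq!(positions, vec![1, 2, 3, 0, 1, 2]);
}
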
6 changes: 3 additions & 3 deletions crypto/src/random/mod.rs
@@ -47,12 +47,12 @@ pub trait RandomCoin: Sync {
/// PRNG.
fn draw<E: FieldElement<BaseField = Self::BaseField>>(&mut self) -> Result<E, RandomCoinError>;

/// Returns a vector of unique integers selected from the range [0, domain_size) after it reseeds
/// Returns a vector of integers selected from the range [0, domain_size) after it reseeds
/// the coin with a nonce.
///
/// # Errors
/// Returns an error if the specified number of unique integers could not be generated
/// after 1000 calls to the PRNG.
/// Returns an error if the specified number of integers could not be generated after 1000
/// calls to the PRNG.
///
/// # Panics
/// Panics if:
18 changes: 15 additions & 3 deletions prover/src/channel.rs
@@ -136,13 +136,21 @@ where
/// Returns a set of positions in the LDE domain against which the evaluations of trace and
/// constraint composition polynomials should be queried.
///
/// The positions are drawn from the public coin uniformly at random.
/// The positions are drawn from the public coin uniformly at random. Duplicate positions
/// are removed from the returned vector.
pub fn get_query_positions(&mut self) -> Vec<usize> {
let num_queries = self.context.options().num_queries();
let lde_domain_size = self.context.lde_domain_size();
self.public_coin
let mut positions = self
.public_coin
.draw_integers(num_queries, lde_domain_size, self.pow_nonce)
.expect("failed to draw query position")
.expect("failed to draw query position");

// remove any duplicate positions from the list
positions.sort_unstable();
positions.dedup();

positions
}

/// Determines a nonce, which when hashed with the current seed of the public coin results
@@ -174,7 +182,10 @@
trace_queries: Vec<Queries>,
constraint_queries: Queries,
fri_proof: FriProof,
num_query_positions: usize,
) -> StarkProof {
assert!(num_query_positions <= u8::MAX as usize, "num_query_positions too big");

StarkProof {
context: self.context,
commitments: self.commitments,
Expand All @@ -183,6 +194,7 @@ where
constraint_queries,
fri_proof,
pow_nonce: self.pow_nonce,
num_unique_queries: num_query_positions as u8,
}
}
}
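
The prover's deduplication is a sort followed by dedup, and build_proof then requires the unique count to fit into the u8 stored in the proof. A minimal sketch of that invariant (illustrative only, not the crate's code):

// Sort first so that `dedup`, which removes only consecutive duplicates,
// eliminates all repeats; the unique count must then fit into a u8.
fn unique_positions(mut positions: Vec<usize>) -> Vec<usize> {
    positions.sort_unstable();
    positions.dedup();
    assert!(positions.len() <= u8::MAX as usize, "num_query_positions too big");
    positions
}

fn main() {
    assert_eq!(unique_positions(vec![7, 3, 7, 1, 3]), vec![1, 3, 7]);
}
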
9 changes: 7 additions & 2 deletions prover/src/lib.rs
@@ -439,7 +439,7 @@ pub trait Prover {
let query_positions = channel.get_query_positions();
#[cfg(feature = "std")]
debug!(
"Determined {} query positions in {} ms",
"Determined {} unique query positions in {} ms",
query_positions.len(),
now.elapsed().as_millis()
);
@@ -461,7 +461,12 @@
let constraint_queries = constraint_commitment.query(&query_positions);

// build the proof object
let proof = channel.build_proof(trace_queries, constraint_queries, fri_proof);
let proof = channel.build_proof(
trace_queries,
constraint_queries,
fri_proof,
query_positions.len(),
);
#[cfg(feature = "std")]
debug!("Built proof object in {} ms", now.elapsed().as_millis());

11 changes: 6 additions & 5 deletions verifier/src/channel.rs
@@ -51,6 +51,7 @@ impl<E: FieldElement, H: ElementHasher<BaseField = E::BaseField>> VerifierChanne
) -> Result<Self, VerifierError> {
let StarkProof {
context,
num_unique_queries,
commitments,
trace_queries,
constraint_queries,
@@ -77,8 +78,9 @@
.map_err(|err| VerifierError::ProofDeserializationError(err.to_string()))?;

// --- parse trace and constraint queries -------------------------------------------------
let trace_queries = TraceQueries::new(trace_queries, air)?;
let constraint_queries = ConstraintQueries::new(constraint_queries, air)?;
let trace_queries = TraceQueries::new(trace_queries, air, num_unique_queries as usize)?;
let constraint_queries =
ConstraintQueries::new(constraint_queries, air, num_unique_queries as usize)?;

// --- parse FRI proofs -------------------------------------------------------------------
let fri_num_partitions = fri_proof.num_partitions();
@@ -242,6 +244,7 @@ impl<E: FieldElement, H: ElementHasher<BaseField = E::BaseField>> TraceQueries<E
pub fn new<A: Air<BaseField = E::BaseField>>(
mut queries: Vec<Queries>,
air: &A,
num_queries: usize,
) -> Result<Self, VerifierError> {
assert_eq!(
queries.len(),
@@ -251,8 +254,6 @@
queries.len()
);

let num_queries = air.options().num_queries();

// parse main trace segment queries; parsing also validates that hashes of each table row
// form the leaves of Merkle authentication paths in the proofs
let main_segment_width = air.trace_layout().main_trace_width();
@@ -318,8 +319,8 @@ impl<E: FieldElement, H: ElementHasher<BaseField = E::BaseField>> ConstraintQuer
pub fn new<A: Air<BaseField = E::BaseField>>(
queries: Queries,
air: &A,
num_queries: usize,
) -> Result<Self, VerifierError> {
let num_queries = air.options().num_queries();
let constraint_frame_width = air.context().num_constraint_composition_columns();

let (query_proofs, evaluations) = queries
7 changes: 6 additions & 1 deletion verifier/src/lib.rs
@@ -264,10 +264,15 @@
// interactive version of the protocol, the verifier sends these query positions to the prover,
// and the prover responds with decommitments against these positions for trace and constraint
// composition polynomial evaluations.
let query_positions = public_coin
let mut query_positions = public_coin
.draw_integers(air.options().num_queries(), air.lde_domain_size(), pow_nonce)
.map_err(|_| VerifierError::RandomCoinError)?;

// remove any potential duplicates from the positions as the prover will send openings only
// for unique queries
query_positions.sort_unstable();
query_positions.dedup();

// read evaluations of trace and constraint composition polynomials at the queried positions;
// this also checks that the read values are valid against trace and constraint commitments
let (queried_main_trace_states, queried_aux_trace_states) =
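
Since the verifier re-derives the same draws from the public coin and applies the same sort/dedup, its unique position count should line up with the num_unique_queries value carried in the proof. A hypothetical consistency check that illustrates the relationship (not code from this PR):

// Hypothetical check: after deduplication, the verifier's position count should
// equal the unique-query count the prover recorded in the proof.
fn positions_match_proof(mut drawn: Vec<usize>, num_unique_queries: u8) -> bool {
    drawn.sort_unstable();
    drawn.dedup();
    drawn.len() == num_unique_queries as usize
}

fn main() {
    assert!(positions_match_proof(vec![5, 2, 5, 9], 3));
}
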