Preinitialize segments in all contexts (#466)
* Preinitialize segments in all contexts

* Apply comment
hratoanina authored and Nashtare committed Aug 6, 2024
1 parent 5cace10 commit fd0f7d5
Showing 2 changed files with 74 additions and 65 deletions.
12 changes: 10 additions & 2 deletions evm_arithmetization/src/memory/columns.rs
@@ -43,12 +43,20 @@ pub(crate) struct MemoryColumnsView<T> {
pub virtual_first_change: T,

/// Used to lower the degree of the zero-initializing constraints.
/// Contains `next_segment * addr_changed * next_is_read`.
/// Contains `preinitialized_segments * addr_changed * next_is_read`.
pub initialize_aux: T,

/// Used to allow pre-initialization of some context 0 segments.
/// Used to allow pre-initialization of some segments.
/// Contains `(next_segment - Segment::Code) * (next_segment -
/// Segment::TrieData)
/// * preinitialized_segments_aux`.
pub preinitialized_segments: T,

/// Used to allow pre-initialization of more segments.
/// Contains `(next_segment - Segment::AccountsLinkedList) * (next_segment -
/// Segment::StorageLinkedList)`.
pub preinitialized_segments_aux: T,

/// Contains `row_index` + 1 if and only if context `row_index` is stale,
/// and zero if not.
pub stale_contexts: T,
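For intuition, here is a minimal standalone sketch of what these two helper columns hold, using plain i64 arithmetic and placeholder segment indices instead of the crate's field elements and real `Segment` discriminants:

// Placeholder indices, for illustration only (not the real `Segment` values).
const CODE: i64 = 0;
const TRIE_DATA: i64 = 1;
const ACCOUNTS_LINKED_LIST: i64 = 2;
const STORAGE_LINKED_LIST: i64 = 3;

// `preinitialized_segments_aux`: product over the two linked-list segments.
fn preinitialized_segments_aux(next_segment: i64) -> i64 {
    (next_segment - ACCOUNTS_LINKED_LIST) * (next_segment - STORAGE_LINKED_LIST)
}

// `preinitialized_segments`: the full product, which is zero exactly when
// `next_segment` is one of the four preinitialized segments.
fn preinitialized_segments(next_segment: i64) -> i64 {
    (next_segment - CODE) * (next_segment - TRIE_DATA) * preinitialized_segments_aux(next_segment)
}

fn main() {
    for seg in 0..6 {
        // Segments 0..=3 print 0; any other segment prints a nonzero value.
        println!("segment {seg}: preinitialized_segments = {}", preinitialized_segments(seg));
    }
}

Splitting the product across two columns presumably keeps each defining constraint within the STARK's degree bound; a rough degree check appears after the diff.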
127 changes: 64 additions & 63 deletions evm_arithmetization/src/memory/memory_stark.rs
@@ -189,13 +189,18 @@ pub(crate) fn generate_first_change_flags_and_rc<F: RichField>(
row.range_check
);

row.preinitialized_segments_aux = (next_segment
- F::from_canonical_usize(Segment::AccountsLinkedList.unscale()))
* (next_segment - F::from_canonical_usize(Segment::StorageLinkedList.unscale()));

row.preinitialized_segments = (next_segment
- F::from_canonical_usize(Segment::Code.unscale()))
* (next_segment - F::from_canonical_usize(Segment::TrieData.unscale()))
* row.preinitialized_segments_aux;

let address_changed =
row.context_first_change + row.segment_first_change + row.virtual_first_change;
row.initialize_aux = next_segment * address_changed * next_is_read;
row.preinitialized_segments = (next_segment
- F::from_canonical_usize(Segment::TrieData.unscale()))
* (next_segment - F::from_canonical_usize(Segment::AccountsLinkedList.unscale()))
* (next_segment - F::from_canonical_usize(Segment::StorageLinkedList.unscale()));
row.initialize_aux = row.preinitialized_segments * address_changed * next_is_read;
}
}
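As a mental model for this generation step (again plain integers and placeholder segment indices, rather than `F::from_canonical_usize` applied to the real `Segment` values), `initialize_aux` is nonzero exactly on the rows where the zero-initialization constraint has to fire:

// Placeholder indices, for illustration only.
const CODE: i64 = 0;
const TRIE_DATA: i64 = 1;
const ACCOUNTS_LINKED_LIST: i64 = 2;
const STORAGE_LINKED_LIST: i64 = 3;

/// Mirrors the row-filling logic above; returns (preinitialized_segments_aux,
/// preinitialized_segments, initialize_aux). The two flags are 0 or 1.
fn fill_helpers(next_segment: i64, address_changed: i64, next_is_read: i64) -> (i64, i64, i64) {
    let aux = (next_segment - ACCOUNTS_LINKED_LIST) * (next_segment - STORAGE_LINKED_LIST);
    let pre = (next_segment - CODE) * (next_segment - TRIE_DATA) * aux;
    (aux, pre, pre * address_changed * next_is_read)
}

fn main() {
    // First read of a fresh address in a preinitialized segment: the gate is 0,
    // so a nonzero (preinitialized) value is allowed.
    assert_eq!(fill_helpers(TRIE_DATA, 1, 1).2, 0);
    // First read of a fresh address in any other segment: the gate is nonzero,
    // so the constraints below force the value read to be 0.
    assert_ne!(fill_helpers(7, 1, 1).2, 0);
}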

@@ -481,7 +486,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
let timestamp_inv = lv.timestamp_inv;
let is_stale = lv.is_stale;
let mem_after_filter = lv.mem_after_filter;
let initialize_aux = lv.initialize_aux;
let preinitialized_segments = lv.preinitialized_segments;
let preinitialized_segments_aux = lv.preinitialized_segments_aux;

let next_timestamp = nv.timestamp;
let next_is_read = nv.is_read;
@@ -544,22 +551,27 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
+ address_unchanged * (next_timestamp - timestamp);
yield_constr.constraint_transition(range_check - computed_range_check);

// Validate initialize_aux. It contains next_segment * addr_changed *
// next_is_read.
let initialize_aux = lv.initialize_aux;
// Validate `preinitialized_segments_aux`.
yield_constr.constraint_transition(
initialize_aux - next_addr_segment * not_address_unchanged * next_is_read,
preinitialized_segments_aux
- (next_addr_segment
- P::Scalar::from_canonical_usize(Segment::AccountsLinkedList.unscale()))
* (next_addr_segment
- P::Scalar::from_canonical_usize(Segment::StorageLinkedList.unscale())),
);

// Validate preinitialized_segments.
// Validate `preinitialized_segments`.
yield_constr.constraint_transition(
preinitialized_segments
- (next_addr_segment
- P::Scalar::from_canonical_usize(Segment::TrieData.unscale()))
* (next_addr_segment
- P::Scalar::from_canonical_usize(Segment::AccountsLinkedList.unscale()))
- (next_addr_segment - P::Scalar::from_canonical_usize(Segment::Code.unscale()))
* (next_addr_segment
- P::Scalar::from_canonical_usize(Segment::StorageLinkedList.unscale())),
- P::Scalar::from_canonical_usize(Segment::TrieData.unscale()))
* preinitialized_segments_aux,
);

// Validate `initialize_aux`.
yield_constr.constraint_transition(
initialize_aux - preinitialized_segments * not_address_unchanged * next_is_read,
);

for i in 0..8 {
@@ -570,25 +582,16 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
// By default, memory is initialized with 0. This means that if the first
// operation of a new address is a read, then its value must be 0.
// There are exceptions, though: this constraint zero-initializes everything but
// the code segment and context 0.
yield_constr
.constraint_transition(next_addr_context * initialize_aux * next_values_limbs[i]);
// We don't want to exclude the entirety of context 0. This constraint
// zero-initializes all segments except the specified ones (segment
// 0 is already included in initialize_aux). There is overlap with
// the previous constraint, but this is not a problem.
yield_constr.constraint_transition(
preinitialized_segments * initialize_aux * next_values_limbs[i],
);
// the preinitialized segments.
yield_constr.constraint_transition(initialize_aux * next_values_limbs[i]);
}

// Validate `mem_after_filter`. Its value is `filter * address_changed * (1 -
// is_stale)`
// Validate `mem_after_filter`.
yield_constr.constraint_transition(
mem_after_filter + filter * not_address_unchanged * (is_stale - P::ONES),
);

// Validate timestamp_inv. Since it's used as a CTL filter, its value must be
// Validate `timestamp_inv`. Since it's used as a CTL filter, its value must be
// checked.
yield_constr.constraint(timestamp * (timestamp * timestamp_inv - P::ONES));
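Putting the packed constraints above together: the following minimal standalone checker (plain i64 arithmetic and placeholder segment indices; the real code works over a `PackedField` type `P`) evaluates, for a pair of adjacent rows, the expressions this commit requires to be zero:

// Placeholder indices, for illustration only.
const CODE: i64 = 0;
const TRIE_DATA: i64 = 1;
const ACCOUNTS_LINKED_LIST: i64 = 2;
const STORAGE_LINKED_LIST: i64 = 3;

struct Row {
    addr_segment: i64,
    is_read: i64,
    value_limbs: [i64; 8],
    preinitialized_segments: i64,
    preinitialized_segments_aux: i64,
    initialize_aux: i64,
}

/// Returns the new transition constraints for a (local, next) row pair;
/// `address_changed` is 1 if the address changes between the rows, else 0.
fn new_transition_constraints(lv: &Row, nv: &Row, address_changed: i64) -> Vec<i64> {
    let s = nv.addr_segment;
    let mut constraints = vec![
        // Validate `preinitialized_segments_aux`.
        lv.preinitialized_segments_aux - (s - ACCOUNTS_LINKED_LIST) * (s - STORAGE_LINKED_LIST),
        // Validate `preinitialized_segments`.
        lv.preinitialized_segments - (s - CODE) * (s - TRIE_DATA) * lv.preinitialized_segments_aux,
        // Validate `initialize_aux`.
        lv.initialize_aux - lv.preinitialized_segments * address_changed * nv.is_read,
    ];
    // Zero-initialization: a first read from a non-preinitialized segment must return 0.
    for i in 0..8 {
        constraints.push(lv.initialize_aux * nv.value_limbs[i]);
    }
    constraints
}

A valid trace makes every returned entry zero; a first read of a fresh address in a non-preinitialized segment with any nonzero limb leaves one of the last eight entries nonzero, so no valid proof can be produced.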

@@ -622,7 +625,9 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
let timestamp_inv = lv.timestamp_inv;
let is_stale = lv.is_stale;
let mem_after_filter = lv.mem_after_filter;
let initialize_aux = lv.initialize_aux;
let preinitialized_segments = lv.preinitialized_segments;
let preinitialized_segments_aux = lv.preinitialized_segments_aux;

let next_addr_context = nv.addr_context;
let next_addr_segment = nv.addr_segment;
@@ -728,38 +733,47 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
let range_check_diff = builder.sub_extension(range_check, computed_range_check);
yield_constr.constraint_transition(builder, range_check_diff);

// Validate initialize_aux. It contains next_segment * addr_changed *
// next_is_read.
let initialize_aux = lv.initialize_aux;
let computed_initialize_aux = builder.mul_extension(not_address_unchanged, next_is_read);
let computed_initialize_aux =
builder.mul_extension(next_addr_segment, computed_initialize_aux);
let new_first_read_constraint =
builder.sub_extension(initialize_aux, computed_initialize_aux);
yield_constr.constraint_transition(builder, new_first_read_constraint);

// Validate preinitialized_segments.
let segment_trie_data = builder.add_const_extension(
next_addr_segment,
F::NEG_ONE * F::from_canonical_usize(Segment::TrieData.unscale()),
);
// Validate `preinitialized_segments_aux`.
let segment_accounts_list = builder.add_const_extension(
next_addr_segment,
F::NEG_ONE * F::from_canonical_usize(Segment::AccountsLinkedList.unscale()),
-F::from_canonical_usize(Segment::AccountsLinkedList.unscale()),
);
let segment_storage_list = builder.add_const_extension(
next_addr_segment,
F::NEG_ONE * F::from_canonical_usize(Segment::StorageLinkedList.unscale()),
-F::from_canonical_usize(Segment::StorageLinkedList.unscale()),
);
let segment_aux_prod = builder.mul_extension(segment_accounts_list, segment_storage_list);
let preinitialized_segments_aux_constraint =
builder.sub_extension(preinitialized_segments_aux, segment_aux_prod);
yield_constr.constraint_transition(builder, preinitialized_segments_aux_constraint);

// Validate `preinitialized_segments`.
let segment_code = builder.add_const_extension(
next_addr_segment,
-F::from_canonical_usize(Segment::Code.unscale()),
);
let segment_trie_data = builder.add_const_extension(
next_addr_segment,
-F::from_canonical_usize(Segment::TrieData.unscale()),
);

let segment_prod = builder.mul_many_extension([
segment_code,
segment_trie_data,
segment_accounts_list,
segment_storage_list,
preinitialized_segments_aux,
]);
let preinitialized_segments_constraint =
builder.sub_extension(preinitialized_segments, segment_prod);
yield_constr.constraint_transition(builder, preinitialized_segments_constraint);

// Validate `initialize_aux`.
let computed_initialize_aux = builder.mul_extension(not_address_unchanged, next_is_read);
let computed_initialize_aux =
builder.mul_extension(preinitialized_segments, computed_initialize_aux);
let new_first_read_constraint =
builder.sub_extension(initialize_aux, computed_initialize_aux);
yield_constr.constraint_transition(builder, new_first_read_constraint);

for i in 0..8 {
// Enumerate purportedly-ordered log.
let value_diff = builder.sub_extension(next_values_limbs[i], value_limbs[i]);
@@ -769,25 +783,12 @@ impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for MemoryStark<F
// By default, memory is initialized with 0. This means that if the first
// operation of a new address is a read, then its value must be 0.
// There are exceptions, though: this constraint zero-initializes everything but
// the code segment and context 0.
let context_zero_initializing_constraint =
builder.mul_extension(next_values_limbs[i], initialize_aux);
let initializing_constraint =
builder.mul_extension(next_addr_context, context_zero_initializing_constraint);
yield_constr.constraint_transition(builder, initializing_constraint);
// We don't want to exclude the entirety of context 0. This constraint
// zero-initializes all segments except the specified ones (segment
// 0 is already included in initialize_aux). There is overlap with
// the previous constraint, but this is not a problem.
let zero_init_constraint = builder.mul_extension(
preinitialized_segments,
context_zero_initializing_constraint,
);
// the preinitialized segments.
let zero_init_constraint = builder.mul_extension(initialize_aux, next_values_limbs[i]);
yield_constr.constraint_transition(builder, zero_init_constraint);
}

// Validate `mem_after_filter`. Its value is `filter * address_changed * (1 -
// is_stale)`
// Validate `mem_after_filter`.
{
let rhs = builder.mul_extension(filter, not_address_unchanged);
let rhs = builder.mul_sub_extension(rhs, is_stale, rhs);
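A note on why the product is split across `preinitialized_segments` and `preinitialized_segments_aux` in both the packed and the recursive versions: each constraint must stay within the STARK's constraint-degree bound (assumed to be 3 in this back-of-the-envelope check, with every committed column treated as degree 1), and a single column holding the full four-factor product would need a degree-4 defining constraint:

fn main() {
    let col = 1u32; // degree contributed by any single trace column
    let bound = 3;  // assumed constraint-degree bound

    // `preinitialized_segments_aux - (s - AccountsLinkedList)(s - StorageLinkedList)`
    let aux_constraint = 2 * col;
    // `preinitialized_segments - (s - Code)(s - TrieData) * preinitialized_segments_aux`
    let pre_constraint = 3 * col;
    // `initialize_aux - preinitialized_segments * address_changed * next_is_read`
    let init_aux_constraint = 3 * col;
    // `initialize_aux * next_values_limbs[i]`
    let zero_init_constraint = 2 * col;
    // Without the aux column, defining `preinitialized_segments` would require the degree-4
    // product `(s - Code)(s - TrieData)(s - AccountsLinkedList)(s - StorageLinkedList)`.

    for (name, degree) in [
        ("preinitialized_segments_aux", aux_constraint),
        ("preinitialized_segments", pre_constraint),
        ("initialize_aux", init_aux_constraint),
        ("zero-initialization", zero_init_constraint),
    ] {
        assert!(degree <= bound, "{name} would exceed the assumed bound");
        println!("{name}: degree {degree}");
    }
}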
