Introduce a burn address for cdk_erigon #463

Merged: 18 commits, Aug 30, 2024
Changes from 3 commits
1 change: 1 addition & 0 deletions evm_arithmetization/Cargo.toml
@@ -62,6 +62,7 @@ parallel = [
"starky/parallel",
]
polygon_pos = []
cdk_erigon = []

[[bin]]
name = "assemble"
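The `cdk_erigon` feature is additive and off by default, so downstream users opt in explicitly (for example with `cargo build --features cdk_erigon`). The rest of the PR relies on the standard cfg-gating pattern sketched below; the struct and types here are simplified placeholders, not the crate's actual definitions:

/// Illustrative only: a field that exists solely when the feature is enabled.
pub struct ExampleInputs {
    pub withdrawals: Vec<u64>,
    /// Compiled in only under `--features cdk_erigon`.
    #[cfg(feature = "cdk_erigon")]
    pub burn_addr: Option<[u8; 20]>,
}

pub fn build_inputs() -> ExampleInputs {
    ExampleInputs {
        withdrawals: vec![],
        // Field initializers can carry the same cfg attribute, which is how the
        // benches and tests below set `burn_addr` without breaking default builds.
        #[cfg(feature = "cdk_erigon")]
        burn_addr: None,
    }
}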
2 changes: 2 additions & 0 deletions evm_arithmetization/benches/fibonacci_25m_gas.rs
@@ -178,6 +178,8 @@ fn prepare_setup() -> anyhow::Result<GenerationInputs> {

Ok(GenerationInputs {
signed_txn: Some(txn.to_vec()),
#[cfg(feature = "cdk_erigon")]
burn_addr: None,
withdrawals: vec![],
tries: tries_before,
trie_roots_after,
10 changes: 9 additions & 1 deletion evm_arithmetization/src/cpu/kernel/aggregator.rs
@@ -175,7 +175,15 @@ pub static KERNEL_FILES: [&str; NUMBER_KERNEL_FILES] = [
pub static KERNEL: Lazy<Kernel> = Lazy::new(combined_kernel);

pub(crate) fn combined_kernel_from_files<const N: usize>(files: [&str; N]) -> Kernel {
let parsed_files = files.iter().map(|f| parse(f, HashSet::new())).collect_vec();
#[cfg(feature = "cdk_erigon")]
let active_features = HashSet::from(["cdk_erigon"]);
#[cfg(not(feature = "cdk_erigon"))]
let active_features = HashSet::new();

let parsed_files = files
.iter()
.map(|f| parse(f, active_features.clone()))
.collect_vec();
assemble(parsed_files, evm_constants(), true)
}

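The assembler now receives a set of active feature names, and `#[cfg(feature = ...)] { ... }` blocks in the kernel's .asm sources (like those added to process_txn.asm below) are kept only when the named feature is in that set. The following is a rough, purely hypothetical sketch of that filtering idea, not the crate's actual `parse` implementation, which handles gating while building its AST:

use std::collections::HashSet;

// Hypothetical text-level illustration of feature-gated kernel asm blocks.
fn strip_inactive_blocks(src: &str, active: &HashSet<&str>) -> String {
    let mut out = String::new();
    let mut in_block = false; // inside a #[cfg(...)] { ... } block
    let mut keep = true;      // is the current block's feature active?
    let mut depth = 0usize;   // brace depth inside that block
    for line in src.lines() {
        let trimmed = line.trim();
        if let Some(feat) = trimmed
            .strip_prefix("#[cfg(feature = ")
            .and_then(|s| s.strip_suffix(")]"))
        {
            in_block = true;
            keep = active.contains(feat);
            depth = 0;
            continue;
        }
        if in_block {
            if trimmed == "{" {
                depth += 1;
                if depth == 1 {
                    continue; // drop the opening brace of the gated block
                }
            } else if trimmed == "}" && depth > 0 {
                depth -= 1;
                if depth == 0 {
                    in_block = false; // drop the closing brace too
                    continue;
                }
            }
            if !keep {
                continue; // skip everything inside an inactive block
            }
        }
        out.push_str(line);
        out.push('\n');
    }
    out
}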
93 changes: 79 additions & 14 deletions evm_arithmetization/src/cpu/kernel/asm/core/process_txn.asm
@@ -75,6 +75,11 @@ global buy_gas:
// stack: deduct_eth_status, retdest
%jumpi(panic)
// stack: retdest
#[cfg(feature = cdk_erigon)]
{
%add_max_burnt_eth
// stack: retdest
}

global increment_sender_nonce:
%mload_txn_field(@TXN_FIELD_ORIGIN)
@@ -324,46 +329,83 @@ process_message_txn_fail:
%transfer_eth %jumpi(panic)
%jump(process_message_txn_after_call_contd)

%macro pay_coinbase_and_refund_sender
// stack: leftover_gas
global pay_coinbase_and_refund_sender:
// stack: leftover_gas, retdest
DUP1
// stack: leftover_gas, leftover_gas
// stack: leftover_gas, leftover_gas, retdest
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
SUB
// stack: used_gas, leftover_gas
// stack: used_gas, leftover_gas, retdest
%mload_global_metadata(@GLOBAL_METADATA_REFUND_COUNTER)
// stack: refund, used_gas, leftover_gas
// stack: refund, used_gas, leftover_gas, retdest
DUP2 %div_const(@MAX_REFUND_QUOTIENT) // max_refund = used_gas/5
// stack: max_refund, refund, used_gas, leftover_gas
// stack: max_refund, refund, used_gas, leftover_gas, retdest
%min
%stack (refund, used_gas, leftover_gas) -> (leftover_gas, refund, refund, used_gas)
ADD
// stack: leftover_gas', refund, used_gas
// stack: leftover_gas', refund, used_gas, retdest
SWAP2
// stack: used_gas, refund, leftover_gas'
// stack: used_gas, refund, leftover_gas', retdest
SUB
// stack: used_gas', leftover_gas'
// stack: used_gas', leftover_gas', retdest

// Pay the coinbase.
%mload_txn_field(@TXN_FIELD_COMPUTED_PRIORITY_FEE_PER_GAS)
MUL
// stack: used_gas_tip, leftover_gas'
%mload_global_metadata(@GLOBAL_METADATA_BLOCK_BENEFICIARY)
// stack: coinbase, used_gas_tip, leftover_gas'
// stack: coinbase, used_gas_tip, leftover_gas', retdest
%add_eth
// stack: leftover_gas'
// stack: leftover_gas', retdest
DUP1

// Refund gas to the origin.
%mload_txn_field(@TXN_FIELD_COMPUTED_FEE_PER_GAS)
MUL
// stack: leftover_gas_cost, leftover_gas'
// stack: leftover_gas_cost, leftover_gas', retdest
%mload_txn_field(@TXN_FIELD_ORIGIN)
// stack: origin, leftover_gas_cost, leftover_gas'
// stack: origin, leftover_gas_cost, leftover_gas', retdest
%add_eth
// stack: leftover_gas'
// stack: leftover_gas', retdest

#[cfg(feature = cdk_erigon)]
{
%deduct_extra_burn_fees
}
SWAP1 JUMP

%macro pay_coinbase_and_refund_sender
// stack: leftover_gas
%stack (leftover_gas) -> (leftover_gas, %%after)
%jump(pay_coinbase_and_refund_sender)
%%after:
%endmacro

#[cfg(feature = cdk_erigon)]
{
%macro deduct_extra_burn_fees
// stack: leftover_gas'
// Deduct the extra burn fees from the burn target.
%mload_global_metadata(@GLOBAL_METADATA_BURN_ADDR) DUP1
%eq_const(@U256_MAX) %jumpi(%%deduct_extra_burn_fees_pop)
// stack: burn_target, leftover_gas'
DUP2
%mload_global_metadata(@GLOBAL_METADATA_BLOCK_BASE_FEE)
MUL
// stack: refund_base_cost, burn_target, leftover_gas'
SWAP1
%deduct_eth
// stack: deduct_status, leftover_gas'
%jumpi(panic)
%jump(%%deduct_extra_burn_fees_end)
%%deduct_extra_burn_fees_pop:
// stack: burn_target, leftover_gas'
POP
%%deduct_extra_burn_fees_end:
// stack: leftover_gas'
%endmacro
}

// Sets @TXN_FIELD_MAX_FEE_PER_GAS and @TXN_FIELD_MAX_PRIORITY_FEE_PER_GAS.
%macro compute_fees
// stack: (empty)
@@ -395,6 +437,29 @@
// stack: gas_limit - intrinsic_gas
%endmacro

#[cfg(feature = cdk_erigon)]
{
%macro add_max_burnt_eth
// stack: (empty)
%mload_global_metadata(@GLOBAL_METADATA_BURN_ADDR)
// If there is no burn target, we skip the transfer.
DUP1 %eq_const(@U256_MAX) %jumpi(%%add_max_burnt_eth_pop)
// stack: burn_target
%mload_global_metadata(@GLOBAL_METADATA_BLOCK_BASE_FEE)
%mload_txn_field(@TXN_FIELD_GAS_LIMIT)
MUL
// stack: max_burnt_cost, burn_target
SWAP1
%add_eth
%jump(%%add_max_burnt_eth_end)
%%add_max_burnt_eth_pop:
// stack: burn_target
POP
%%add_max_burnt_eth_end:
// stack: (empty)
%endmacro
}

create_contract_account_fault:
%revert_checkpoint
// stack: address, retdest
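Read together, the two new macros implement the cdk_erigon base-fee burn: `add_max_burnt_eth` credits the burn address with `base_fee * gas_limit` when gas is bought, and `deduct_extra_burn_fees` later claws back `base_fee * leftover_gas'`, so the address nets exactly `base_fee * used_gas'`; when `@GLOBAL_METADATA_BURN_ADDR` is `@U256_MAX`, both macros skip the transfer. A small self-contained Rust sketch of the settlement arithmetic (names and u128 widths are illustrative; the kernel operates on U256):

/// Illustrative recap of the kernel's gas settlement with a burn address.
fn settle_gas(
    gas_limit: u128,
    leftover_gas: u128,
    refund_counter: u128,
    base_fee: u128,
    priority_fee_per_gas: u128,
    fee_per_gas: u128,
) -> (u128, u128, u128) {
    // pay_coinbase_and_refund_sender:
    let used_gas = gas_limit - leftover_gas;
    let max_refund = used_gas / 5; // MAX_REFUND_QUOTIENT
    let refund = refund_counter.min(max_refund);
    let leftover_gas = leftover_gas + refund; // leftover_gas'
    let used_gas = used_gas - refund; // used_gas'

    let coinbase_tip = used_gas * priority_fee_per_gas;
    let origin_refund = leftover_gas * fee_per_gas;

    // Burn accounting: buy_gas credited base_fee * gas_limit up front
    // (add_max_burnt_eth); deduct_extra_burn_fees claws back
    // base_fee * leftover_gas', so the net amount burnt is base_fee * used_gas'.
    let burnt = base_fee * gas_limit - base_fee * leftover_gas;
    debug_assert_eq!(burnt, base_fee * used_gas);

    (coinbase_tip, origin_refund, burnt)
}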
@@ -111,10 +111,13 @@ pub(crate) enum GlobalMetadata {
BlobVersionedHashesRlpLen,
// Number of blob versioned hashes contained in the current type-3 transaction.
BlobVersionedHashesLen,

/// Address where the base fee to be burnt is sent.
BurnAddr,
}

impl GlobalMetadata {
pub(crate) const COUNT: usize = 55;
pub(crate) const COUNT: usize = 56;

/// Unscales this virtual offset by their respective `Segment` value.
pub(crate) const fn unscale(&self) -> usize {
@@ -178,6 +181,7 @@ impl GlobalMetadata {
Self::BlobVersionedHashesRlpStart,
Self::BlobVersionedHashesRlpLen,
Self::BlobVersionedHashesLen,
Self::BurnAddr,
]
}

@@ -239,6 +243,7 @@ impl GlobalMetadata {
Self::BlobVersionedHashesRlpStart => "GLOBAL_METADATA_BLOB_VERSIONED_HASHES_RLP_START",
Self::BlobVersionedHashesRlpLen => "GLOBAL_METADATA_BLOB_VERSIONED_HASHES_RLP_LEN",
Self::BlobVersionedHashesLen => "GLOBAL_METADATA_BLOB_VERSIONED_HASHES_LEN",
Self::BurnAddr => "GLOBAL_METADATA_BURN_ADDR",
}
}
}
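The new `BurnAddr` slot raises `GlobalMetadata::COUNT` from 55 to 56, and the kernel treats `U256_MAX` in that slot as "no burn address" (see the `%eq_const(@U256_MAX)` checks in process_txn.asm above). A hedged sketch of that sentinel convention, assuming the `ethereum-types` crate the workspace already uses; the helper names are illustrative, not from this PR:

use ethereum_types::{Address, U256};

// Illustrative helpers: an optional burn address packed into the single U256
// metadata slot, with U256::MAX meaning "no burn address".
fn encode_burn_addr(burn_addr: Option<Address>) -> U256 {
    match burn_addr {
        Some(addr) => U256::from_big_endian(addr.as_bytes()),
        None => U256::max_value(),
    }
}

fn burn_is_active(slot: U256) -> bool {
    slot != U256::max_value()
}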
7 changes: 6 additions & 1 deletion evm_arithmetization/src/cpu/kernel/tests/add11.rs
@@ -15,7 +15,7 @@ use crate::generation::TrieInputs;
use crate::proof::{BlockHashes, BlockMetadata, TrieRoots};
use crate::testing_utils::{
beacon_roots_account_nibbles, beacon_roots_contract_from_storage, ger_account_nibbles,
preinitialized_state_and_storage_tries, update_beacon_roots_account_storage,
init_logger, preinitialized_state_and_storage_tries, update_beacon_roots_account_storage,
GLOBAL_EXIT_ROOT_ACCOUNT,
};
use crate::GenerationInputs;
@@ -181,6 +181,8 @@ fn test_add11_yml() {

let inputs = GenerationInputs {
signed_txn: Some(txn.to_vec()),
#[cfg(feature = "cdk_erigon")]
burn_addr: None,
withdrawals: vec![],
global_exit_roots: vec![],
tries: tries_before,
@@ -208,6 +210,7 @@

#[test]
fn test_add11_yml_with_exception() {
init_logger();
// In this test, we make sure that the user code throws a stack underflow
// exception.
let beneficiary = hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba");
@@ -362,6 +365,8 @@ fn test_add11_yml_with_exception() {

let inputs = GenerationInputs {
signed_txn: Some(txn.to_vec()),
#[cfg(feature = "cdk_erigon")]
burn_addr: None,
withdrawals: vec![],
global_exit_roots: vec![],
tries: tries_before,
74 changes: 60 additions & 14 deletions evm_arithmetization/src/fixed_recursive_verifier.rs
@@ -38,6 +38,8 @@ use starky::stark::Stark;
use crate::all_stark::{all_cross_table_lookups, AllStark, Table, NUM_TABLES};
use crate::generation::GenerationInputs;
use crate::get_challenges::observe_public_values_target;
#[cfg(feature = "cdk_erigon")]
use crate::proof::BurnAddrTarget;
use crate::proof::{
AllProof, BlockHashesTarget, BlockMetadataTarget, ExtraBlockData, ExtraBlockDataTarget,
PublicValues, PublicValuesTarget, TrieRoots, TrieRootsTarget,
@@ -48,7 +50,7 @@ use crate::recursive_verifier::{
recursive_stark_circuit, set_public_value_targets, PlonkWrapperCircuit, PublicInputs,
StarkWrapperCircuit,
};
use crate::util::h256_limbs;
use crate::util::{h256_limbs, u256_limbs};

/// The recursion threshold. We end a chain of recursive proofs once we reach
/// this size.
@@ -784,7 +786,20 @@
lhs_public_values.trie_roots_after,
rhs_public_values.trie_roots_before,
);

#[cfg(feature = "cdk_erigon")]
{
// Connect the burn address targets.
BurnAddrTarget::connect(
&mut builder,
lhs_public_values.burn_addr,
rhs_public_values.burn_addr,
);
BurnAddrTarget::connect(
&mut builder,
public_values.burn_addr,
rhs_public_values.burn_addr,
);
}
Self::connect_extra_public_values(
&mut builder,
&public_values.extra_block_data,
@@ -931,6 +946,12 @@
public_values.extra_block_data,
agg_pv.extra_block_data,
);
#[cfg(feature = "cdk_erigon")]
// Connect the burn address targets.
{
BurnAddrTarget::connect(&mut builder, parent_pv.burn_addr, agg_pv.burn_addr);
BurnAddrTarget::connect(&mut builder, public_values.burn_addr, agg_pv.burn_addr);
}

// Make connections between block proofs, and check initial and final block
// values.
@@ -1405,6 +1426,8 @@ where
let agg_public_values = PublicValues {
trie_roots_before: lhs_public_values.trie_roots_before,
trie_roots_after: rhs_public_values.trie_roots_after,
#[cfg(feature = "cdk_erigon")]
burn_addr: lhs_public_values.burn_addr,
extra_block_data: ExtraBlockData {
checkpoint_state_trie_root: lhs_public_values
.extra_block_data
@@ -1526,13 +1549,29 @@ where
nonzero_pis.insert(key, value);
}

#[cfg(not(feature = "cdk_erigon"))]
let burn_addr_offset = 0;
#[cfg(feature = "cdk_erigon")]
let burn_addr_offset = BurnAddrTarget::SIZE;

#[cfg(feature = "cdk_erigon")]
{
let burn_addr_keys =
TrieRootsTarget::SIZE * 2..TrieRootsTarget::SIZE * 2 + BurnAddrTarget::SIZE;
for (key, &value) in burn_addr_keys.zip_eq(&u256_limbs(public_values.burn_addr)) {
nonzero_pis.insert(key, value);
}
}
// Initialize the checkpoint state root extra data.
let checkpoint_state_trie_keys =
TrieRootsTarget::SIZE * 2 + BlockMetadataTarget::SIZE + BlockHashesTarget::SIZE
..TrieRootsTarget::SIZE * 2
+ BlockMetadataTarget::SIZE
+ BlockHashesTarget::SIZE
+ 8;
let checkpoint_state_trie_keys = burn_addr_offset
+ TrieRootsTarget::SIZE * 2
+ BlockMetadataTarget::SIZE
+ BlockHashesTarget::SIZE
..burn_addr_offset
+ TrieRootsTarget::SIZE * 2
+ BlockMetadataTarget::SIZE
+ BlockHashesTarget::SIZE
+ 8;
for (key, &value) in checkpoint_state_trie_keys.zip_eq(&h256_limbs::<F>(
public_values.extra_block_data.checkpoint_state_trie_root,
)) {
@@ -1541,18 +1580,25 @@ where

// Initialize checkpoint block hashes.
// These will be all zeros for the initial genesis checkpoint.
let block_hashes_keys = TrieRootsTarget::SIZE * 2 + BlockMetadataTarget::SIZE
..TrieRootsTarget::SIZE * 2 + BlockMetadataTarget::SIZE + BlockHashesTarget::SIZE
- 8;
let block_hashes_keys =
burn_addr_offset + TrieRootsTarget::SIZE * 2 + BlockMetadataTarget::SIZE
..burn_addr_offset
+ TrieRootsTarget::SIZE * 2
+ BlockMetadataTarget::SIZE
+ BlockHashesTarget::SIZE
- 8;

for i in 0..public_values.block_hashes.prev_hashes.len() - 1 {
let targets = h256_limbs::<F>(public_values.block_hashes.prev_hashes[i]);
for j in 0..8 {
nonzero_pis.insert(block_hashes_keys.start + 8 * (i + 1) + j, targets[j]);
}
}
let block_hashes_current_start =
TrieRootsTarget::SIZE * 2 + BlockMetadataTarget::SIZE + BlockHashesTarget::SIZE - 8;
let block_hashes_current_start = burn_addr_offset
+ TrieRootsTarget::SIZE * 2
+ BlockMetadataTarget::SIZE
+ BlockHashesTarget::SIZE
- 8;
let cur_targets = h256_limbs::<F>(public_values.block_hashes.prev_hashes[255]);
for i in 0..8 {
nonzero_pis.insert(block_hashes_current_start + i, cur_targets[i]);
Expand All @@ -1561,7 +1607,7 @@ where
// Initialize the checkpoint block number.
// Subtraction would result in an invalid proof for genesis, but we shouldn't
// try proving this block anyway.
let block_number_key = TrieRootsTarget::SIZE * 2 + 6;
let block_number_key = burn_addr_offset + TrieRootsTarget::SIZE * 2 + 6;
nonzero_pis.insert(
block_number_key,
F::from_canonical_u64(public_values.block_metadata.block_number.low_u64() - 1),
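With the feature enabled, the `BurnAddrTarget::connect` calls force the lhs/rhs proofs and the aggregated public values to agree on the burn address, and its limbs occupy `BurnAddrTarget::SIZE` public inputs immediately after the two `TrieRootsTarget` blocks, shifting every later range by `burn_addr_offset`. A rough sketch of that layout arithmetic; the constant values below are placeholders standing in for the real `SIZE` constants, only the ordering and offset arithmetic mirror the diff:

use std::ops::Range;

// Placeholder sizes purely for illustration.
const TRIE_ROOTS_SIZE: usize = 24;     // assumed stand-in for TrieRootsTarget::SIZE
const BLOCK_METADATA_SIZE: usize = 85; // assumed stand-in for BlockMetadataTarget::SIZE
const BLOCK_HASHES_SIZE: usize = 2056; // assumed stand-in for BlockHashesTarget::SIZE
const BURN_ADDR_SIZE: usize = 8;       // assumed stand-in for BurnAddrTarget::SIZE

fn public_input_layout(cdk_erigon: bool) -> (Range<usize>, Range<usize>, usize) {
    // Zero when the feature is disabled, so the pre-existing layout is untouched.
    let burn_addr_offset = if cdk_erigon { BURN_ADDR_SIZE } else { 0 };

    // The burn-address limbs sit directly after the before/after trie roots.
    let burn_addr_keys = TRIE_ROOTS_SIZE * 2..TRIE_ROOTS_SIZE * 2 + burn_addr_offset;

    // Every later range is shifted by the same offset, as in the diff above.
    let checkpoint_state_trie_keys = burn_addr_offset
        + TRIE_ROOTS_SIZE * 2
        + BLOCK_METADATA_SIZE
        + BLOCK_HASHES_SIZE
        ..burn_addr_offset
            + TRIE_ROOTS_SIZE * 2
            + BLOCK_METADATA_SIZE
            + BLOCK_HASHES_SIZE
            + 8;
    let block_number_key = burn_addr_offset + TRIE_ROOTS_SIZE * 2 + 6;

    (burn_addr_keys, checkpoint_state_trie_keys, block_number_key)
}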