Skip to content
This repository has been archived by the owner on Aug 21, 2024. It is now read-only.

Commit

Permalink
chore: move get_tx_weights out of the Bouncer (#1940)
Browse files Browse the repository at this point in the history
  • Loading branch information
Yoni-Starkware authored Jun 4, 2024
1 parent f770f03 commit 09b6617
Showing 1 changed file with 50 additions and 56 deletions.
106 changes: 50 additions & 56 deletions crates/blockifier/src/bouncer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -218,12 +218,23 @@ impl Bouncer {
) -> TransactionExecutorResult<()> {
// The countings here should be linear in the transactional state changes and execution info
// rather than the cumulative state attributes.
let state_changes_keys = tx_state_changes_keys.difference(&self.state_changes_keys);
let tx_weights = self.get_tx_weights(
let marginal_state_changes_keys =
tx_state_changes_keys.difference(&self.state_changes_keys);
let marginal_executed_class_hashes = tx_execution_summary
.executed_class_hashes
.difference(&self.executed_class_hashes)
.cloned()
.collect();
let n_marginal_visited_storage_entries = tx_execution_summary
.visited_storage_entries
.difference(&self.visited_storage_entries)
.count();
let tx_weights = get_tx_weights(
state_reader,
tx_execution_summary,
&marginal_executed_class_hashes,
n_marginal_visited_storage_entries,
tx_resources,
&state_changes_keys,
&marginal_state_changes_keys,
)?;

// Check if the transaction is too large to fit any block.
Expand All @@ -241,84 +252,67 @@ impl Bouncer {
Err(TransactionExecutorError::BlockFull)?
}

self._update(tx_weights, tx_execution_summary, &state_changes_keys);
self._update(tx_weights, tx_execution_summary, &marginal_state_changes_keys);

Ok(())
}

/// Computes the weights this transaction would add to the block if accepted.
/// Counts only the *marginal* OS work: classes and storage entries already
/// accounted for at block level (in `self`) are diffed out by the helpers.
/// NOTE(review): `self` is only read here (`executed_class_hashes`,
/// `visited_storage_entries`); `&self` would presumably suffice — confirm.
pub fn get_tx_weights<S: StateReader>(
&mut self,
state_reader: &S,
tx_execution_summary: &ExecutionSummary,
tx_resources: &TransactionResources,
state_changes_keys: &StateChangesKeys,
) -> TransactionExecutorResult<BouncerWeights> {
// L1 messaging contribution: message segment length and its gas usage.
let (message_segment_length, gas_usage) =
tx_resources.starknet_resources.calculate_message_l1_resources();

// OS overhead on top of the transaction's own VM resources:
// Casm hash computation for classes not yet executed in this block,
// plus Patricia tree updates for storage entries not yet visited.
let mut additional_os_resources = get_casm_hash_calculation_resources(
state_reader,
&self.executed_class_hashes,
&tx_execution_summary.executed_class_hashes,
)?;
additional_os_resources += &get_particia_update_resources(
&self.visited_storage_entries,
&tx_execution_summary.visited_storage_entries,
);

let vm_resources = &additional_os_resources + &tx_resources.vm_resources;

Ok(BouncerWeights {
gas: gas_usage,
message_segment_length,
n_events: tx_resources.starknet_resources.n_events,
n_steps: vm_resources.total_n_steps(),
builtin_count: BuiltinCount::from(vm_resources.prover_builtins()),
state_diff_size: get_onchain_data_segment_length(&state_changes_keys.count()),
})
}

/// Test-only setter: overwrites the accumulated block weights directly.
#[cfg(test)]
pub fn set_accumulated_weights(&mut self, weights: BouncerWeights) {
self.accumulated_weights = weights;
}
}

/// Calculates the weights the given transaction would add to the block.
///
/// The caller supplies the *marginal* quantities — classes, storage entries and
/// state-change keys not yet accounted for at block level — so the result is
/// linear in the transaction's own contribution.
pub fn get_tx_weights<S: StateReader>(
    state_reader: &S,
    executed_class_hashes: &HashSet<ClassHash>,
    n_visited_storage_entries: usize,
    tx_resources: &TransactionResources,
    state_changes_keys: &StateChangesKeys,
) -> TransactionExecutorResult<BouncerWeights> {
    // L1 messaging contribution: message segment length and its gas usage.
    let (message_segment_length, gas_usage) =
        tx_resources.starknet_resources.calculate_message_l1_resources();

    // OS overhead on top of the transaction's own VM resources: Casm hash
    // computation for the newly executed classes, plus Patricia tree updates
    // for the newly visited storage entries.
    let mut additional_os_resources =
        get_casm_hash_calculation_resources(state_reader, executed_class_hashes)?;
    additional_os_resources += &get_particia_update_resources(n_visited_storage_entries);

    let vm_resources = &additional_os_resources + &tx_resources.vm_resources;

    Ok(BouncerWeights {
        gas: gas_usage,
        message_segment_length,
        n_events: tx_resources.starknet_resources.n_events,
        n_steps: vm_resources.total_n_steps(),
        builtin_count: BuiltinCount::from(vm_resources.prover_builtins()),
        state_diff_size: get_onchain_data_segment_length(&state_changes_keys.count()),
    })
}

/// Returns the estimated Cairo resources for Casm hash calculation (done by the OS), of the given
/// classes.
pub fn get_casm_hash_calculation_resources<S: StateReader>(
state_reader: &S,
block_executed_class_hashes: &HashSet<ClassHash>,
tx_executed_class_hashes: &HashSet<ClassHash>,
executed_class_hashes: &HashSet<ClassHash>,
) -> TransactionExecutorResult<ExecutionResources> {
let newly_executed_class_hashes: HashSet<&ClassHash> =
tx_executed_class_hashes.difference(block_executed_class_hashes).collect();

let mut casm_hash_computation_resources = ExecutionResources::default();

for class_hash in newly_executed_class_hashes {
for class_hash in executed_class_hashes {
let class = state_reader.get_compiled_contract_class(*class_hash)?;
casm_hash_computation_resources += &class.estimate_casm_hash_computation_resources();
}

Ok(casm_hash_computation_resources)
}

/// Returns the estimated Cairo resources for Patricia tree updates, or hash invocations
/// (done by the OS), required for accessing (read/write) the given storage entries.
// For each tree: n_visited_leaves * log(n_initialized_leaves)
// as the height of a Patricia tree with N uniformly distributed leaves is ~log(N),
// and number of visited leaves includes reads and writes.
pub fn get_particia_update_resources(
block_visited_storage_entries: &HashSet<StorageEntry>,
tx_visited_storage_entries: &HashSet<StorageEntry>,
) -> ExecutionResources {
let newly_visited_storage_entries: HashSet<&StorageEntry> =
tx_visited_storage_entries.difference(block_visited_storage_entries).collect();
let n_newly_visited_leaves = newly_visited_storage_entries.len();

pub fn get_particia_update_resources(n_visited_storage_entries: usize) -> ExecutionResources {
const TREE_HEIGHT_UPPER_BOUND: usize = 24;
let n_updates = n_newly_visited_leaves * TREE_HEIGHT_UPPER_BOUND;
let n_updates = n_visited_storage_entries * TREE_HEIGHT_UPPER_BOUND;

ExecutionResources {
// TODO(Yoni, 1/5/2024): re-estimate this.
Expand Down

0 comments on commit 09b6617

Please sign in to comment.