-
Notifications
You must be signed in to change notification settings - Fork 44
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
perf: compute subtrees in parallel when computing proof #169
Open
estensen
wants to merge
17
commits into
ralexstokes:main
Choose a base branch
from
estensen:bench-proof
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from 15 commits
Commits
Show all changes
17 commits
Select commit
Hold shift + click to select a range
1231321
feat: add bench for computing proof
estensen 812c79e
feat: benchmark mainnet block
estensen 5dc3a33
feat: support hashtree with feature
estensen 32c3bcf
chore: ignore flamegraph and DS_STORE
estensen f00ed20
chore: benchmark block just above 256 txs
estensen 1d4dac8
chore: don't bench multiple indices
estensen 26e2a94
perf: process subtrees in parallel
estensen 5f06bc8
fix: return when no pairs to process
estensen 7acc300
fix: return early when less than two chunks
estensen 34fbbde
chore: temporary remove hashtree feature
estensen 713f04d
chore: extract functions
estensen 843d070
chore: compute merkle tree serially when less than 8 leaves
estensen 74acc3d
perf: process 4 subtrees in parallel instead of 2
estensen 8e689cb
chore: only compute merkle tree in parallel when >4 CPUs are available
estensen e0ca957
perf: process 8 subtrees in parallel instead of 4
estensen aaea182
perf: don't create extra buffers for subtrees
estensen 6f00708
Revert "perf: don't create extra buffers for subtrees"
estensen File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -2,3 +2,7 @@ target | |
Cargo.lock | ||
consensus-spec-tests | ||
cobertura.xml | ||
flamegraph.svg | ||
|
||
# macOS | ||
.DS_Store |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,96 @@ | ||
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; | ||
use ssz_rs::{List, PathElement, Prove}; | ||
use std::{convert::TryFrom, env, fs::File, hint::black_box, io::BufReader, path::Path}; | ||
|
||
// Limits taken from the Bellatrix beacon-chain spec:
// https://github.com/ethereum/consensus-specs/blob/85b4d003668731cbad63d6b6ba53fcc7d042cba1/specs/bellatrix/beacon-chain.md?plain=1#L69-L76
const MAX_BYTES_PER_TRANSACTION: usize = 1_073_741_824; // 1 GiB
const MAX_TRANSACTIONS_PER_PAYLOAD: usize = 1_048_576; // 2^20

// Test blocks just above and below 256, a power of 2, to exercise both
// sides of that tree-size boundary.
// 21315748.json contains 247 transactions.
// 21327802.json contains 261 transactions.
// NOTE(review): paths are relative — presumably benches run from the crate root.
const TRANSACTIONS_JSON_PATHS: &[&str] = &["benches/21315748.json", "benches/21327802.json"];

/// Represents the structure of the JSON file.
/// Each transaction is a hex-encoded string prefixed with "0x".
type TransactionsJson = Vec<String>;
|
||
/// Reads transaction data from a local JSON file. | ||
fn load_transactions<P: AsRef<Path>>( | ||
file_path: P, | ||
) -> List<List<u8, MAX_BYTES_PER_TRANSACTION>, MAX_TRANSACTIONS_PER_PAYLOAD> { | ||
// Open the JSON file | ||
let current_dir = env::current_dir().expect("Failed to get current working directory"); | ||
let file = File::open(&file_path).unwrap_or_else(|e| { | ||
panic!( | ||
"Failed to open JSON file at {:?}. Current working directory: {:?}. Error: {}", | ||
file_path.as_ref(), | ||
current_dir, | ||
e | ||
) | ||
}); | ||
let reader = BufReader::new(file); | ||
|
||
// Deserialize the JSON into a Vec<String> | ||
let transactions_json: TransactionsJson = | ||
serde_json::from_reader(reader).expect("Failed to parse JSON"); | ||
|
||
// Convert each hex string to Vec<u8> and then to List<u8, MAX_BYTES_PER_TRANSACTION> | ||
let mut inner: Vec<List<u8, MAX_BYTES_PER_TRANSACTION>> = | ||
Vec::with_capacity(transactions_json.len()); | ||
|
||
for (i, tx_hex) in transactions_json.into_iter().enumerate() { | ||
// Remove "0x" prefix | ||
let tx_hex_trimmed = tx_hex.strip_prefix("0x").unwrap_or(&tx_hex); | ||
|
||
// Decode hex string to Vec<u8> | ||
let tx_bytes = hex::decode(tx_hex_trimmed) | ||
.unwrap_or_else(|_| panic!("Failed to decode hex string at index {}", i)); | ||
|
||
// Convert Vec<u8> to List<u8, MAX_BYTES_PER_TRANSACTION> | ||
let tx_list = List::<u8, MAX_BYTES_PER_TRANSACTION>::try_from(tx_bytes).expect(&format!( | ||
"Failed to convert Vec<u8> to List<u8, {}> at index {}", | ||
MAX_BYTES_PER_TRANSACTION, i | ||
)); | ||
|
||
inner.push(tx_list); | ||
} | ||
|
||
let outer = | ||
List::<List<u8, MAX_BYTES_PER_TRANSACTION>, MAX_TRANSACTIONS_PER_PAYLOAD>::try_from(inner) | ||
.expect("Failed to convert Vec<List<u8, MAX_BYTES_PER_TRANSACTION>> to outer List"); | ||
|
||
outer | ||
} | ||
|
||
/// Criterion entry point: benchmarks `prove` on real transaction lists
/// loaded from the JSON fixtures in `TRANSACTIONS_JSON_PATHS`.
fn bench_prove(c: &mut Criterion) {
    for &file_path_str in TRANSACTIONS_JSON_PATHS {
        let file_path = Path::new(file_path_str);

        // Generate the nested List from the JSON file
        let outer = load_transactions(file_path);
        let size = outer.len();

        // Benchmark only the middle index. (Earlier revisions also benched
        // first/last — see the "don't bench multiple indices" commit.)
        let index = size / 2;

        let mut group =
            c.benchmark_group(format!("Prove Benchmark - File: {} - size {}", file_path_str, size));
        // Reduce sample size for larger benchmarks to ensure completion
        group.sample_size(10);

        let path = vec![PathElement::from(index)];

        group.bench_with_input(BenchmarkId::from_parameter(index), &path, |b, path| {
            b.iter(|| {
                // black_box input and output so the optimizer cannot
                // const-fold the proof computation away.
                let proof = outer.prove(black_box(path)).expect("Failed to generate proof");
                black_box(proof)
            })
        });

        group.finish();
    }
}
|
||
// Register `bench_prove` with criterion's generated benchmark harness.
criterion_group!(benches, bench_prove);
criterion_main!(benches);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
use super::BYTES_PER_CHUNK; | ||
|
||
use ::sha2::{Digest, Sha256}; | ||
|
||
#[inline] | ||
fn hash_chunks_sha256(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> [u8; BYTES_PER_CHUNK] { | ||
let mut hasher = Sha256::new(); | ||
hasher.update(left.as_ref()); | ||
hasher.update(right.as_ref()); | ||
hasher.finalize_reset().into() | ||
} | ||
|
||
/// Function that hashes 2 [BYTES_PER_CHUNK] (32) len byte slices together. Depending on the feature | ||
/// flags, this will either use: | ||
/// - sha256 (default) | ||
/// - TODO: sha256 with assembly support (with the "sha2-asm" feature flag) | ||
/// - TODO: hashtree (with the "hashtree" feature flag) | ||
#[inline] | ||
pub fn hash_chunks(left: impl AsRef<[u8]>, right: impl AsRef<[u8]>) -> [u8; BYTES_PER_CHUNK] { | ||
debug_assert!(left.as_ref().len() == BYTES_PER_CHUNK); | ||
debug_assert!(right.as_ref().len() == BYTES_PER_CHUNK); | ||
hash_chunks_sha256(left, right) | ||
} |
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Can you move this test data file so that we have a path like
benches/test_data/block_transactions/21315748.json
?