diff --git a/ethportal-api/src/lib.rs b/ethportal-api/src/lib.rs index f44356813..52d20e1cf 100644 --- a/ethportal-api/src/lib.rs +++ b/ethportal-api/src/lib.rs @@ -32,9 +32,7 @@ pub use types::{ content_key::{ beacon::{BeaconContentKey, LightClientBootstrapKey, LightClientUpdatesByRangeKey}, error::ContentKeyError, - history::{ - BlockBodyKey, BlockHeaderKey, BlockReceiptsKey, EpochAccumulatorKey, HistoryContentKey, - }, + history::{BlockBodyKey, BlockReceiptsKey, HistoryContentKey}, overlay::{IdentityContentKey, OverlayContentKey}, state::StateContentKey, }, diff --git a/ethportal-api/src/types/content_key/history.rs b/ethportal-api/src/types/content_key/history.rs index 6db52d11b..407944bad 100644 --- a/ethportal-api/src/types/content_key/history.rs +++ b/ethportal-api/src/types/content_key/history.rs @@ -2,7 +2,7 @@ use alloy_primitives::B256; use rand::{seq::SliceRandom, RngCore}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use sha2::{Digest as Sha2Digest, Sha256}; -use ssz::{Decode, DecodeError}; +use ssz::{Decode, DecodeError, Encode}; use ssz_derive::{Decode, Encode}; use std::{fmt, hash::Hash}; @@ -14,36 +14,41 @@ use crate::{ // Prefixes for the different types of history content keys: // https://github.com/ethereum/portal-network-specs/blob/638aca50c913a749d0d762264d9a4ac72f1a9966/history-network.md -pub const HISTORY_BLOCK_HEADER_KEY_PREFIX: u8 = 0x00; +pub const HISTORY_BLOCK_HEADER_BY_HASH_KEY_PREFIX: u8 = 0x00; pub const HISTORY_BLOCK_BODY_KEY_PREFIX: u8 = 0x01; pub const HISTORY_BLOCK_RECEIPTS_KEY_PREFIX: u8 = 0x02; -pub const HISTORY_BLOCK_EPOCH_ACCUMULATOR_KEY_PREFIX: u8 = 0x03; +pub const HISTORY_BLOCK_HEADER_BY_NUMBER_KEY_PREFIX: u8 = 0x03; /// A content key in the history overlay network. #[derive(Clone, Debug, Eq, PartialEq)] pub enum HistoryContentKey { - /// A block header with accumulator proof. - BlockHeaderWithProof(BlockHeaderKey), + /// A block header by hash. 
+ BlockHeaderByHash(BlockHeaderByHashKey), + /// A block header by number. + BlockHeaderByNumber(BlockHeaderByNumberKey), /// A block body. BlockBody(BlockBodyKey), /// The transaction receipts for a block. BlockReceipts(BlockReceiptsKey), - /// An epoch header accumulator. - EpochAccumulator(EpochAccumulatorKey), } impl HistoryContentKey { pub fn random() -> anyhow::Result { - let mut random_bytes: Vec = vec![0u8; 32]; - rand::thread_rng().fill_bytes(&mut random_bytes[..]); let random_prefix = [ - HISTORY_BLOCK_HEADER_KEY_PREFIX, + HISTORY_BLOCK_HEADER_BY_HASH_KEY_PREFIX, HISTORY_BLOCK_BODY_KEY_PREFIX, HISTORY_BLOCK_RECEIPTS_KEY_PREFIX, - HISTORY_BLOCK_EPOCH_ACCUMULATOR_KEY_PREFIX, + HISTORY_BLOCK_HEADER_BY_NUMBER_KEY_PREFIX, ] .choose(&mut rand::thread_rng()) .ok_or_else(|| anyhow::Error::msg("Failed to choose random prefix"))?; + let mut random_bytes: Vec = + if *random_prefix == HISTORY_BLOCK_HEADER_BY_NUMBER_KEY_PREFIX { + vec![0u8; 8] + } else { + vec![0u8; 32] + }; + rand::thread_rng().fill_bytes(&mut random_bytes[..]); random_bytes.insert(0, *random_prefix); let random_bytes: RawContentKey = random_bytes.into(); Self::try_from(random_bytes).map_err(anyhow::Error::msg) @@ -75,14 +80,14 @@ impl<'de> Deserialize<'de> for HistoryContentKey { } } -/// A key for a block header. +/// A key for a block header by hash. #[derive(Clone, Debug, Decode, Encode, Eq, PartialEq, Default)] -pub struct BlockHeaderKey { +pub struct BlockHeaderByHashKey { /// Hash of the block. pub block_hash: [u8; 32], } -impl From for BlockHeaderKey { +impl From for BlockHeaderByHashKey { fn from(block_hash: B256) -> Self { Self { block_hash: block_hash.0, @@ -90,6 +95,19 @@ impl From for BlockHeaderKey { } } +/// A key for a block header by number. +#[derive(Clone, Debug, Decode, Encode, Eq, PartialEq, Default)] +pub struct BlockHeaderByNumberKey { + /// Number of the block. 
+ pub block_number: u64, +} + +impl From for BlockHeaderByNumberKey { + fn from(block_number: u64) -> Self { + Self { block_number } + } +} + /// A key for a block body. #[derive(Clone, Debug, Decode, Encode, Eq, PartialEq)] pub struct BlockBodyKey { @@ -112,12 +130,6 @@ pub struct BlockReceiptsKey { pub block_hash: [u8; 32], } -/// A key for an epoch header accumulator. -#[derive(Clone, Debug, Decode, Encode, Eq, PartialEq)] -pub struct EpochAccumulatorKey { - pub epoch_hash: B256, -} - impl TryFrom for HistoryContentKey { type Error = ContentKeyError; @@ -129,8 +141,8 @@ impl TryFrom for HistoryContentKey { }); }; match selector { - HISTORY_BLOCK_HEADER_KEY_PREFIX => BlockHeaderKey::from_ssz_bytes(key) - .map(Self::BlockHeaderWithProof) + HISTORY_BLOCK_HEADER_BY_HASH_KEY_PREFIX => BlockHeaderByHashKey::from_ssz_bytes(key) + .map(Self::BlockHeaderByHash) .map_err(|e| ContentKeyError::from_decode_error(e, value)), HISTORY_BLOCK_BODY_KEY_PREFIX => BlockBodyKey::from_ssz_bytes(key) .map(Self::BlockBody) @@ -138,9 +150,11 @@ impl TryFrom for HistoryContentKey { HISTORY_BLOCK_RECEIPTS_KEY_PREFIX => BlockReceiptsKey::from_ssz_bytes(key) .map(Self::BlockReceipts) .map_err(|e| ContentKeyError::from_decode_error(e, value)), - HISTORY_BLOCK_EPOCH_ACCUMULATOR_KEY_PREFIX => EpochAccumulatorKey::from_ssz_bytes(key) - .map(Self::EpochAccumulator) - .map_err(|e| ContentKeyError::from_decode_error(e, value)), + HISTORY_BLOCK_HEADER_BY_NUMBER_KEY_PREFIX => { + BlockHeaderByNumberKey::from_ssz_bytes(key) + .map(Self::BlockHeaderByNumber) + .map_err(|e| ContentKeyError::from_decode_error(e, value)) + } _ => Err(ContentKeyError::from_decode_error( DecodeError::UnionSelectorInvalid(selector), value, @@ -152,8 +166,8 @@ impl TryFrom for HistoryContentKey { impl fmt::Display for HistoryContentKey { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let s = match self { - Self::BlockHeaderWithProof(header) => format!( - "BlockHeaderWithProof {{ block_hash: {} }}", + 
Self::BlockHeaderByHash(header) => format!( + "BlockHeaderByHash {{ block_hash: {} }}", hex_encode_compact(header.block_hash) ), Self::BlockBody(body) => format!( @@ -166,10 +180,10 @@ impl fmt::Display for HistoryContentKey { hex_encode_compact(receipts.block_hash) ) } - Self::EpochAccumulator(acc) => { + Self::BlockHeaderByNumber(header) => { format!( - "EpochAccumulator {{ epoch_hash: {} }}", - hex_encode_compact(acc.epoch_hash.0) + "BlockHeaderByNumber {{ block_number: {} }}", + header.block_number ) } }; @@ -189,8 +203,8 @@ impl OverlayContentKey for HistoryContentKey { let mut bytes: Vec = Vec::new(); match self { - HistoryContentKey::BlockHeaderWithProof(k) => { - bytes.push(HISTORY_BLOCK_HEADER_KEY_PREFIX); + HistoryContentKey::BlockHeaderByHash(k) => { + bytes.push(HISTORY_BLOCK_HEADER_BY_HASH_KEY_PREFIX); bytes.extend_from_slice(&k.block_hash); } HistoryContentKey::BlockBody(k) => { @@ -201,9 +215,9 @@ impl OverlayContentKey for HistoryContentKey { bytes.push(HISTORY_BLOCK_RECEIPTS_KEY_PREFIX); bytes.extend_from_slice(&k.block_hash); } - HistoryContentKey::EpochAccumulator(k) => { - bytes.push(HISTORY_BLOCK_EPOCH_ACCUMULATOR_KEY_PREFIX); - bytes.extend_from_slice(&k.epoch_hash.0); + HistoryContentKey::BlockHeaderByNumber(k) => { + bytes.push(HISTORY_BLOCK_HEADER_BY_NUMBER_KEY_PREFIX); + bytes.extend_from_slice(&k.block_number.as_ssz_bytes()); } } @@ -224,7 +238,7 @@ mod test { ]; #[test] - fn block_header() { + fn block_header_by_hash() { const KEY_STR: &str = "0x00d1c390624d3bd4e409a61a858e5dcc5517729a9170d014a6c96530d64dd8621d"; let expected_content_key = hex_decode(KEY_STR).unwrap(); @@ -234,17 +248,47 @@ mod test { 0x6e, 0x38, 0x95, 0xfe, ]; - let header = BlockHeaderKey { + let header = BlockHeaderByHashKey { block_hash: BLOCK_HASH, }; - let key = HistoryContentKey::BlockHeaderWithProof(header); + let key = HistoryContentKey::BlockHeaderByHash(header); + + assert_eq!(key.to_bytes(), expected_content_key); + assert_eq!(key.content_id(), 
expected_content_id); + assert_eq!( + key.to_string(), + "BlockHeaderByHash { block_hash: 0xd1c3..621d }" + ); + assert_eq!(key.to_hex(), KEY_STR); + } + + #[test] + fn block_header_by_number() { + const BLOCK_NUMBER: u64 = 12345678; + const KEY_STR: &str = "0x034e61bc0000000000"; + let expected_content_key = hex_decode(KEY_STR).unwrap(); + let expected_content_id: [u8; 32] = + hex_decode("0x2113990747a85ab39785d21342fa5db1f68acc0011605c0c73f68fc331643dcf") + .unwrap() + .try_into() + .unwrap(); + + let header = BlockHeaderByNumberKey { + block_number: BLOCK_NUMBER, + }; + + let key = HistoryContentKey::BlockHeaderByNumber(header); + + // round trip + let decoded = HistoryContentKey::try_from(key.to_bytes()).unwrap(); + assert_eq!(decoded, key); assert_eq!(key.to_bytes(), expected_content_key); assert_eq!(key.content_id(), expected_content_id); assert_eq!( key.to_string(), - "BlockHeaderWithProof { block_hash: 0xd1c3..621d }" + "BlockHeaderByNumber { block_number: 12345678 }" ); assert_eq!(key.to_hex(), KEY_STR); } @@ -266,6 +310,10 @@ mod test { let key = HistoryContentKey::BlockBody(body); + // round trip + let decoded = HistoryContentKey::try_from(key.to_bytes()).unwrap(); + assert_eq!(decoded, key); + assert_eq!(key.to_bytes(), expected_content_key); assert_eq!(key.content_id(), expected_content_id); assert_eq!(key.to_string(), "BlockBody { block_hash: 0xd1c3..621d }"); @@ -298,42 +346,28 @@ mod test { assert_eq!(key.to_hex(), KEY_STR); } - // test values sourced from: https://github.com/ethereum/portal-network-specs/blob/master/content-keys-test-vectors.md #[test] - fn epoch_accumulator_key() { - let epoch_hash = - hex_decode("0xe242814b90ed3950e13aac7e56ce116540c71b41d1516605aada26c6c07cc491") - .unwrap(); - const KEY_STR: &str = - "0x03e242814b90ed3950e13aac7e56ce116540c71b41d1516605aada26c6c07cc491"; - let expected_content_key = hex_decode(KEY_STR).unwrap(); - let expected_content_id = - 
hex_decode("0x9fb2175e76c6989e0fdac3ee10c40d2a81eb176af32e1c16193e3904fe56896e") - .unwrap(); - - let key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { - epoch_hash: B256::from_slice(&epoch_hash), + fn ser_de_block_header_by_hash() { + let content_key_json = + "\"0x00d1c390624d3bd4e409a61a858e5dcc5517729a9170d014a6c96530d64dd8621d\""; + let expected_content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { + block_hash: BLOCK_HASH, }); - // round trip - let decoded = HistoryContentKey::try_from(key.to_bytes()).unwrap(); - assert_eq!(decoded, key); + let content_key: HistoryContentKey = serde_json::from_str(content_key_json).unwrap(); - assert_eq!(key.to_bytes(), expected_content_key); - assert_eq!(key.content_id(), expected_content_id.as_ref() as &[u8]); + assert_eq!(content_key, expected_content_key); assert_eq!( - key.to_string(), - "EpochAccumulator { epoch_hash: 0xe242..c491 }" + serde_json::to_string(&content_key).unwrap(), + content_key_json ); - assert_eq!(key.to_hex(), KEY_STR); } #[test] - fn ser_de_block_header() { - let content_key_json = - "\"0x00d1c390624d3bd4e409a61a858e5dcc5517729a9170d014a6c96530d64dd8621d\""; - let expected_content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { - block_hash: BLOCK_HASH, + fn ser_de_block_header_by_number() { + let content_key_json = "\"0x034e61bc0000000000\""; + let expected_content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: 12345678, }); let content_key: HistoryContentKey = serde_json::from_str(content_key_json).unwrap(); @@ -389,24 +423,4 @@ mod test { content_key_json ); } - - #[test] - fn ser_de_epoch_accumulator() { - let content_key_json = - "\"0x03e242814b90ed3950e13aac7e56ce116540c71b41d1516605aada26c6c07cc491\""; - let epoch_hash = - hex_decode("0xe242814b90ed3950e13aac7e56ce116540c71b41d1516605aada26c6c07cc491") - .unwrap(); - let expected_content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { - 
epoch_hash: B256::from_slice(&epoch_hash), - }); - - let content_key: HistoryContentKey = serde_json::from_str(content_key_json).unwrap(); - - assert_eq!(content_key, expected_content_key); - assert_eq!( - serde_json::to_string(&content_key).unwrap(), - content_key_json - ); - } } diff --git a/ethportal-api/src/types/content_value/history.rs b/ethportal-api/src/types/content_value/history.rs index 2fd53b61a..37651f327 100644 --- a/ethportal-api/src/types/content_value/history.rs +++ b/ethportal-api/src/types/content_value/history.rs @@ -1,8 +1,7 @@ use crate::{ types::{ - cli::HISTORY_NETWORK, - content_value::ContentValue, - execution::{accumulator::EpochAccumulator, header_with_proof::HeaderWithProof}, + cli::HISTORY_NETWORK, content_value::ContentValue, + execution::header_with_proof::HeaderWithProof, }, utils::bytes::hex_encode, BlockBody, ContentValueError, HistoryContentKey, RawContentValue, Receipts, @@ -16,7 +15,6 @@ pub enum HistoryContentValue { BlockHeaderWithProof(HeaderWithProof), BlockBody(BlockBody), Receipts(Receipts), - EpochAccumulator(EpochAccumulator), } impl ContentValue for HistoryContentValue { @@ -27,13 +25,12 @@ impl ContentValue for HistoryContentValue { Self::BlockHeaderWithProof(value) => value.as_ssz_bytes().into(), Self::BlockBody(value) => value.as_ssz_bytes().into(), Self::Receipts(value) => value.as_ssz_bytes().into(), - Self::EpochAccumulator(value) => value.as_ssz_bytes().into(), } } fn decode(key: &Self::TContentKey, buf: &[u8]) -> Result { match key { - HistoryContentKey::BlockHeaderWithProof(_) => { + HistoryContentKey::BlockHeaderByHash(_) | HistoryContentKey::BlockHeaderByNumber(_) => { if let Ok(value) = HeaderWithProof::from_ssz_bytes(buf) { return Ok(Self::BlockHeaderWithProof(value)); } @@ -48,11 +45,6 @@ impl ContentValue for HistoryContentValue { return Ok(Self::Receipts(value)); } } - HistoryContentKey::EpochAccumulator(_) => { - if let Ok(value) = EpochAccumulator::from_ssz_bytes(buf) { - return 
Ok(Self::EpochAccumulator(value)); - } - } } Err(ContentValueError::UnknownContent { @@ -71,9 +63,6 @@ mod test { use crate::{utils::bytes::hex_decode, HistoryContentValue}; use std::fs; - /// Max number of blocks / epoch = 2 ** 13 - pub const EPOCH_SIZE: usize = 8192; - #[test] fn header_with_proof_encode_decode_fluffy() { let file = @@ -95,15 +84,6 @@ mod test { } } - #[test] - fn ssz_serde_encode_decode_fluffy_epoch_accumulator() { - // values sourced from: https://github.com/status-im/portal-spec-tests - let epoch_acc_ssz = fs::read("../trin-validation/src/assets/fluffy/epoch_acc.bin").unwrap(); - let epoch_acc = EpochAccumulator::from_ssz_bytes(&epoch_acc_ssz).unwrap(); - assert_eq!(epoch_acc.len(), EPOCH_SIZE); - assert_eq!(epoch_acc.as_ssz_bytes(), epoch_acc_ssz); - } - #[test] fn content_value_deserialization_failure_displays_debuggable_data() { let key = HistoryContentKey::random().unwrap(); diff --git a/ethportal-peertest/src/scenarios/basic.rs b/ethportal-peertest/src/scenarios/basic.rs index 1222c8509..f33994cdb 100644 --- a/ethportal-peertest/src/scenarios/basic.rs +++ b/ethportal-peertest/src/scenarios/basic.rs @@ -1,8 +1,10 @@ -use crate::{utils::fixture_header_with_proof, Peertest, PeertestNode}; +use crate::{utils::fixture_header_by_hash, Peertest, PeertestNode}; use alloy_primitives::{B256, U256}; use ethportal_api::{ - types::{distance::Distance, portal_wire::ProtocolId}, - BeaconNetworkApiClient, BlockHeaderKey, ContentValue, Discv5ApiClient, HistoryContentKey, + types::{ + content_key::history::BlockHeaderByHashKey, distance::Distance, portal_wire::ProtocolId, + }, + BeaconNetworkApiClient, ContentValue, Discv5ApiClient, HistoryContentKey, HistoryNetworkApiClient, StateNetworkApiClient, Web3ApiClient, }; use jsonrpsee::async_client::Client; @@ -175,7 +177,7 @@ pub async fn test_find_nodes_zero_distance( pub async fn test_history_store(target: &Client) { info!("Testing portal_historyStore"); - let (content_key, content_value) = 
fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let result = HistoryNetworkApiClient::store(target, content_key, content_value.encode()) .await .unwrap(); @@ -184,7 +186,7 @@ pub async fn test_history_store(target: &Client) { pub async fn test_history_local_content_absent(target: &Client) { info!("Testing portal_historyLocalContent absent"); - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: B256::random().into(), }); let error = HistoryNetworkApiClient::local_content(target, content_key) diff --git a/ethportal-peertest/src/scenarios/bridge.rs b/ethportal-peertest/src/scenarios/bridge.rs index 0ddc8e2f4..e71292ea4 100644 --- a/ethportal-peertest/src/scenarios/bridge.rs +++ b/ethportal-peertest/src/scenarios/bridge.rs @@ -1,5 +1,5 @@ use crate::{ - utils::{fixture_header_with_proof_1000010, wait_for_beacon_content, wait_for_history_content}, + utils::{fixture_header_by_hash_1000010, wait_for_beacon_content, wait_for_history_content}, Peertest, }; use ethportal_api::{ @@ -38,7 +38,7 @@ pub async fn test_history_bridge(peertest: &Peertest, portal_client: &HttpClient DEFAULT_GOSSIP_LIMIT, ); bridge.launch().await; - let (content_key, content_value) = fixture_header_with_proof_1000010(); + let (content_key, content_value) = fixture_header_by_hash_1000010(); // Check if the stored content value in bootnode's DB matches the offered let received_content_value = wait_for_history_content(&peertest.bootnode.ipc_client, content_key).await; diff --git a/ethportal-peertest/src/scenarios/find.rs b/ethportal-peertest/src/scenarios/find.rs index b1ba1bd66..0b9bf1d0d 100644 --- a/ethportal-peertest/src/scenarios/find.rs +++ b/ethportal-peertest/src/scenarios/find.rs @@ -4,7 +4,7 @@ use discv5::enr::NodeId; use jsonrpsee::async_client::Client; use tracing::info; -use crate::{utils::fixture_header_with_proof, Peertest}; +use 
crate::{utils::fixture_header_by_hash, Peertest}; use ethportal_api::{ types::{portal::ContentInfo, portal_wire::ProtocolId}, utils::bytes::hex_decode, @@ -45,7 +45,7 @@ pub async fn test_recursive_find_nodes_random(protocol: ProtocolId, peertest: &P pub async fn test_find_content_return_enr(target: &Client, peertest: &Peertest) { info!("Testing find content returns enrs properly"); - let (content_key, _) = fixture_header_with_proof(); + let (content_key, _) = fixture_header_by_hash(); // check if we can fetch data from routing table match HistoryNetworkApiClient::get_enr( @@ -83,7 +83,7 @@ pub async fn test_find_content_return_enr(target: &Client, peertest: &Peertest) pub async fn test_trace_recursive_find_content(peertest: &Peertest) { info!("Testing trace recursive find content"); - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = HistoryNetworkApiClient::store( &peertest.bootnode.ipc_client, content_key.clone(), @@ -138,7 +138,7 @@ pub async fn test_trace_recursive_find_content(peertest: &Peertest) { // This test ensures that when content is not found the correct response is returned. 
pub async fn test_trace_recursive_find_content_for_absent_content(peertest: &Peertest) { let client = &peertest.nodes[0].ipc_client; - let (content_key, _) = fixture_header_with_proof(); + let (content_key, _) = fixture_header_by_hash(); let error = HistoryNetworkApiClient::trace_recursive_find_content(client, content_key) .await @@ -151,7 +151,7 @@ pub async fn test_trace_recursive_find_content_for_absent_content(peertest: &Pee } pub async fn test_trace_recursive_find_content_local_db(peertest: &Peertest) { - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = HistoryNetworkApiClient::store( &peertest.bootnode.ipc_client, diff --git a/ethportal-peertest/src/scenarios/gossip.rs b/ethportal-peertest/src/scenarios/gossip.rs index e7b5e7a05..7cd11b747 100644 --- a/ethportal-peertest/src/scenarios/gossip.rs +++ b/ethportal-peertest/src/scenarios/gossip.rs @@ -4,8 +4,9 @@ use tracing::info; use crate::{ utils::{ - fixture_epoch_acc_1, fixture_epoch_acc_2, fixture_header_with_proof, - wait_for_history_content, + fixture_block_body_15040708, fixture_header_by_hash, + fixture_header_by_hash_with_proof_15040641, fixture_header_by_hash_with_proof_15040708, + fixture_receipts_15040641, wait_for_history_content, }, Peertest, }; @@ -17,7 +18,7 @@ pub async fn test_gossip_with_trace(peertest: &Peertest, target: &Client) { info!("Testing Gossip with tracing"); let _ = target.ping(peertest.bootnode.enr.clone()).await.unwrap(); - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let result = target .trace_gossip(content_key.clone(), content_value.encode()) .await @@ -90,37 +91,71 @@ pub async fn test_gossip_dropped_with_offer(peertest: &Peertest, target: &Client .unwrap(); let fresh_enr = fresh_target.node_info().await.unwrap().enr; - // Store accumulator_1 locally in client that is not connected to the network - 
let (acc_key_1, acc_value_1) = fixture_epoch_acc_1(); - let store_result = - HistoryNetworkApiClient::store(&fresh_target, acc_key_1.clone(), acc_value_1.encode()) - .await - .unwrap(); + // Store receipt_1 locally in client that is not connected to the network + let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040641(); + let (receipts_key_1, receipts_value_1) = fixture_receipts_15040641(); + let store_result = HistoryNetworkApiClient::store( + &fresh_target, + header_key_1.clone(), + header_value_1.encode(), + ) + .await + .unwrap(); + assert!(store_result); + let store_result = HistoryNetworkApiClient::store( + &fresh_target, + receipts_key_1.clone(), + receipts_value_1.encode(), + ) + .await + .unwrap(); assert!(store_result); - // check that fresh target has accumulator_1 + // check that fresh target has receipt_1 assert!( - HistoryNetworkApiClient::local_content(&fresh_target, acc_key_1.clone()) + HistoryNetworkApiClient::local_content(&fresh_target, header_key_1.clone()) .await .is_ok() ); - // check that target does not have accumulator_1 assert!( - HistoryNetworkApiClient::local_content(target, acc_key_1.clone()) + HistoryNetworkApiClient::local_content(&fresh_target, receipts_key_1.clone()) + .await + .is_ok() + ); + // check that target does not have receipt_1 + assert!( + HistoryNetworkApiClient::local_content(target, header_key_1.clone()) + .await + .is_err() + ); + assert!( + HistoryNetworkApiClient::local_content(target, receipts_key_1.clone()) .await .is_err() ); - // check that peertest node does not have accumulator_1 + // check that peertest node does not have receipt_1 + assert!(HistoryNetworkApiClient::local_content( + &peertest.nodes[0].ipc_client, + header_key_1.clone() + ) + .await + .is_err()); assert!(HistoryNetworkApiClient::local_content( &peertest.nodes[0].ipc_client, - acc_key_1.clone() + receipts_key_1.clone() ) .await .is_err()); - // check that peertest bootnode does not have accumulator_1 + // check that 
peertest bootnode does not have receipt_1 assert!(HistoryNetworkApiClient::local_content( &peertest.bootnode.ipc_client, - acc_key_1.clone() + header_key_1.clone() + ) + .await + .is_err()); + assert!(HistoryNetworkApiClient::local_content( + &peertest.bootnode.ipc_client, + receipts_key_1.clone() ) .await .is_err()); @@ -139,50 +174,62 @@ pub async fn test_gossip_dropped_with_offer(peertest: &Peertest, target: &Client .await .unwrap(); - // offer accumulator_2 from target to fresh target + // offer body_2 with receipt from target to fresh target // doesn't store the content locally in target - let (acc_key_2, acc_value_2) = fixture_epoch_acc_2(); + let (header_key_2, header_value_2) = fixture_header_by_hash_with_proof_15040708(); + let (body_key_2, body_value_2) = fixture_block_body_15040708(); + target + .offer( + fresh_enr.clone(), + header_key_2.clone(), + header_value_2.encode(), + ) + .await + .unwrap(); target - .offer(fresh_enr, acc_key_2.clone(), acc_value_2.encode()) + .offer(fresh_enr.clone(), body_key_2.clone(), body_value_2.encode()) .await .unwrap(); - // check that the fresh target has stored accumulator_2 + // check that the fresh target has stored block_2 assert_eq!( - acc_value_2, - wait_for_history_content(&fresh_target, acc_key_2.clone()).await + body_value_2, + wait_for_history_content(&fresh_target, body_key_2.clone()).await ); - // check that the target has both accumulators + // check that the target has block_1 and block_2 assert_eq!( - acc_value_1, - wait_for_history_content(target, acc_key_1.clone()).await + body_value_2, + wait_for_history_content(target, body_key_2.clone()).await ); assert_eq!( - acc_value_2, - wait_for_history_content(target, acc_key_2.clone()).await + receipts_value_1, + wait_for_history_content(target, receipts_key_1.clone()).await ); - // check that the peertest bootnode has both accumulators + + // check that the peertest bootnode has block_1 and block_2 assert_eq!( - acc_value_1, - 
wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_1.clone()).await + body_value_2, + wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await ); assert_eq!( - acc_value_2, - wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_2.clone()).await + receipts_value_1, + wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await ); - // check that the peertest node has both accumulators + + // check that the peertest node has block_1 and block_2 assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_1.clone()).await + body_value_2, + wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2.clone()).await ); assert_eq!( - acc_value_2, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_2.clone()).await + receipts_value_1, + wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1.clone()).await ); + // this must be at end of test, to guarantee that all propagation has concluded - // check that the fresh target has dropped accumulator_1 + // check that the fresh target has dropped block_receipt_1 assert!( - HistoryNetworkApiClient::local_content(&fresh_target, acc_key_1.clone()) + HistoryNetworkApiClient::local_content(&fresh_target, receipts_key_1.clone()) .await .is_err() ); @@ -202,18 +249,33 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target: .await .unwrap(); - // Store accumulator_1 locally in client that is not connected to the network - let (acc_key_1, acc_value_1) = fixture_epoch_acc_1(); + // Store receipts_1 locally in client, without validation, that is not connected to the network + let (receipts_key_1, receipts_value_1) = fixture_receipts_15040641(); + let store_result = HistoryNetworkApiClient::store( + &fresh_target, + receipts_key_1.clone(), + receipts_value_1.encode(), + ) + .await + .unwrap(); + assert!(store_result); + + // Store header_1, header_2, body_2 locally in 
target + let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040641(); + let (header_key_2, header_value_2) = fixture_header_by_hash_with_proof_15040708(); + let (body_key_2, body_value_2) = fixture_block_body_15040708(); let store_result = - HistoryNetworkApiClient::store(&fresh_target, acc_key_1.clone(), acc_value_1.encode()) + HistoryNetworkApiClient::store(target, header_key_1.clone(), header_value_1.encode()) .await .unwrap(); assert!(store_result); - - // Store accumulator_2 locally in target - let (acc_key_2, acc_value_2) = fixture_epoch_acc_2(); let store_result = - HistoryNetworkApiClient::store(target, acc_key_2.clone(), acc_value_2.encode()) + HistoryNetworkApiClient::store(target, header_key_2.clone(), header_value_2.encode()) + .await + .unwrap(); + assert!(store_result); + let store_result = + HistoryNetworkApiClient::store(target, body_key_2.clone(), body_value_2.encode()) .await .unwrap(); assert!(store_result); @@ -235,46 +297,48 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target: // send find_content request from fresh target to target let _result = fresh_target //.find_content(target.node_info().await.unwrap().enr, acc_key_2.clone()) - .recursive_find_content(acc_key_2.clone()) + .recursive_find_content(body_key_2.clone()) .await .unwrap(); - // check that the fresh target has stored accumulator_2 + // check that the fresh target has stored body_2 stored assert_eq!( - acc_value_2, - wait_for_history_content(&fresh_target, acc_key_2.clone()).await + body_value_2, + wait_for_history_content(&fresh_target, body_key_2.clone()).await ); - // check that the target has both accumulators + // check that the target has block_1 and block_2 assert_eq!( - acc_value_1, - wait_for_history_content(target, acc_key_1.clone()).await + body_value_2, + wait_for_history_content(target, body_key_2.clone()).await ); assert_eq!( - acc_value_2, - wait_for_history_content(target, acc_key_2.clone()).await + 
receipts_value_1, + wait_for_history_content(target, receipts_key_1.clone()).await ); - // check that the peertest bootnode has both accumulators + // check that the peertest bootnode has block_1 and block_2 assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_1.clone()).await + body_value_2, + wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await ); assert_eq!( - acc_value_2, - wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_2.clone()).await + receipts_value_1, + wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await ); - // check that the peertest node has both accumulators + + // check that the peertest node has block_1 and block_2 assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_1.clone()).await + body_value_2, + wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2.clone()).await ); assert_eq!( - acc_value_2, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_2.clone()).await + receipts_value_1, + wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1.clone()).await ); + // this must be at end of test, to guarantee that all propagation has concluded - // check that the fresh target has dropped accumulator_1 + // check that the fresh target has dropped block_receipt_1 assert!( - HistoryNetworkApiClient::local_content(&fresh_target, acc_key_1.clone()) + HistoryNetworkApiClient::local_content(&fresh_target, receipts_key_1.clone()) .await .is_err() ); diff --git a/ethportal-peertest/src/scenarios/offer_accept.rs b/ethportal-peertest/src/scenarios/offer_accept.rs index ca5317e06..affeaeaea 100644 --- a/ethportal-peertest/src/scenarios/offer_accept.rs +++ b/ethportal-peertest/src/scenarios/offer_accept.rs @@ -4,8 +4,10 @@ use tracing::info; use crate::{ utils::{ - fixture_block_body, fixture_epoch_acc_1, fixture_epoch_acc_2, fixture_header_with_proof, - 
fixture_receipts, wait_for_history_content, + fixture_block_body, fixture_block_body_15040641, fixture_block_body_15040708, + fixture_header_by_hash, fixture_header_by_hash_with_proof_15040641, + fixture_header_by_hash_with_proof_15040708, fixture_receipts_15040641, + fixture_receipts_15040708, wait_for_history_content, }, Peertest, }; @@ -17,7 +19,7 @@ use ethportal_api::{ pub async fn test_unpopulated_offer(peertest: &Peertest, target: &Client) { info!("Testing Unpopulated OFFER/ACCEPT flow"); - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); // Store content to offer in the testnode db let store_result = target .store(content_key.clone(), content_value.encode()) @@ -51,7 +53,7 @@ pub async fn test_unpopulated_offer_fails_with_missing_content( ) { info!("Testing Unpopulated OFFER/ACCEPT flow with missing content"); - let (content_key, _content_value) = fixture_header_with_proof(); + let (content_key, _content_value) = fixture_header_by_hash(); // validate that wire offer fails if content not available locally match target @@ -73,7 +75,7 @@ pub async fn test_unpopulated_offer_fails_with_missing_content( pub async fn test_populated_offer(peertest: &Peertest, target: &Client) { info!("Testing Populated Offer/ACCEPT flow"); - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let result = target .offer( Enr::from_str(&peertest.bootnode.enr.to_base64()).unwrap(), @@ -97,7 +99,7 @@ pub async fn test_populated_offer_with_trace(peertest: &Peertest, target: &Clien info!("Testing Populated Offer/ACCEPT flow with trace"); // store header for validation - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = peertest .bootnode .ipc_client @@ -131,7 +133,7 @@ pub async fn test_offer_propagates_gossip(peertest: &Peertest, target: 
&Client) info!("Testing populated offer propagates gossip"); // get content values to gossip - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); // use populated offer which means content will *not* be stored in the target's local db target .offer( @@ -159,35 +161,42 @@ pub async fn test_offer_propagates_gossip(peertest: &Peertest, target: &Client) pub async fn test_offer_propagates_gossip_with_large_content(peertest: &Peertest, target: &Client) { info!("Testing populated offer propagates gossips single large content"); - // 512kb epoch accumulator - let (content_key, content_value) = fixture_epoch_acc_1(); + + let (header_key, header_value) = fixture_header_by_hash_with_proof_15040708(); + // 763kb block body + let (body_key, body_value) = fixture_block_body_15040708(); // Store content to offer in the testnode db let store_result = target - .store(content_key.clone(), content_value.encode()) + .store(header_key, header_value.encode()) + .await + .unwrap(); + assert!(store_result); + let store_result = target + .store(body_key.clone(), body_value.encode()) .await .unwrap(); assert!(store_result); target .wire_offer( peertest.bootnode.ipc_client.node_info().await.unwrap().enr, - vec![content_key.clone()], + vec![body_key.clone()], ) .await .unwrap(); // validate that every node in the network now has a local copy of the accumulator assert_eq!( - content_value, - wait_for_history_content(target, content_key.clone()).await, + body_value, + wait_for_history_content(target, body_key.clone()).await, ); assert_eq!( - content_value, - wait_for_history_content(&peertest.nodes[0].ipc_client, content_key.clone()).await, + body_value, + wait_for_history_content(&peertest.nodes[0].ipc_client, body_key.clone()).await, ); assert_eq!( - content_value, - wait_for_history_content(&peertest.bootnode.ipc_client, content_key).await, + body_value, + wait_for_history_content(&peertest.bootnode.ipc_client, 
body_key).await, ); } @@ -198,10 +207,9 @@ pub async fn test_offer_propagates_gossip_multiple_content_values( ) { info!("Testing populated offer propagates gossips multiple content values simultaneously"); // get content values to gossip - let (header_key, header_value) = fixture_header_with_proof(); - let (body_key, body_value) = fixture_block_body(); - let (receipts_key, receipts_value) = fixture_receipts(); - let (acc_key_1, acc_value_1) = fixture_epoch_acc_1(); + let (header_key, header_value) = fixture_header_by_hash_with_proof_15040708(); + let (body_key, body_value) = fixture_block_body_15040708(); + let (receipts_key, receipts_value) = fixture_receipts_15040708(); // offer header content for validation later target @@ -238,17 +246,12 @@ pub async fn test_offer_propagates_gossip_multiple_content_values( .await .unwrap(); assert!(store_result); - let store_result = target - .store(acc_key_1.clone(), acc_value_1.encode()) - .await - .unwrap(); - assert!(store_result); // here everythings stored in target target .wire_offer( peertest.bootnode.ipc_client.node_info().await.unwrap().enr, - vec![body_key.clone(), receipts_key.clone(), acc_key_1.clone()], + vec![body_key.clone(), receipts_key.clone()], ) .await .unwrap(); @@ -279,20 +282,6 @@ pub async fn test_offer_propagates_gossip_multiple_content_values( receipts_value, wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key.clone()).await, ); - - // check that acc_content_1 is available - assert_eq!( - acc_value_1, - wait_for_history_content(target, acc_key_1.clone()).await, - ); - assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_1.clone()).await, - ); - assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_1).await, - ); } // multiple content values, > 1mb payload @@ -303,53 +292,114 @@ pub async fn test_offer_propagates_gossip_multiple_large_content_values( info!("Testing populated offer propagates gossips multiple 
large content simultaneously"); // get content values to gossip - let (acc_key_1, acc_value_1) = fixture_epoch_acc_1(); - let (acc_key_2, acc_value_2) = fixture_epoch_acc_2(); + let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040708(); + let (body_key_1, body_value_1) = fixture_block_body_15040708(); + let (receipts_key_1, receipts_value_1) = fixture_receipts_15040708(); + + // Store content to offer in the testnode db + let store_result = target + .store(header_key_1.clone(), header_value_1.encode()) + .await + .unwrap(); + assert!(store_result); + let store_result = target + .store(body_key_1.clone(), body_value_1.encode()) + .await + .unwrap(); + assert!(store_result); + let store_result = target + .store(receipts_key_1.clone(), receipts_value_1.encode()) + .await + .unwrap(); + assert!(store_result); + + let (header_key_2, header_value_2) = fixture_header_by_hash_with_proof_15040641(); + let (body_key_2, body_value_2) = fixture_block_body_15040641(); + let (receipts_key_2, receipts_value_2) = fixture_receipts_15040641(); // Store content to offer in the testnode db let store_result = target - .store(acc_key_1.clone(), acc_value_1.encode()) + .store(header_key_2.clone(), header_value_2.encode()) + .await + .unwrap(); + assert!(store_result); + let store_result = target + .store(body_key_2.clone(), body_value_2.encode()) .await .unwrap(); assert!(store_result); let store_result = target - .store(acc_key_2.clone(), acc_value_2.encode()) + .store(receipts_key_2.clone(), receipts_value_2.encode()) .await .unwrap(); assert!(store_result); + target .wire_offer( peertest.bootnode.ipc_client.node_info().await.unwrap().enr, - vec![acc_key_1.clone(), acc_key_2.clone()], + vec![ + body_key_1.clone(), + receipts_key_1.clone(), + body_key_2.clone(), + receipts_key_2.clone(), + ], ) .await .unwrap(); - // check that acc_content_1 is available + // check that body_1 is available + assert_eq!( + body_value_1, + wait_for_history_content(target, 
body_key_1.clone()).await, + ); + assert_eq!( + body_value_1, + wait_for_history_content(&peertest.bootnode.ipc_client, body_key_1.clone()).await, + ); + assert_eq!( + body_value_1, + wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_1).await, + ); + + // check that receipts_1 is available + assert_eq!( + receipts_value_1, + wait_for_history_content(target, receipts_key_1.clone()).await, + ); + assert_eq!( + receipts_value_1, + wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await, + ); + assert_eq!( + receipts_value_1, + wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1).await, + ); + + // check that body_2 is available assert_eq!( - acc_value_1, - wait_for_history_content(target, acc_key_1.clone()).await, + body_value_2, + wait_for_history_content(target, body_key_2.clone()).await, ); assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_1.clone()).await, + body_value_2, + wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await, ); assert_eq!( - acc_value_1, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_1).await, + body_value_2, + wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2).await, ); - // check that acc_content_2 is available + // check that receipts_2 is available assert_eq!( - acc_value_2, - wait_for_history_content(target, acc_key_2.clone()).await, + receipts_value_2, + wait_for_history_content(target, receipts_key_2.clone()).await, ); assert_eq!( - acc_value_2, - wait_for_history_content(&peertest.bootnode.ipc_client, acc_key_2.clone()).await, + receipts_value_2, + wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_2.clone()).await, ); assert_eq!( - acc_value_2, - wait_for_history_content(&peertest.nodes[0].ipc_client, acc_key_2).await, + receipts_value_2, + wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_2).await, ); } diff --git 
a/ethportal-peertest/src/scenarios/paginate.rs b/ethportal-peertest/src/scenarios/paginate.rs index ac0e0bae3..af4ad2962 100644 --- a/ethportal-peertest/src/scenarios/paginate.rs +++ b/ethportal-peertest/src/scenarios/paginate.rs @@ -1,6 +1,9 @@ -use ethportal_api::{BlockHeaderKey, ContentValue, HistoryContentKey, HistoryNetworkApiClient}; +use ethportal_api::{ + types::content_key::history::BlockHeaderByHashKey, ContentValue, HistoryContentKey, + HistoryNetworkApiClient, +}; -use crate::{utils::fixture_header_with_proof, Peertest}; +use crate::{utils::fixture_header_by_hash, Peertest}; pub async fn test_paginate_local_storage(peertest: &Peertest) { let ipc_client = &peertest.bootnode.ipc_client; @@ -11,13 +14,15 @@ pub async fn test_paginate_local_storage(peertest: &Peertest) { let mut content_keys: Vec = (0..20_u8) .map(|_| { - serde_json::to_string(&HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { - block_hash: rand::random(), - })) + serde_json::to_string(&HistoryContentKey::BlockHeaderByHash( + BlockHeaderByHashKey { + block_hash: rand::random(), + }, + )) .unwrap() }) .collect(); - let (_, content_value) = fixture_header_with_proof(); + let (_, content_value) = fixture_header_by_hash(); for content_key in content_keys.clone().into_iter() { // Store content to offer in the testnode db let store_result = ipc_client diff --git a/ethportal-peertest/src/scenarios/state.rs b/ethportal-peertest/src/scenarios/state.rs index e66aeb73b..0059bb27f 100644 --- a/ethportal-peertest/src/scenarios/state.rs +++ b/ethportal-peertest/src/scenarios/state.rs @@ -46,7 +46,7 @@ pub async fn test_state_gossip_contract_bytecode(peertest: &Peertest, target: &C async fn test_state_offer(fixture: &StateFixture, target: &Client, peer: &PeertestNode) { // Make sure that peer has block header let history_content_key = - HistoryContentKey::BlockHeaderWithProof(fixture.block_header.hash().into()); + HistoryContentKey::BlockHeaderByHash(fixture.block_header.hash().into()); let 
history_content_value = HistoryContentValue::BlockHeaderWithProof(HeaderWithProof { header: fixture.block_header.clone(), proof: BlockHeaderProof::None(SszNone::default()), diff --git a/ethportal-peertest/src/scenarios/utp.rs b/ethportal-peertest/src/scenarios/utp.rs index 395ee31ce..b6fe36261 100644 --- a/ethportal-peertest/src/scenarios/utp.rs +++ b/ethportal-peertest/src/scenarios/utp.rs @@ -1,5 +1,5 @@ use crate::{ - utils::{fixture_block_body, fixture_header_with_proof}, + utils::{fixture_block_body, fixture_header_by_hash}, Peertest, }; use discv5::enr::NodeId; @@ -13,7 +13,7 @@ pub async fn test_recursive_utp(peertest: &Peertest) { info!("Test recursive utp"); // store header_with_proof to validate block body - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = peertest.nodes[0] .ipc_client .store(content_key.clone(), content_value.encode()) @@ -54,7 +54,7 @@ pub async fn test_trace_recursive_utp(peertest: &Peertest) { info!("Test trace recursive utp"); // store header_with_proof to validate block body - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = peertest.nodes[0] .ipc_client .store(content_key.clone(), content_value.encode()) diff --git a/ethportal-peertest/src/scenarios/validation.rs b/ethportal-peertest/src/scenarios/validation.rs index 8b57fdf0c..3630d37dc 100644 --- a/ethportal-peertest/src/scenarios/validation.rs +++ b/ethportal-peertest/src/scenarios/validation.rs @@ -1,21 +1,59 @@ use crate::{ - utils::{fixture_block_body, fixture_header_with_proof, fixture_receipts}, + utils::{ + fixture_block_body, fixture_header_by_hash, fixture_header_by_number, fixture_receipts, + }, Peertest, }; use alloy_primitives::B256; use ethportal_api::{ jsonrpsee::async_client::Client, - types::{content_key::history::BlockHeaderKey, enr::Enr, portal::ContentInfo}, + 
types::{content_key::history::BlockHeaderByHashKey, enr::Enr, portal::ContentInfo}, ContentValue, HistoryContentKey, HistoryNetworkApiClient, }; use std::str::FromStr; use tracing::info; -pub async fn test_validate_pre_merge_header_with_proof(peertest: &Peertest, target: &Client) { - info!("Test validating a pre-merge header-with-proof"); +pub async fn test_validate_pre_merge_header_by_hash(peertest: &Peertest, target: &Client) { + info!("Test validating a pre-merge header by block hash"); - // store header_with_proof - let (content_key, content_value) = fixture_header_with_proof(); + // store header_by_hash + let (content_key, content_value) = fixture_header_by_hash(); + + let store_result = peertest + .bootnode + .ipc_client + .store(content_key.clone(), content_value.encode()) + .await + .unwrap(); + + assert!(store_result); + + // calling find_content since it only returns the found data if validation was successful + let result = target + .find_content( + Enr::from_str(&peertest.bootnode.enr.to_base64()).unwrap(), + content_key.clone(), + ) + .await + .unwrap(); + + match result { + ContentInfo::Content { + content, + utp_transfer, + } => { + assert_eq!(content, content_value.encode()); + assert!(!utp_transfer); + } + _ => panic!("Content values should match"), + } +} + +pub async fn test_validate_pre_merge_header_by_number(peertest: &Peertest, target: &Client) { + info!("Test validating a pre-merge header by block number"); + + // store header_by_number + let (content_key, content_value) = fixture_header_by_number(); let store_result = peertest .bootnode @@ -51,8 +89,8 @@ pub async fn test_invalidate_header_by_hash(peertest: &Peertest, target: &Client info!("Test invalidating a pre-merge header-with-proof by header hash"); // store header_with_proof - doesn't perform validation - let (_, content_value) = fixture_header_with_proof(); - let invalid_content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { + let (_, content_value) = 
fixture_header_by_hash(); + let invalid_content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: B256::random().into(), }); @@ -81,7 +119,7 @@ pub async fn test_invalidate_header_by_hash(peertest: &Peertest, target: &Client pub async fn test_validate_pre_merge_block_body(peertest: &Peertest, target: &Client) { info!("Test validating a pre-merge block body"); // store header_with_proof to validate block body - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = target .store(content_key, content_value.encode()) .await @@ -124,7 +162,7 @@ pub async fn test_validate_pre_merge_block_body(peertest: &Peertest, target: &Cl pub async fn test_validate_pre_merge_receipts(peertest: &Peertest, target: &Client) { info!("Test validating pre-merge receipts"); // store header_with_proof to validate block body - let (content_key, content_value) = fixture_header_with_proof(); + let (content_key, content_value) = fixture_header_by_hash(); let store_result = target .store(content_key, content_value.encode()) .await diff --git a/ethportal-peertest/src/utils.rs b/ethportal-peertest/src/utils.rs index 80f95e8d9..7caeb6449 100644 --- a/ethportal-peertest/src/utils.rs +++ b/ethportal-peertest/src/utils.rs @@ -1,9 +1,13 @@ -use std::fs; +use std::{ + fmt::{self, Display, Formatter}, + fs, +}; use alloy_primitives::Bytes; use alloy_rlp::Decodable; use futures::{Future, TryFutureExt}; use serde::Deserializer; +use ssz::Decode; use tracing::error; use anyhow::Result; @@ -11,9 +15,13 @@ use serde_yaml::Value; use ureq::serde::Deserialize; use ethportal_api::{ - BeaconContentKey, BeaconContentValue, BeaconNetworkApiClient, ContentValue, Header, - HistoryContentKey, HistoryContentValue, HistoryNetworkApiClient, RawContentValue, - StateContentKey, StateContentValue, StateNetworkApiClient, + types::{ + content_key::history::{BlockHeaderByHashKey, BlockHeaderByNumberKey}, + 
execution::header_with_proof::HeaderWithProof, + }, + BeaconContentKey, BeaconContentValue, BeaconNetworkApiClient, BlockBodyKey, BlockReceiptsKey, + ContentValue, Header, HistoryContentKey, HistoryContentValue, HistoryNetworkApiClient, + RawContentValue, StateContentKey, StateContentValue, StateNetworkApiClient, }; pub async fn wait_for_successful_result(f: impl Fn() -> Fut) -> O @@ -110,16 +118,32 @@ fn read_fixture(file_name: &str) -> (HistoryContentKey, HistoryContentValue) { /// History HeaderWithProof content key & value /// Block #1000010 -pub fn fixture_header_with_proof_1000010() -> (HistoryContentKey, HistoryContentValue) { +pub fn fixture_header_by_hash_1000010() -> (HistoryContentKey, HistoryContentValue) { read_fixture("portal-spec-tests/tests/mainnet/history/headers_with_proof/1000010.yaml") } -/// History HeaderWithProof content key & value +/// History HeaderByHash content key & value /// Block #14764013 (pre-merge) -pub fn fixture_header_with_proof() -> (HistoryContentKey, HistoryContentValue) { +pub fn fixture_header_by_hash() -> (HistoryContentKey, HistoryContentValue) { read_fixture("portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.yaml") } +/// History HeaderByNumber content key & value +/// Block #14764013 (pre-merge) +pub fn fixture_header_by_number() -> (HistoryContentKey, HistoryContentValue) { + let (_, content_value) = + read_fixture("portal-spec-tests/tests/mainnet/history/headers_with_proof/14764013.yaml"); + + // Create a content key from the block number + let HistoryContentValue::BlockHeaderWithProof(header_with_proof) = content_value.clone() else { + panic!("Expected HistoryContentValue::BlockHeaderWithProof") + }; + let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: header_with_proof.header.number, + }); + (content_key, content_value) +} + /// History BlockBody content key & value /// Block #14764013 (pre-merge) pub fn fixture_block_body() -> (HistoryContentKey, 
HistoryContentValue) { @@ -132,29 +156,100 @@ pub fn fixture_receipts() -> (HistoryContentKey, HistoryContentValue) { read_fixture("portal-spec-tests/tests/mainnet/history/receipts/14764013.yaml") } -/// Epoch Accumulator #1659 -/// Content Key: 0x030013c08b64bf7e3afab80ad4f8ea9423f1a7d8b31a149fc3b832d7980719c60c -/// Content ID: 0x61f6fd26ed4fb88cfae02ad691e4f41e0053e0305cb62f7cdfde5a7967ffbe65 -pub fn fixture_epoch_acc_1() -> (HistoryContentKey, HistoryContentValue) { - read_epoch_acc("0013c08b64bf7e3afab80ad4f8ea9423f1a7d8b31a149fc3b832d7980719c60c") +enum DependentType { + BlockBody, + Receipts, +} + +impl Display for DependentType { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + DependentType::BlockBody => write!(f, "body"), + DependentType::Receipts => write!(f, "receipts"), + } + } } -/// Epoch Accumulator #434 -/// Content Key: 0x03ed8823c84177d8ffabf104566f313a2b2a43d05304ba6c74c2f5555bae0ef329 -/// Content ID: 0x29e3c0966a85ee262ef4afeccd89721fda0962ae563a3818e81798fe28bdb37e -pub fn fixture_epoch_acc_2() -> (HistoryContentKey, HistoryContentValue) { - read_epoch_acc("ed8823c84177d8ffabf104566f313a2b2a43d05304ba6c74c2f5555bae0ef329") +/// History HeaderWithProof content key & value +/// Block #15040641 (pre-merge) +pub fn fixture_header_by_hash_with_proof_15040641() -> (HistoryContentKey, HistoryContentValue) { + read_binary_history_fixture(15040641, None) } -fn read_epoch_acc(hash: &str) -> (HistoryContentKey, HistoryContentValue) { - let epoch_acc = std::fs::read(format!("test_assets/mainnet/0x03{hash}.portalcontent")).unwrap(); - let epoch_acc_hash = ethportal_api::utils::bytes::hex_decode(&format!("0x{hash}")).unwrap(); - let content_key = - ethportal_api::HistoryContentKey::EpochAccumulator(ethportal_api::EpochAccumulatorKey { - epoch_hash: alloy_primitives::B256::from_slice(&epoch_acc_hash), - }); - let content_value = HistoryContentValue::decode(&content_key, &epoch_acc).unwrap(); - (content_key, content_value) +/// History 
BlockBody content key & value +/// Block #15040641 (pre-merge) +pub fn fixture_block_body_15040641() -> (HistoryContentKey, HistoryContentValue) { + read_binary_history_fixture(15040641, Some(DependentType::BlockBody)) +} + +/// History Receipts content key & value +/// Block #15040641 (pre-merge) +pub fn fixture_receipts_15040641() -> (HistoryContentKey, HistoryContentValue) { + read_binary_history_fixture(15040641, Some(DependentType::Receipts)) +} + +/// History HeaderWithProof content key & value +/// Block #15040708 (pre-merge) +pub fn fixture_header_by_hash_with_proof_15040708() -> (HistoryContentKey, HistoryContentValue) { + read_binary_history_fixture(15040708, None) +} + +/// History BlockBody content key & value +/// Block #15040708 (pre-merge) +pub fn fixture_block_body_15040708() -> (HistoryContentKey, HistoryContentValue) { + read_binary_history_fixture(15040708, Some(DependentType::BlockBody)) +} + +/// History Receipts content key & value +/// Block #15040708 (pre-merge) +pub fn fixture_receipts_15040708() -> (HistoryContentKey, HistoryContentValue) { + read_binary_history_fixture(15040708, Some(DependentType::Receipts)) +} + +fn read_binary_history_fixture( + block_number: u64, + dependent: Option, +) -> (HistoryContentKey, HistoryContentValue) { + let header_value = std::fs::read(format!( + "test_assets/mainnet/large_content/{block_number}/header.bin" + )) + .unwrap(); + let header_content_value: HeaderWithProof = + HeaderWithProof::from_ssz_bytes(&header_value).unwrap(); + + match dependent { + Some(dependent_type) => { + let dependent_value = std::fs::read(format!( + "test_assets/mainnet/large_content/{block_number}/{dependent_type}.bin" + )) + .unwrap(); + match dependent_type { + DependentType::BlockBody => { + let content_key = HistoryContentKey::BlockBody(BlockBodyKey { + block_hash: header_content_value.header.hash().0, + }); + let content_value = + HistoryContentValue::decode(&content_key, &dependent_value).unwrap(); + (content_key, 
content_value) + } + DependentType::Receipts => { + let content_key = HistoryContentKey::BlockReceipts(BlockReceiptsKey { + block_hash: header_content_value.header.hash().0, + }); + let content_value = + HistoryContentValue::decode(&content_key, &dependent_value).unwrap(); + (content_key, content_value) + } + } + } + None => { + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { + block_hash: header_content_value.header.hash().0, + }); + let content_value = HistoryContentValue::BlockHeaderWithProof(header_content_value); + (content_key, content_value) + } + } } #[derive(Debug, Clone, Deserialize)] diff --git a/portal-bridge/src/api/execution.rs b/portal-bridge/src/api/execution.rs index 61e8ee19d..a5cb39445 100644 --- a/portal-bridge/src/api/execution.rs +++ b/portal-bridge/src/api/execution.rs @@ -4,6 +4,7 @@ use alloy_primitives::B256; use anyhow::{anyhow, bail}; use ethportal_api::{ types::{ + content_key::history::{BlockHeaderByHashKey, BlockHeaderByNumberKey}, execution::{ accumulator::EpochAccumulator, block_body::{ @@ -17,8 +18,7 @@ use ethportal_api::{ jsonrpc::{params::Params, request::JsonRequest}, }, utils::bytes::{hex_decode, hex_encode}, - BlockBodyKey, BlockHeaderKey, BlockReceiptsKey, Header, HistoryContentKey, HistoryContentValue, - Receipts, + BlockBodyKey, BlockReceiptsKey, Header, HistoryContentKey, HistoryContentValue, Receipts, }; use futures::future::join_all; use serde_json::{json, Value}; @@ -69,12 +69,18 @@ impl ExecutionApi { }) } - /// Return a validated FullHeader & content key / value pair for the given header. + /// Return a validated FullHeader & content by hash and number key / value pair for the given + /// header. 
pub async fn get_header( &self, height: u64, epoch_acc: Option>, - ) -> anyhow::Result<(FullHeader, HistoryContentKey, HistoryContentValue)> { + ) -> anyhow::Result<( + FullHeader, + HistoryContentKey, // BlockHeaderByHash + HistoryContentKey, // BlockHeaderByNumber + HistoryContentValue, + )> { // Geth requires block numbers to be formatted using the following padding. let block_param = format!("0x{height:01X}"); let params = Params::Array(vec![json!(block_param), json!(true)]); @@ -97,10 +103,16 @@ impl ExecutionApi { if let Err(msg) = full_header.validate() { bail!("Header validation failed: {msg}"); }; - // Construct content key / value pair. - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { - block_hash: full_header.header.hash().0, - }); + // Construct header by hash content key / value pair. + let header_by_hash_content_key = + HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { + block_hash: full_header.header.hash().0, + }); + // Construct header by number content key / value pair. + let header_by_number_content_key = + HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: full_header.header.number, + }); let content_value = match &full_header.epoch_acc { Some(epoch_acc) => { // Construct HeaderWithProof @@ -119,7 +131,12 @@ impl ExecutionApi { HistoryContentValue::BlockHeaderWithProof(header_with_proof) } }; - Ok((full_header, content_key, content_value)) + Ok(( + full_header, + header_by_hash_content_key, + header_by_number_content_key, + content_value, + )) } /// Return a validated BlockBody content key / value for the given FullHeader. 
diff --git a/portal-bridge/src/bridge/era1.rs b/portal-bridge/src/bridge/era1.rs index 38faabfca..74a409e6f 100644 --- a/portal-bridge/src/bridge/era1.rs +++ b/portal-bridge/src/bridge/era1.rs @@ -36,9 +36,13 @@ use crate::{ }; use ethportal_api::{ jsonrpsee::http_client::HttpClient, - types::{execution::accumulator::EpochAccumulator, portal::ContentInfo}, - BlockBodyKey, BlockHeaderKey, BlockReceiptsKey, EpochAccumulatorKey, HistoryContentKey, - HistoryContentValue, HistoryNetworkApiClient, + types::{ + content_key::history::{BlockHeaderByHashKey, BlockHeaderByNumberKey}, + execution::accumulator::EpochAccumulator, + portal::ContentInfo, + }, + BlockBodyKey, BlockReceiptsKey, HistoryContentKey, HistoryContentValue, + HistoryNetworkApiClient, }; use trin_validation::{ constants::EPOCH_SIZE, header_validator::HeaderValidator, oracle::HeaderOracle, @@ -149,7 +153,7 @@ impl Era1Bridge { .iter() .map(|hash| { ( - HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { + HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: hash.0, }), HistoryContentKey::BlockBody(BlockBodyKey { block_hash: hash.0 }), @@ -254,13 +258,20 @@ impl Era1Bridge { panic!("Failed to get epoch from era1 file: {e}"); } }; - let epoch_acc = match self.get_epoch_acc(epoch_index).await { + let epoch_acc = match lookup_epoch_acc( + epoch_index, + &self.header_oracle.header_validator.pre_merge_acc, + &self.epoch_acc_path, + ) + .await + { Ok(epoch_acc) => epoch_acc, Err(e) => { error!("Failed to get epoch acc for epoch: {epoch_index}, error: {e}"); return; } }; + let epoch_acc = Arc::new(epoch_acc); let header_validator = Arc::new(self.header_oracle.header_validator.clone()); info!("Era1 file read successfully, gossiping block tuples for epoch: {epoch_index}"); let mut serve_block_tuple_handles = vec![]; @@ -294,31 +305,6 @@ impl Era1Bridge { join_all(serve_block_tuple_handles).await; } - async fn get_epoch_acc(&self, epoch_index: u64) -> anyhow::Result> { - let (epoch_hash, 
epoch_acc) = lookup_epoch_acc( - epoch_index, - &self.header_oracle.header_validator.pre_merge_acc, - &self.epoch_acc_path, - ) - .await?; - // Gossip epoch acc to network if found locally - let content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { epoch_hash }); - let content_value = HistoryContentValue::EpochAccumulator(epoch_acc.clone()); - // create unique stats for epoch accumulator, since it's rarely gossiped - let block_stats = Arc::new(Mutex::new(HistoryBlockStats::new(epoch_index * EPOCH_SIZE))); - debug!("Built EpochAccumulator for Epoch #{epoch_index:?}: now gossiping."); - // spawn gossip in new thread to avoid blocking - let portal_client = self.portal_client.clone(); - tokio::spawn(async move { - if let Err(msg) = - gossip_history_content(portal_client, content_key, content_value, block_stats).await - { - warn!("Failed to gossip epoch accumulator: {msg}"); - } - }); - Ok(Arc::new(epoch_acc)) - } - fn spawn_serve_block_tuple( portal_client: HttpClient, block_tuple: BlockTuple, @@ -377,10 +363,10 @@ impl Era1Bridge { let mut gossip_header = true; if hunt { let header_hash = block_tuple.header.header.hash(); - let header_key = BlockHeaderKey { + let header_by_hash_key = BlockHeaderByHashKey { block_hash: header_hash.0, }; - let header_content_key = HistoryContentKey::BlockHeaderWithProof(header_key); + let header_content_key = HistoryContentKey::BlockHeaderByHash(header_by_hash_key); let header_content_info = portal_client .recursive_find_content(header_content_key.clone()) .await; @@ -547,10 +533,6 @@ impl Era1Bridge { header_record.block_hash ); - // Construct HistoryContentKey - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { - block_hash: header.hash().0, - }); // Construct HeaderWithProof let header_with_proof = construct_proof(header.clone(), &epoch_acc).await?; // Double check that the proof is valid @@ -558,7 +540,27 @@ impl Era1Bridge { // Construct HistoryContentValue let content_value = 
HistoryContentValue::BlockHeaderWithProof(header_with_proof); - gossip_history_content(portal_client, content_key, content_value, block_stats).await + // Construct HistoryContentKey for block header by hash and gossip it + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { + block_hash: header.hash().0, + }); + + gossip_history_content( + portal_client.clone(), + content_key, + content_value.clone(), + block_stats.clone(), + ) + .await?; + + // Construct HistoryContentKey for block header by number and gossip it + let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: header.number, + }); + + gossip_history_content(portal_client, content_key, content_value, block_stats).await?; + + Ok(()) } async fn construct_and_gossip_block_body( diff --git a/portal-bridge/src/bridge/history.rs b/portal-bridge/src/bridge/history.rs index 82d26f2ae..4d4238b51 100644 --- a/portal-bridge/src/bridge/history.rs +++ b/portal-bridge/src/bridge/history.rs @@ -22,7 +22,7 @@ use crate::{ }; use ethportal_api::{ jsonrpsee::http_client::HttpClient, types::execution::accumulator::EpochAccumulator, - EpochAccumulatorKey, HistoryContentKey, HistoryContentValue, + HistoryContentKey, }; use trin_validation::{ constants::{EPOCH_SIZE, MERGE_BLOCK_NUMBER}, @@ -98,7 +98,7 @@ impl HistoryBridge { block_stats.clone(), ) .await; - if let HistoryContentKey::BlockHeaderWithProof(_) = asset.content_key { + if let HistoryContentKey::BlockHeaderByHash(_) = asset.content_key { sleep(Duration::from_millis(50)).await; } } @@ -189,11 +189,14 @@ impl HistoryBridge { // look up the epoch acc on a header by header basis if height <= MERGE_BLOCK_NUMBER && current_epoch_index != height / EPOCH_SIZE { current_epoch_index = height / EPOCH_SIZE; - epoch_acc = match self - .construct_and_gossip_epoch_acc(current_epoch_index) - .await + epoch_acc = match lookup_epoch_acc( + current_epoch_index, + &self.header_oracle.header_validator.pre_merge_acc, + 
&self.epoch_acc_path, + ) + .await { - Ok(val) => Some(val), + Ok(epoch_acc) => Some(Arc::new(epoch_acc)), Err(msg) => { warn!("Unable to find epoch acc for gossip range: {current_epoch_index}. Skipping iteration: {msg:?}"); continue; @@ -257,8 +260,12 @@ impl HistoryBridge { ) -> anyhow::Result<()> { info!("Serving block: {height}"); let timer = metrics.start_process_timer("construct_and_gossip_header"); - let (full_header, header_content_key, header_content_value) = - execution_api.get_header(height, epoch_acc).await?; + let ( + full_header, + header_by_hash_content_key, + header_by_number_content_key, + header_content_value, + ) = execution_api.get_header(height, epoch_acc).await?; let block_stats = Arc::new(Mutex::new(HistoryBlockStats::new( full_header.header.number, ))); @@ -266,13 +273,23 @@ impl HistoryBridge { debug!("Built and validated HeaderWithProof for Block #{height:?}: now gossiping."); if let Err(msg) = gossip_history_content( portal_client.clone(), - header_content_key, + header_by_hash_content_key, + header_content_value.clone(), + block_stats.clone(), + ) + .await + { + warn!("Error gossiping HeaderByHashWithProof #{height:?}: {msg:?}"); + }; + if let Err(msg) = gossip_history_content( + portal_client.clone(), + header_by_number_content_key, header_content_value, block_stats.clone(), ) .await { - warn!("Error gossiping HeaderWithProof #{height:?}: {msg:?}"); + warn!("Error gossiping HeaderByNumberWithProof #{height:?}: {msg:?}"); }; metrics.stop_process_timer(timer); @@ -335,34 +352,6 @@ impl HistoryBridge { Ok(()) } - /// Attempt to lookup an epoch accumulator from local portal-accumulators path provided via cli - /// arg. Gossip the epoch accumulator if found. 
- async fn construct_and_gossip_epoch_acc( - &self, - epoch_index: u64, - ) -> anyhow::Result> { - let (epoch_hash, epoch_acc) = lookup_epoch_acc( - epoch_index, - &self.header_oracle.header_validator.pre_merge_acc, - &self.epoch_acc_path, - ) - .await?; - // Gossip epoch acc to network if found locally - let content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { epoch_hash }); - let content_value = HistoryContentValue::EpochAccumulator(epoch_acc.clone()); - // create unique stats for epoch accumulator, since it's rarely gossiped - let block_stats = Arc::new(Mutex::new(HistoryBlockStats::new(epoch_index * EPOCH_SIZE))); - debug!("Built EpochAccumulator for Epoch #{epoch_index:?}: now gossiping."); - let _ = gossip_history_content( - self.portal_client.clone(), - content_key, - content_value, - block_stats, - ) - .await; - Ok(Arc::new(epoch_acc)) - } - async fn construct_and_gossip_receipt( full_header: &FullHeader, portal_client: &HttpClient, diff --git a/portal-bridge/src/bridge/utils.rs b/portal-bridge/src/bridge/utils.rs index 487227edb..4f7ee9ba7 100644 --- a/portal-bridge/src/bridge/utils.rs +++ b/portal-bridge/src/bridge/utils.rs @@ -1,4 +1,3 @@ -use alloy_primitives::B256; use anyhow::anyhow; use ethportal_api::{types::execution::accumulator::EpochAccumulator, utils::bytes::hex_encode}; use ssz::Decode; @@ -10,7 +9,7 @@ pub async fn lookup_epoch_acc( epoch_index: u64, pre_merge_acc: &PreMergeAccumulator, epoch_acc_path: &Path, -) -> anyhow::Result<(B256, EpochAccumulator)> { +) -> anyhow::Result { let epoch_hash = pre_merge_acc.historical_epochs[epoch_index as usize]; let epoch_hash_pretty = hex_encode(epoch_hash); let epoch_hash_pretty = epoch_hash_pretty.trim_start_matches("0x"); @@ -26,5 +25,5 @@ pub async fn lookup_epoch_acc( )) } }; - Ok((epoch_hash, epoch_acc)) + Ok(epoch_acc) } diff --git a/portal-bridge/src/stats.rs b/portal-bridge/src/stats.rs index 04d3e7f61..0bd87dfe0 100644 --- a/portal-bridge/src/stats.rs +++ 
b/portal-bridge/src/stats.rs @@ -84,10 +84,10 @@ impl StatsReporter for BeaconSlotStats { #[derive(Debug, Clone, Default)] pub struct HistoryBlockStats { pub block_number: u64, - pub header_with_proof: Option, + pub header_by_hash_with_proof: Option, + pub header_by_number_with_proof: Option, pub block_body: Option, pub receipts: Option, - pub epoch_accumulator: Option, } impl StatsReporter for HistoryBlockStats { @@ -100,9 +100,13 @@ impl StatsReporter for HistoryBlockStats { fn report(&self) { let block_number = self.block_number; - if let Some(stats) = &self.header_with_proof { - info!("GossipReport: block#{block_number}: header_with_proof - {stats}"); - debug!("GossipReport: block#{block_number}: header_with_proof - {stats:?}"); + if let Some(stats) = &self.header_by_hash_with_proof { + info!("GossipReport: block#{block_number}: header_by_hash_with_proof - {stats}"); + debug!("GossipReport: block#{block_number}: header_by_hash_with_proof - {stats:?}"); + } + if let Some(stats) = &self.header_by_number_with_proof { + info!("GossipReport: block#{block_number}: header_by_number_with_proof - {stats}"); + debug!("GossipReport: block#{block_number}: header_by_number_with_proof - {stats:?}"); } if let Some(stats) = &self.block_body { info!("GossipReport: block#{block_number}: block_body - {stats}"); @@ -112,16 +116,15 @@ impl StatsReporter for HistoryBlockStats { info!("GossipReport: block#{block_number}: receipts - {stats}"); debug!("GossipReport: block#{block_number}: receipts - {stats:?}"); } - if let Some(stats) = &self.epoch_accumulator { - info!("GossipReport: block#{block_number}: epoch_accumulator - {stats}"); - debug!("GossipReport: block#{block_number}: epoch_accumulator - {stats:?}"); - } } fn update(&mut self, content_key: HistoryContentKey, results: ContentStats) { match content_key { - HistoryContentKey::BlockHeaderWithProof(_) => { - self.header_with_proof = Some(results); + HistoryContentKey::BlockHeaderByHash(_) => { + self.header_by_hash_with_proof = 
Some(results); + } + HistoryContentKey::BlockHeaderByNumber(_) => { + self.header_by_number_with_proof = Some(results); } HistoryContentKey::BlockBody(_) => { self.block_body = Some(results); @@ -129,9 +132,6 @@ impl StatsReporter for HistoryBlockStats { HistoryContentKey::BlockReceipts(_) => { self.receipts = Some(results); } - HistoryContentKey::EpochAccumulator(_) => { - self.epoch_accumulator = Some(results); - } } } } diff --git a/rpc/src/eth_rpc.rs b/rpc/src/eth_rpc.rs index b3cf79b0b..59228839f 100644 --- a/rpc/src/eth_rpc.rs +++ b/rpc/src/eth_rpc.rs @@ -170,7 +170,7 @@ impl EthApi { async fn fetch_header_by_hash(&self, block_hash: B256) -> Result { let content_value = self - .fetch_history_content(HistoryContentKey::BlockHeaderWithProof(block_hash.into())) + .fetch_history_content(HistoryContentKey::BlockHeaderByHash(block_hash.into())) .await?; let HistoryContentValue::BlockHeaderWithProof(header_with_proof) = content_value else { return Err(RpcServeError::Message(format!( diff --git a/src/bin/poll_latest.rs b/src/bin/poll_latest.rs index 0eb687fdf..e3969204a 100644 --- a/src/bin/poll_latest.rs +++ b/src/bin/poll_latest.rs @@ -4,8 +4,11 @@ use anyhow::{anyhow, Result}; use clap::Parser; use ethportal_api::{ jsonrpsee::http_client::{HttpClient, HttpClientBuilder}, - types::{content_key::overlay::OverlayContentKey, portal::ContentInfo}, - BlockBodyKey, BlockHeaderKey, BlockReceiptsKey, HistoryContentKey, HistoryNetworkApiClient, + types::{ + content_key::{history::BlockHeaderByHashKey, overlay::OverlayContentKey}, + portal::ContentInfo, + }, + BlockBodyKey, BlockReceiptsKey, HistoryContentKey, HistoryNetworkApiClient, }; use futures::StreamExt; use std::{ @@ -133,7 +136,7 @@ async fn audit_block( ) -> Result<()> { metrics.lock().unwrap().active_audit_count += 3; let header_handle = tokio::spawn(audit_content_key( - HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { block_hash: hash.0 }), + HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { 
block_hash: hash.0 }), timestamp, timeout, backoff, diff --git a/src/bin/sample_range.rs b/src/bin/sample_range.rs index 4601ed9b5..28a6b4c90 100644 --- a/src/bin/sample_range.rs +++ b/src/bin/sample_range.rs @@ -17,7 +17,8 @@ use url::Url; use ethportal_api::{ jsonrpsee::http_client::{HttpClient, HttpClientBuilder}, - BlockBodyKey, BlockHeaderKey, BlockReceiptsKey, HistoryContentKey, HistoryNetworkApiClient, + types::content_key::history::BlockHeaderByHashKey, + BlockBodyKey, BlockReceiptsKey, HistoryContentKey, HistoryNetworkApiClient, }; use trin_utils::log::init_tracing_logger; use trin_validation::constants::MERGE_BLOCK_NUMBER; @@ -134,7 +135,8 @@ async fn audit_block( metrics: Arc>, client: HttpClient, ) -> anyhow::Result<()> { - let header_ck = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { block_hash: hash.0 }); + let header_ck = + HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: hash.0 }); let body_ck = HistoryContentKey::BlockBody(BlockBodyKey { block_hash: hash.0 }); let receipts_ck = HistoryContentKey::BlockReceipts(BlockReceiptsKey { block_hash: hash.0 }); match client.recursive_find_content(header_ck).await { diff --git a/src/bin/test_providers.rs b/src/bin/test_providers.rs index 8e2222e7e..375d9fea2 100644 --- a/src/bin/test_providers.rs +++ b/src/bin/test_providers.rs @@ -85,7 +85,7 @@ pub async fn main() -> Result<()> { continue; } }; - let (full_header, _, _) = match api.get_header(*block, epoch_acc).await { + let (full_header, _, _, _) = match api.get_header(*block, epoch_acc).await { Ok(header) => header, Err(_) => { provider_failures += 3; diff --git a/test_assets/mainnet/0x030013c08b64bf7e3afab80ad4f8ea9423f1a7d8b31a149fc3b832d7980719c60c.portalcontent b/test_assets/mainnet/0x030013c08b64bf7e3afab80ad4f8ea9423f1a7d8b31a149fc3b832d7980719c60c.portalcontent deleted file mode 100644 index 929507e27..000000000 Binary files 
a/test_assets/mainnet/0x030013c08b64bf7e3afab80ad4f8ea9423f1a7d8b31a149fc3b832d7980719c60c.portalcontent and /dev/null differ diff --git a/test_assets/mainnet/0x03ed8823c84177d8ffabf104566f313a2b2a43d05304ba6c74c2f5555bae0ef329.portalcontent b/test_assets/mainnet/0x03ed8823c84177d8ffabf104566f313a2b2a43d05304ba6c74c2f5555bae0ef329.portalcontent deleted file mode 100644 index e72014c53..000000000 Binary files a/test_assets/mainnet/0x03ed8823c84177d8ffabf104566f313a2b2a43d05304ba6c74c2f5555bae0ef329.portalcontent and /dev/null differ diff --git a/test_assets/mainnet/large_content/15040641/body.bin b/test_assets/mainnet/large_content/15040641/body.bin new file mode 100644 index 000000000..913e12b5d Binary files /dev/null and b/test_assets/mainnet/large_content/15040641/body.bin differ diff --git a/test_assets/mainnet/large_content/15040641/header.bin b/test_assets/mainnet/large_content/15040641/header.bin new file mode 100644 index 000000000..a5b931f73 Binary files /dev/null and b/test_assets/mainnet/large_content/15040641/header.bin differ diff --git a/test_assets/mainnet/large_content/15040641/receipts.bin b/test_assets/mainnet/large_content/15040641/receipts.bin new file mode 100644 index 000000000..3530dc7a4 Binary files /dev/null and b/test_assets/mainnet/large_content/15040641/receipts.bin differ diff --git a/test_assets/mainnet/large_content/15040708/body.bin b/test_assets/mainnet/large_content/15040708/body.bin new file mode 100644 index 000000000..3a3f30258 Binary files /dev/null and b/test_assets/mainnet/large_content/15040708/body.bin differ diff --git a/test_assets/mainnet/large_content/15040708/header.bin b/test_assets/mainnet/large_content/15040708/header.bin new file mode 100644 index 000000000..d050ecbc3 Binary files /dev/null and b/test_assets/mainnet/large_content/15040708/header.bin differ diff --git a/test_assets/mainnet/large_content/15040708/receipts.bin b/test_assets/mainnet/large_content/15040708/receipts.bin new file mode 100644 index 
000000000..a17992572 Binary files /dev/null and b/test_assets/mainnet/large_content/15040708/receipts.bin differ diff --git a/test_assets/mainnet/large_content/README.txt b/test_assets/mainnet/large_content/README.txt new file mode 100644 index 000000000..80c9aeea0 --- /dev/null +++ b/test_assets/mainnet/large_content/README.txt @@ -0,0 +1,26 @@ +# Large test content. +This is a folder of large content values to be used in specific testing scenarios. + +# block 15040641 - 0x2be48ebbbcd9a91a3bc4354b4c864789d6144d605f629df8ed11c0602f599bbf +- 447_940 bytes total +- header_with_proof: 1_031 bytes + - content_key: 0x002be48ebbbcd9a91a3bc4354b4c864789d6144d605f629df8ed11c0602f599bbf + - content_id: 0x76df3e911f784cb9288382157e43ea3223a8358c3e0e4d06243ff191a55fa3d9 +- body: 126_134 bytes + - content_key: 0x012be48ebbbcd9a91a3bc4354b4c864789d6144d605f629df8ed11c0602f599bbf + - content_id: 0x41b6cf15cbcb92d2f695b32d1c80ec3b5845ad723b2d6fef4677b527c2c1a6e7 +- receipts: 320_775 bytes + - content_key: 0x022be48ebbbcd9a91a3bc4354b4c864789d6144d605f629df8ed11c0602f599bbf + - content_id: 0xb829a6f3f21a0afce1f45f79da776101462790d3fc3c1beed6a2cb2c91ddb122 + +# block 15040708 - 0x944ade7c054495265fa190494368e510fa960c1b498347f0d3584130d2a3a0d9 +- 987_654 bytes total +- header_with_proof: 1_025 bytes + - content_key: 0x00944ade7c054495265fa190494368e510fa960c1b498347f0d3584130d2a3a0d9 + - content_id: 0x0f356e24e9152a537d26bd62908e00dcb6db0cfdd1e38b74e347fba268461e51 +- body: 763_493 bytes + - content_key: 0x01944ade7c054495265fa190494368e510fa960c1b498347f0d3584130d2a3a0d9 + - content_id: 0xb45d64fca22a045e1fa52ef79797e9dd88295777e0effc221ede6d664aff03a7 +- receipts: 223_136 bytes + - content_key: 0x02944ade7c054495265fa190494368e510fa960c1b498347f0d3584130d2a3a0d9 + - content_id: 0x9f5bfdc22418ff3fe8c86dca88100812af9827d9f8157fe0b39249be4061bf57 diff --git a/tests/rpc_server.rs b/tests/rpc_server.rs index 964467f76..5b91065fe 100644 --- a/tests/rpc_server.rs +++ 
b/tests/rpc_server.rs @@ -125,7 +125,7 @@ async fn test_eth_get_block_by_hash() { }; // Store header with proof in server - let content_key = HistoryContentKey::BlockHeaderWithProof(block_hash.into()); + let content_key = HistoryContentKey::BlockHeaderByHash(block_hash.into()); let content_value = HistoryContentValue::BlockHeaderWithProof(hwp); let result = native_client .store(content_key, content_value.encode()) diff --git a/tests/self_peertest.rs b/tests/self_peertest.rs index bddeb6a7b..1e1db6461 100644 --- a/tests/self_peertest.rs +++ b/tests/self_peertest.rs @@ -147,9 +147,19 @@ async fn peertest_trace_recursive_find_content() { #[tokio::test(flavor = "multi_thread")] #[serial] -async fn peertest_validate_pre_merge_header_with_proof() { +async fn peertest_validate_pre_merge_header_by_hash() { let (peertest, target, handle) = setup_peertest("mainnet", &[HISTORY_NETWORK]).await; - peertest::scenarios::validation::test_validate_pre_merge_header_with_proof(&peertest, &target) + peertest::scenarios::validation::test_validate_pre_merge_header_by_hash(&peertest, &target) + .await; + peertest.exit_all_nodes(); + handle.stop().unwrap(); +} + +#[tokio::test(flavor = "multi_thread")] +#[serial] +async fn peertest_validate_pre_merge_header_by_number() { + let (peertest, target, handle) = setup_peertest("mainnet", &[HISTORY_NETWORK]).await; + peertest::scenarios::validation::test_validate_pre_merge_header_by_number(&peertest, &target) .await; peertest.exit_all_nodes(); handle.stop().unwrap(); diff --git a/trin-history/src/storage.rs b/trin-history/src/storage.rs index adf64f750..475b09a70 100644 --- a/trin-history/src/storage.rs +++ b/trin-history/src/storage.rs @@ -80,7 +80,7 @@ impl HistoryStorage { #[cfg(test)] #[allow(clippy::unwrap_used)] pub mod test { - use ethportal_api::{BlockHeaderKey, HistoryContentKey}; + use ethportal_api::{types::content_key::history::BlockHeaderByHashKey, HistoryContentKey}; use quickcheck::{QuickCheck, TestResult}; use rand::RngCore; use 
serial_test::serial; @@ -118,7 +118,7 @@ pub mod test { let (temp_dir, storage_config) = create_test_portal_storage_config_with_capacity(CAPACITY_MB).unwrap(); let mut storage = HistoryStorage::new(storage_config)?; - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey::default()); + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey::default()); let value: Vec = "OGFWs179fWnqmjvHQFGHszXloc3Wzdb4".into(); storage.put(content_key.clone(), &value)?; diff --git a/trin-history/src/validation.rs b/trin-history/src/validation.rs index f86250cd1..31286a94d 100644 --- a/trin-history/src/validation.rs +++ b/trin-history/src/validation.rs @@ -1,15 +1,14 @@ use std::sync::Arc; use alloy_primitives::B256; -use anyhow::anyhow; +use anyhow::{anyhow, ensure}; use ssz::Decode; use tokio::sync::RwLock; -use tree_hash::TreeHash; use ethportal_api::{ types::execution::{ - accumulator::EpochAccumulator, block_body::BlockBody, header::Header, - header_with_proof::HeaderWithProof, receipts::Receipts, + block_body::BlockBody, header::Header, header_with_proof::HeaderWithProof, + receipts::Receipts, }, utils::bytes::hex_encode, HistoryContentKey, @@ -30,18 +29,36 @@ impl Validator for ChainHistoryValidator { content: &[u8], ) -> anyhow::Result> { match content_key { - HistoryContentKey::BlockHeaderWithProof(key) => { + HistoryContentKey::BlockHeaderByHash(key) => { let header_with_proof = HeaderWithProof::from_ssz_bytes(content).map_err(|err| { - anyhow!("Header with proof content has invalid encoding: {err:?}") + anyhow!("Header by hash content has invalid encoding: {err:?}") })?; let header_hash = header_with_proof.header.hash(); - if header_hash != B256::from(key.block_hash) { - return Err(anyhow!( - "Content validation failed: Invalid header hash. Found: {header_hash:?} - Expected: {:?}", - hex_encode(key.block_hash) - )); - } + ensure!( + header_hash == B256::from(key.block_hash), + "Content validation failed: Invalid header hash. 
Found: {header_hash:?} - Expected: {:?}", + hex_encode(header_hash) + ); + self.header_oracle + .read() + .await + .header_validator + .validate_header_with_proof(&header_with_proof)?; + + Ok(ValidationResult::new(true)) + } + HistoryContentKey::BlockHeaderByNumber(key) => { + let header_with_proof = + HeaderWithProof::from_ssz_bytes(content).map_err(|err| { + anyhow!("Header by number content has invalid encoding: {err:?}") + })?; + let header_number = header_with_proof.header.number; + ensure!( + header_number == key.block_number, + "Content validation failed: Invalid header number. Found: {header_number} - Expected: {}", + key.block_number + ); self.header_oracle .read() .await @@ -57,7 +74,7 @@ impl Validator for ChainHistoryValidator { .header_oracle .read() .await - .recursive_find_header_with_proof(B256::from(key.block_hash)) + .recursive_find_header_by_hash_with_proof(B256::from(key.block_hash)) .await? .header; let actual_uncles_root = block_body.uncles_root()?; @@ -86,7 +103,7 @@ impl Validator for ChainHistoryValidator { .header_oracle .read() .await - .recursive_find_header_with_proof(B256::from(key.block_hash)) + .recursive_find_header_by_hash_with_proof(B256::from(key.block_hash)) .await? .header; let actual_receipts_root = receipts.root()?; @@ -99,33 +116,6 @@ impl Validator for ChainHistoryValidator { } Ok(ValidationResult::new(true)) } - HistoryContentKey::EpochAccumulator(key) => { - let epoch_acc = EpochAccumulator::from_ssz_bytes(content).map_err(|msg| { - anyhow!("Epoch Accumulator content has invalid encoding: {:?}", msg) - })?; - - let tree_hash_root = epoch_acc.tree_hash_root(); - if key.epoch_hash != tree_hash_root { - return Err(anyhow!( - "Content validation failed: Invalid epoch accumulator tree hash root. 
- Found: {:?} - Expected: {:?}", - tree_hash_root, - key.epoch_hash, - )); - } - let pre_merge_acc = &self - .header_oracle - .read() - .await - .header_validator - .pre_merge_acc; - if !pre_merge_acc.historical_epochs.contains(&tree_hash_root) { - return Err(anyhow!( - "Content validation failed: Invalid epoch accumulator, missing from pre-merge accumulator." - )); - } - Ok(ValidationResult::new(true)) - } } } } @@ -141,11 +131,11 @@ mod tests { use ssz::Encode; use ethportal_api::{ - types::execution::accumulator::HeaderRecord, utils::bytes::hex_decode, BlockHeaderKey, - EpochAccumulatorKey, + types::content_key::history::{BlockHeaderByHashKey, BlockHeaderByNumberKey}, + utils::bytes::hex_decode, }; - fn get_hwp_ssz() -> Vec { + fn get_header_with_proof_ssz() -> Vec { let file = fs::read_to_string("../trin-validation/src/assets/fluffy/header_with_proofs.json") .unwrap(); @@ -157,25 +147,27 @@ mod tests { } #[test_log::test(tokio::test)] - async fn validate_header() { - let hwp_ssz = get_hwp_ssz(); - let hwp = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header"); + async fn validate_header_by_hash() { + let header_with_proof_ssz = get_header_with_proof_ssz(); + let header_with_proof = + HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header"); let header_oracle = default_header_oracle(); let chain_history_validator = ChainHistoryValidator { header_oracle }; - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { - block_hash: hwp.header.hash().0, + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { + block_hash: header_with_proof.header.hash().0, }); chain_history_validator - .validate_content(&content_key, &hwp_ssz) + .validate_content(&content_key, &header_with_proof_ssz) .await .unwrap(); } #[test_log::test(tokio::test)] #[should_panic(expected = "Merkle proof validation failed for pre-merge header")] - async fn invalidate_header_with_invalid_number() { - let 
hwp_ssz = get_hwp_ssz(); - let mut header = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header"); + async fn invalidate_header_by_hash_with_invalid_number() { + let header_with_proof_ssz = get_header_with_proof_ssz(); + let mut header = + HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header"); // set invalid block height header.header.number = 669052; @@ -183,7 +175,7 @@ mod tests { let content_value = header.as_ssz_bytes(); let header_oracle = default_header_oracle(); let chain_history_validator = ChainHistoryValidator { header_oracle }; - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: header.header.hash().0, }); chain_history_validator @@ -194,9 +186,10 @@ mod tests { #[test_log::test(tokio::test)] #[should_panic(expected = "Merkle proof validation failed for pre-merge header")] - async fn invalidate_header_with_invalid_gaslimit() { - let hwp_ssz = get_hwp_ssz(); - let mut header = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header"); + async fn invalidate_header_by_hash_with_invalid_gaslimit() { + let header_with_proof_ssz = get_header_with_proof_ssz(); + let mut header = + HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header"); // set invalid block gaslimit // valid gaslimit = 3141592 @@ -205,7 +198,7 @@ mod tests { let content_value = header.as_ssz_bytes(); let header_oracle = default_header_oracle(); let chain_history_validator = ChainHistoryValidator { header_oracle }; - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: header.header.hash().0, }); chain_history_validator @@ -214,67 +207,63 @@ mod tests { .unwrap(); } - #[tokio::test] - async fn validate_epoch_acc() { - let epoch_acc = - 
std::fs::read("./../trin-validation/src/assets/epoch_accs/0x5ec1ffb8c3b146f42606c74ced973dc16ec5a107c0345858c343fc94780b4218.bin").unwrap(); - let epoch_acc = EpochAccumulator::from_ssz_bytes(&epoch_acc).unwrap(); + #[test_log::test(tokio::test)] + async fn validate_header_by_number() { + let header_with_proof_ssz = get_header_with_proof_ssz(); + let header_with_proof = + HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header"); let header_oracle = default_header_oracle(); let chain_history_validator = ChainHistoryValidator { header_oracle }; - let content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { - epoch_hash: epoch_acc.tree_hash_root(), + let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: header_with_proof.header.number, }); - let content = epoch_acc.as_ssz_bytes(); chain_history_validator - .validate_content(&content_key, &content) + .validate_content(&content_key, &header_with_proof_ssz) .await .unwrap(); } - #[tokio::test] - #[should_panic(expected = "Invalid epoch accumulator tree hash root.")] - async fn invalidate_epoch_acc_with_invalid_root_hash() { - let epoch_acc = - std::fs::read("./../trin-validation/src/assets/epoch_accs/0x5ec1ffb8c3b146f42606c74ced973dc16ec5a107c0345858c343fc94780b4218.bin").unwrap(); - let mut epoch_acc = EpochAccumulator::from_ssz_bytes(&epoch_acc).unwrap(); + #[test_log::test(tokio::test)] + #[should_panic(expected = "Merkle proof validation failed for pre-merge header")] + async fn invalidate_header_by_number_with_invalid_number() { + let header_with_proof_ssz = get_header_with_proof_ssz(); + let mut header = + HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header"); + + // set invalid block height + header.header.number = 669052; + + let content_value = header.as_ssz_bytes(); let header_oracle = default_header_oracle(); let chain_history_validator = ChainHistoryValidator { header_oracle }; - let 
content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { - epoch_hash: epoch_acc.tree_hash_root(), + let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: header.header.number, }); - - epoch_acc[0] = HeaderRecord { - block_hash: B256::random(), - total_difficulty: U256::ZERO, - }; - let invalid_content = epoch_acc.as_ssz_bytes(); - chain_history_validator - .validate_content(&content_key, &invalid_content) + .validate_content(&content_key, &content_value) .await .unwrap(); } - #[tokio::test] - #[should_panic(expected = "Invalid epoch accumulator, missing from pre-merge accumulator.")] - async fn invalidate_epoch_acc_missing_from_pre_merge_acc() { - let epoch_acc = - std::fs::read("./../trin-validation/src/assets/epoch_accs/0x5ec1ffb8c3b146f42606c74ced973dc16ec5a107c0345858c343fc94780b4218.bin").unwrap(); - let mut epoch_acc = EpochAccumulator::from_ssz_bytes(&epoch_acc).unwrap(); + #[test_log::test(tokio::test)] + #[should_panic(expected = "Merkle proof validation failed for pre-merge header")] + async fn invalidate_header_by_number_with_invalid_gaslimit() { + let header_with_proof_ssz: Vec = get_header_with_proof_ssz(); + let mut header = + HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header"); + + // set invalid block gaslimit + // valid gaslimit = 3141592 + header.header.gas_limit = U256::from(3141591); + + let content_value = header.as_ssz_bytes(); let header_oracle = default_header_oracle(); let chain_history_validator = ChainHistoryValidator { header_oracle }; - - epoch_acc[0] = HeaderRecord { - block_hash: B256::random(), - total_difficulty: U256::ZERO, - }; - let content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { - epoch_hash: epoch_acc.tree_hash_root(), + let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey { + block_number: header.header.number, }); - let invalid_content = epoch_acc.as_ssz_bytes(); - 
chain_history_validator - .validate_content(&content_key, &invalid_content) + .validate_content(&content_key, &content_value) .await .unwrap(); } diff --git a/trin-state/src/validation/validator.rs b/trin-state/src/validation/validator.rs index a4efc45ce..a88ce508c 100644 --- a/trin-state/src/validation/validator.rs +++ b/trin-state/src/validation/validator.rs @@ -145,7 +145,7 @@ impl StateValidator { async fn get_state_root(&self, block_hash: B256) -> Result { let header_oracle = self.header_oracle.read().await; let header = header_oracle - .recursive_find_header_with_proof(block_hash) + .recursive_find_header_by_hash_with_proof(block_hash) .await?; Ok(header.header.state_root) } @@ -186,7 +186,7 @@ mod tests { }); let history_jsonrpc_tx = MockJsonRpcBuilder::new() .with_response( - HistoryEndpoint::RecursiveFindContent(HistoryContentKey::BlockHeaderWithProof( + HistoryEndpoint::RecursiveFindContent(HistoryContentKey::BlockHeaderByHash( header.hash().into(), )), ContentInfo::Content { diff --git a/trin-validation/src/accumulator.rs b/trin-validation/src/accumulator.rs index 5446de1cc..a9cc0113c 100644 --- a/trin-validation/src/accumulator.rs +++ b/trin-validation/src/accumulator.rs @@ -3,12 +3,9 @@ use std::path::PathBuf; use anyhow::anyhow; use serde::{Deserialize, Serialize}; -use serde_json::Value; use ssz::Decode; use ssz_derive::{Decode, Encode}; use ssz_types::{typenum, VariableList}; -use tokio::sync::mpsc; -use tree_hash::TreeHash; use tree_hash_derive::TreeHash; use crate::{ @@ -16,14 +13,7 @@ use crate::{ merkle::proof::MerkleTree, TrinValidationAssets, }; -use ethportal_api::{ - types::{ - execution::{accumulator::EpochAccumulator, header::Header}, - jsonrpc::{endpoints::HistoryEndpoint, request::HistoryJsonRpcRequest}, - }, - utils::bytes::hex_decode, - EpochAccumulatorKey, HistoryContentKey, -}; +use ethportal_api::types::execution::{accumulator::EpochAccumulator, header::Header}; /// SSZ List[Hash256, max_length = MAX_HISTORICAL_EPOCHS] /// List of 
historical epoch accumulator merkle roots preceding current epoch. @@ -66,81 +56,6 @@ impl PreMergeAccumulator { header.number / EPOCH_SIZE } - pub async fn lookup_premerge_hash_by_number( - &self, - block_number: u64, - history_jsonrpc_tx: mpsc::UnboundedSender, - ) -> anyhow::Result { - if block_number > MERGE_BLOCK_NUMBER { - return Err(anyhow!("Post-merge blocks are not supported.")); - } - let rel_index = block_number % EPOCH_SIZE; - let epoch_index = block_number / EPOCH_SIZE; - let epoch_hash = self.historical_epochs[epoch_index as usize]; - let epoch_acc = self - .lookup_epoch_acc(epoch_hash, history_jsonrpc_tx) - .await?; - Ok(epoch_acc[rel_index as usize].block_hash) - } - - pub async fn lookup_epoch_acc( - &self, - epoch_hash: B256, - history_jsonrpc_tx: mpsc::UnboundedSender, - ) -> anyhow::Result { - let content_key = HistoryContentKey::EpochAccumulator(EpochAccumulatorKey { epoch_hash }); - let endpoint = HistoryEndpoint::RecursiveFindContent(content_key); - let (resp_tx, mut resp_rx) = mpsc::unbounded_channel::>(); - let request = HistoryJsonRpcRequest { - endpoint, - resp: resp_tx, - }; - history_jsonrpc_tx.send(request)?; - - let epoch_acc_ssz = match resp_rx.recv().await { - Some(val) => { - val.map_err(|msg| anyhow!("Chain history subnetwork request error: {:?}", msg))? 
- } - None => return Err(anyhow!("No response from chain history subnetwork")), - }; - let epoch_acc_ssz = epoch_acc_ssz - .as_str() - .ok_or_else(|| anyhow!("Invalid epoch acc received from chain history network"))?; - let epoch_acc_ssz = hex_decode(epoch_acc_ssz)?; - EpochAccumulator::from_ssz_bytes(&epoch_acc_ssz).map_err(|msg| { - anyhow!( - "Invalid epoch acc received from chain history network: {:?}", - msg - ) - }) - } - - pub async fn generate_proof( - &self, - header: &Header, - history_jsonrpc_tx: mpsc::UnboundedSender, - ) -> anyhow::Result<[B256; 15]> { - if header.number > MERGE_BLOCK_NUMBER { - return Err(anyhow!("Unable to generate proof for post-merge header.")); - } - // Fetch epoch accumulator for header - let epoch_index = self.get_epoch_index_of_header(header); - let epoch_hash = self.historical_epochs[epoch_index as usize]; - let epoch_acc = self - .lookup_epoch_acc(epoch_hash, history_jsonrpc_tx) - .await?; - - // Validate epoch accumulator hash matches historical hash from pre-merge accumulator - let epoch_index = self.get_epoch_index_of_header(header); - let epoch_hash = self.historical_epochs[epoch_index as usize]; - if epoch_acc.tree_hash_root() != epoch_hash { - return Err(anyhow!( - "Epoch acc hash sourced from network doesn't match historical hash in pre-merge acc." 
- )); - } - PreMergeAccumulator::construct_proof(header, &epoch_acc) - } - pub fn construct_proof( header: &Header, epoch_acc: &EpochAccumulator, diff --git a/trin-validation/src/header_validator.rs b/trin-validation/src/header_validator.rs index 8e6f49335..41664dd98 100644 --- a/trin-validation/src/header_validator.rs +++ b/trin-validation/src/header_validator.rs @@ -226,21 +226,17 @@ mod test { use alloy_primitives::{Address, Bloom, B256, U256}; use alloy_rlp::Decodable; use rstest::*; - use serde_json::{json, Value}; + use serde_json::Value; use ssz::{Decode, Encode}; - use tokio::sync::mpsc; use tree_hash::TreeHash; use crate::constants::DEFAULT_PRE_MERGE_ACC_HASH; use ethportal_api::{ - types::{ - execution::{ - accumulator::EpochAccumulator, - header_with_proof::{ - BlockHeaderProof, HeaderWithProof, PreMergeAccumulatorProof, SszNone, - }, + types::execution::{ + accumulator::EpochAccumulator, + header_with_proof::{ + BlockHeaderProof, HeaderWithProof, PreMergeAccumulatorProof, SszNone, }, - jsonrpc::{endpoints::HistoryEndpoint, request::HistoryJsonRpcRequest}, }, utils::bytes::{hex_decode, hex_encode}, HistoryContentKey, RawContentKey, @@ -262,32 +258,26 @@ mod test { // Use fluffy's proofs as test data to validate that trin // - generates proofs which match fluffy's // - validates hwps + let file = fs::read_to_string("./src/assets/fluffy/header_with_proofs.json").unwrap(); let json: Value = serde_json::from_str(&file).unwrap(); let hwps = json.as_object().unwrap(); let header_validator = get_mainnet_header_validator(); - let (tx, mut rx) = mpsc::unbounded_channel::(); - tokio::spawn(async move { - spawn_mock_epoch_acc_lookup(&mut rx).await; - }); let obj = hwps.get(&block_number.to_string()).unwrap(); // Validate content_key decodes let raw_ck = obj.get("content_key").unwrap().as_str().unwrap(); let raw_ck = RawContentKey::from_str(raw_ck).unwrap(); let ck = HistoryContentKey::try_from(raw_ck).unwrap(); match ck { - 
HistoryContentKey::BlockHeaderWithProof(_) => (), + HistoryContentKey::BlockHeaderByHash(_) => (), _ => panic!("Invalid test, content key decoded improperly"), } let raw_fluffy_hwp = obj.get("value").unwrap().as_str().unwrap(); let fluffy_hwp = HeaderWithProof::from_ssz_bytes(&hex_decode(raw_fluffy_hwp).unwrap()).unwrap(); let header = get_header(block_number); - let trin_proof = header_validator - .pre_merge_acc - .generate_proof(&header, tx) - .await - .unwrap(); + let epoch_accumulator = read_epoch_accumulator_122(); + let trin_proof = PreMergeAccumulator::construct_proof(&header, &epoch_accumulator).unwrap(); let fluffy_proof = match fluffy_hwp.proof { BlockHeaderProof::PreMergeAccumulatorProof(val) => val, _ => panic!("test reached invalid state"), @@ -331,16 +321,9 @@ mod test { #[tokio::test] async fn invalidate_invalid_proofs() { let header_validator = get_mainnet_header_validator(); - let (tx, mut rx) = mpsc::unbounded_channel::(); - tokio::spawn(async move { - spawn_mock_epoch_acc_lookup(&mut rx).await; - }); let header = get_header(1_000_001); - let mut proof = header_validator - .pre_merge_acc - .generate_proof(&header, tx) - .await - .unwrap(); + let epoch_accumulator = read_epoch_accumulator_122(); + let mut proof = PreMergeAccumulator::construct_proof(&header, &epoch_accumulator).unwrap(); proof.swap(0, 1); let hwp = HeaderWithProof { header, @@ -526,26 +509,12 @@ mod test { } } - async fn spawn_mock_epoch_acc_lookup(rx: &mut mpsc::UnboundedReceiver) { - match rx.recv().await { - Some(request) => match request.endpoint { - HistoryEndpoint::RecursiveFindContent(content_key) => { - let json_value = serde_json::to_value(content_key).unwrap(); - let response = json_value.as_str().unwrap(); - let epoch_acc_hash = response.trim_start_matches("0x03"); - let epoch_acc_hash = B256::from_str(epoch_acc_hash).unwrap(); - let epoch_acc_path = format!("./src/assets/epoch_accs/{epoch_acc_hash}.bin"); - let epoch_acc = fs::read(epoch_acc_path).unwrap(); - let 
epoch_acc = hex_encode(epoch_acc); - let content: Value = json!(epoch_acc); - let _ = request.resp.send(Ok(content)); - } - _ => panic!("Unexpected request endpoint"), - }, - None => { - panic!("Test run failed: Unable to get response from pre_merge_acc validation.") - } - } + fn read_epoch_accumulator_122() -> EpochAccumulator { + let epoch_acc_bytes = fs::read( + "../portal-spec-tests/tests/mainnet/history/accumulator/epoch-record-00122.ssz", + ) + .unwrap(); + EpochAccumulator::from_ssz_bytes(&epoch_acc_bytes).unwrap() } const HEADER_RLP_15_537_392: &str = "0xf90218a02f1dc309c7cc0a5a2e3b3dd9315fea0ffbc53c56f9237f3ca11b20de0232f153a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ea674fdde714fd979de3edf0f56aa9716b898ec8a0fee48a40a2765ab31fcd06ab6956341d13dc2c4b9762f2447aa425bb1c089b30a082864b3a65d1ac1917c426d48915dca0fc966fbf3f30fd051659f35dc3fd9be1a013c10513b52358022f800e2f9f1c50328798427b1b4a1ebbbd20b7417fb9719db90100ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff872741c5e4f6c39283ed14f08401c9c3808401c9a028846322c95c8f617369612d65617374322d31763932a02df332ffb74ecd15c9873d3f6153b878e1c514495dfb6e89ad88e574582b02a488232b0043952c93d98508fb17c6ee"; diff --git a/trin-validation/src/oracle.rs b/trin-validation/src/oracle.rs index c9cc68bdf..e10b5a511 100644 --- a/trin-validation/src/oracle.rs +++ b/trin-validation/src/oracle.rs @@ -9,6 +9,7 @@ use ethportal_api::{ consensus::header::BeaconBlockHeader, light_client::store::LightClientStore, types::{ + content_key::history::BlockHeaderByHashKey, 
execution::header_with_proof::HeaderWithProof, jsonrpc::{ endpoints::{BeaconEndpoint, HistoryEndpoint, StateEndpoint}, @@ -16,7 +17,7 @@ use ethportal_api::{ }, portal::ContentInfo, }, - BlockHeaderKey, ContentValue, Enr, HistoryContentKey, HistoryContentValue, + ContentValue, Enr, HistoryContentKey, HistoryContentValue, }; /// Responsible for dispatching cross-overlay-network requests @@ -49,21 +50,13 @@ impl HeaderOracle { } } - // Only serves pre-block hashes aka. portal-network verified data only - pub async fn get_hash_at_height(&self, block_number: u64) -> anyhow::Result { - self.header_validator - .pre_merge_acc - .lookup_premerge_hash_by_number(block_number, self.history_jsonrpc_tx()?) - .await - } - /// Returns the HeaderWithProof for the given block hash by performing a recursive find content /// request. - pub async fn recursive_find_header_with_proof( + pub async fn recursive_find_header_by_hash_with_proof( &self, block_hash: B256, ) -> anyhow::Result { - let content_key = HistoryContentKey::BlockHeaderWithProof(BlockHeaderKey { + let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey { block_hash: block_hash.0, }); let endpoint = HistoryEndpoint::RecursiveFindContent(content_key.clone());