From c5af146ba44bea114ae9b0140aafe9b588d7c9a6 Mon Sep 17 00:00:00 2001
From: Kolby Moroz Liebl <31669092+KolbyML@users.noreply.github.com>
Date: Mon, 16 Sep 2024 13:27:35 -0600
Subject: [PATCH] fix: resolve PR concerns

---
 ethportal-peertest/src/scenarios/gossip.rs | 71 ++--------------
 portal-bridge/src/api/execution.rs         |  7 +-
 portal-bridge/src/bridge/era1.rs           |  2 +-
 portal-bridge/src/bridge/history.rs        | 26 ++----
 portal-bridge/src/bridge/utils.rs          |  5 +-
 trin-history/src/validation.rs             | 95 ++++++++++++++++----
 trin-validation/src/header_validator.rs    | 77 ++++++++++++++++++
 7 files changed, 179 insertions(+), 104 deletions(-)

diff --git a/ethportal-peertest/src/scenarios/gossip.rs b/ethportal-peertest/src/scenarios/gossip.rs
index 25546b4a9..7cd11b747 100644
--- a/ethportal-peertest/src/scenarios/gossip.rs
+++ b/ethportal-peertest/src/scenarios/gossip.rs
@@ -192,63 +192,35 @@ pub async fn test_gossip_dropped_with_offer(peertest: &Peertest, target: &Client
         .unwrap();
 
     // check that the fresh target has stored block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(&fresh_target, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(&fresh_target, body_key_2.clone()).await
     );
 
     // check that the target has block_1 and block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(target, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(target, body_key_2.clone()).await
     );
-    assert_eq!(
-        header_value_1,
-        wait_for_history_content(target, header_key_1.clone()).await
-    );
     assert_eq!(
         receipts_value_1,
         wait_for_history_content(target, receipts_key_1.clone()).await
     );
 
     // check that the peertest bootnode has block_1 and block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(&peertest.bootnode.ipc_client, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await
     );
-    assert_eq!(
-        header_value_1,
-        wait_for_history_content(&peertest.bootnode.ipc_client, header_key_1.clone()).await
-    );
     assert_eq!(
         receipts_value_1,
         wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await
     );
 
     // check that the peertest node has block_1 and block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2.clone()).await
     );
-    assert_eq!(
-        header_value_1,
-        wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_1.clone()).await
-    );
     assert_eq!(
         receipts_value_1,
         wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1.clone()).await
@@ -277,17 +249,8 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target:
         .await
         .unwrap();
 
-    // Store receipts_1 locally in client that is not connected to the network
-    let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040641();
+    // Store receipts_1 locally in client, without validation, that is not connected to the network
     let (receipts_key_1, receipts_value_1) = fixture_receipts_15040641();
-    let store_result = HistoryNetworkApiClient::store(
-        &fresh_target,
-        header_key_1.clone(),
-        header_value_1.encode(),
-    )
-    .await
-    .unwrap();
-    assert!(store_result);
     let store_result = HistoryNetworkApiClient::store(
         &fresh_target,
         receipts_key_1.clone(),
@@ -297,9 +260,15 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target:
     .unwrap();
     assert!(store_result);
 
-    // Store body_2 locally in target
+    // Store header_1, header_2, body_2 locally in target
+    let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040641();
     let (header_key_2, header_value_2) = fixture_header_by_hash_with_proof_15040708();
     let (body_key_2, body_value_2) = fixture_block_body_15040708();
+    let store_result =
+        HistoryNetworkApiClient::store(target, header_key_1.clone(), header_value_1.encode())
+            .await
+            .unwrap();
+    assert!(store_result);
     let store_result =
         HistoryNetworkApiClient::store(target, header_key_2.clone(), header_value_2.encode())
             .await
@@ -338,53 +307,29 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target:
         wait_for_history_content(&fresh_target, body_key_2.clone()).await
     );
 
     // check that the target has block_1 and block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(target, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(target, body_key_2.clone()).await
     );
-    assert_eq!(
-        header_value_1,
-        wait_for_history_content(target, header_key_1.clone()).await
-    );
     assert_eq!(
         receipts_value_1,
         wait_for_history_content(target, receipts_key_1.clone()).await
     );
 
     // check that the peertest bootnode has block_1 and block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(&peertest.bootnode.ipc_client, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await
     );
-    assert_eq!(
-        header_value_1,
-        wait_for_history_content(&peertest.bootnode.ipc_client, header_key_1.clone()).await
-    );
     assert_eq!(
         receipts_value_1,
         wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await
     );
 
     // check that the peertest node has block_1 and block_2
-    assert_eq!(
-        header_value_2,
-        wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_2.clone()).await
-    );
     assert_eq!(
         body_value_2,
         wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2.clone()).await
     );
-    assert_eq!(
-        header_value_1,
-        wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_1.clone()).await
-    );
     assert_eq!(
         receipts_value_1,
         wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1.clone()).await
diff --git a/portal-bridge/src/api/execution.rs b/portal-bridge/src/api/execution.rs
index 496dc6a34..a5cb39445 100644
--- a/portal-bridge/src/api/execution.rs
+++ b/portal-bridge/src/api/execution.rs
@@ -69,15 +69,16 @@ impl ExecutionApi {
         })
     }
 
-    /// Return a validated FullHeader & content key / value pair for the given header.
+    /// Return a validated FullHeader & content by hash and number key / value pair for the given
+    /// header.
     pub async fn get_header(
         &self,
         height: u64,
         epoch_acc: Option<Arc<EpochAccumulator>>,
     ) -> anyhow::Result<(
         FullHeader,
-        HistoryContentKey,
-        HistoryContentKey,
+        HistoryContentKey, // BlockHeaderByHash
+        HistoryContentKey, // BlockHeaderByNumber
         HistoryContentValue,
     )> {
         // Geth requires block numbers to be formatted using the following padding.
diff --git a/portal-bridge/src/bridge/era1.rs b/portal-bridge/src/bridge/era1.rs
index 629bbd009..74a409e6f 100644
--- a/portal-bridge/src/bridge/era1.rs
+++ b/portal-bridge/src/bridge/era1.rs
@@ -258,7 +258,7 @@ impl Era1Bridge {
                 panic!("Failed to get epoch from era1 file: {e}");
             }
         };
-        let (_, epoch_acc) = match lookup_epoch_acc(
+        let epoch_acc = match lookup_epoch_acc(
             epoch_index,
             &self.header_oracle.header_validator.pre_merge_acc,
             &self.epoch_acc_path,
diff --git a/portal-bridge/src/bridge/history.rs b/portal-bridge/src/bridge/history.rs
index da7d46710..4d4238b51 100644
--- a/portal-bridge/src/bridge/history.rs
+++ b/portal-bridge/src/bridge/history.rs
@@ -189,11 +189,14 @@ impl HistoryBridge {
             // look up the epoch acc on a header by header basis
             if height <= MERGE_BLOCK_NUMBER && current_epoch_index != height / EPOCH_SIZE {
                 current_epoch_index = height / EPOCH_SIZE;
-                epoch_acc = match self
-                    .construct_and_gossip_epoch_acc(current_epoch_index)
-                    .await
+                epoch_acc = match lookup_epoch_acc(
+                    current_epoch_index,
+                    &self.header_oracle.header_validator.pre_merge_acc,
+                    &self.epoch_acc_path,
+                )
+                .await
                 {
-                    Ok(val) => Some(val),
+                    Ok(epoch_acc) => Some(Arc::new(epoch_acc)),
                     Err(msg) => {
                         warn!("Unable to find epoch acc for gossip range: {current_epoch_index}. Skipping iteration: {msg:?}");
                         continue;
@@ -349,21 +352,6 @@ impl HistoryBridge {
         Ok(())
     }
 
-    /// Attempt to lookup an epoch accumulator from local portal-accumulators path provided via cli
-    /// arg. Gossip the epoch accumulator if found.
-    async fn construct_and_gossip_epoch_acc(
-        &self,
-        epoch_index: u64,
-    ) -> anyhow::Result<Arc<EpochAccumulator>> {
-        let (_, epoch_acc) = lookup_epoch_acc(
-            epoch_index,
-            &self.header_oracle.header_validator.pre_merge_acc,
-            &self.epoch_acc_path,
-        )
-        .await?;
-        Ok(Arc::new(epoch_acc))
-    }
-
     async fn construct_and_gossip_receipt(
         full_header: &FullHeader,
         portal_client: &HttpClient,
diff --git a/portal-bridge/src/bridge/utils.rs b/portal-bridge/src/bridge/utils.rs
index 487227edb..4f7ee9ba7 100644
--- a/portal-bridge/src/bridge/utils.rs
+++ b/portal-bridge/src/bridge/utils.rs
@@ -1,4 +1,3 @@
-use alloy_primitives::B256;
 use anyhow::anyhow;
 use ethportal_api::{types::execution::accumulator::EpochAccumulator, utils::bytes::hex_encode};
 use ssz::Decode;
@@ -10,7 +9,7 @@ pub async fn lookup_epoch_acc(
     epoch_index: u64,
     pre_merge_acc: &PreMergeAccumulator,
     epoch_acc_path: &Path,
-) -> anyhow::Result<(B256, EpochAccumulator)> {
+) -> anyhow::Result<EpochAccumulator> {
     let epoch_hash = pre_merge_acc.historical_epochs[epoch_index as usize];
     let epoch_hash_pretty = hex_encode(epoch_hash);
     let epoch_hash_pretty = epoch_hash_pretty.trim_start_matches("0x");
@@ -26,5 +25,5 @@ pub async fn lookup_epoch_acc(
             ))
         }
     };
-    Ok((epoch_hash, epoch_acc))
+    Ok(epoch_acc)
 }
diff --git a/trin-history/src/validation.rs b/trin-history/src/validation.rs
index d59028ac3..31286a94d 100644
--- a/trin-history/src/validation.rs
+++ b/trin-history/src/validation.rs
@@ -32,7 +32,7 @@ impl Validator<HistoryContentKey> for ChainHistoryValidator {
             HistoryContentKey::BlockHeaderByHash(key) => {
                 let header_with_proof =
                     HeaderWithProof::from_ssz_bytes(content).map_err(|err| {
-                        anyhow!("Header with proof content has invalid encoding: {err:?}")
+                        anyhow!("Header by hash content has invalid encoding: {err:?}")
                     })?;
                 let header_hash = header_with_proof.header.hash();
                 ensure!(
@@ -51,7 +51,7 @@ impl Validator<HistoryContentKey> for ChainHistoryValidator {
             HistoryContentKey::BlockHeaderByNumber(key) => {
                 let header_with_proof =
                     HeaderWithProof::from_ssz_bytes(content).map_err(|err| {
-                        anyhow!("Header with proof content has invalid encoding: {err:?}")
+                        anyhow!("Header by number content has invalid encoding: {err:?}")
                     })?;
                 let header_number = header_with_proof.header.number;
                 ensure!(
@@ -131,10 +131,11 @@ mod tests {
     use ssz::Encode;
 
     use ethportal_api::{
-        types::content_key::history::BlockHeaderByHashKey, utils::bytes::hex_decode,
+        types::content_key::history::{BlockHeaderByHashKey, BlockHeaderByNumberKey},
+        utils::bytes::hex_decode,
     };
 
-    fn get_hwp_ssz() -> Vec<u8> {
+    fn get_header_with_proof_ssz() -> Vec<u8> {
         let file =
             fs::read_to_string("../trin-validation/src/assets/fluffy/header_with_proofs.json")
                 .unwrap();
@@ -146,25 +147,27 @@
     }
 
     #[test_log::test(tokio::test)]
-    async fn validate_header() {
-        let hwp_ssz = get_hwp_ssz();
-        let hwp = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header");
+    async fn validate_header_by_hash() {
+        let header_with_proof_ssz = get_header_with_proof_ssz();
+        let header_with_proof =
+            HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
         let header_oracle = default_header_oracle();
         let chain_history_validator = ChainHistoryValidator { header_oracle };
         let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey {
-            block_hash: hwp.header.hash().0,
+            block_hash: header_with_proof.header.hash().0,
         });
         chain_history_validator
-            .validate_content(&content_key, &hwp_ssz)
+            .validate_content(&content_key, &header_with_proof_ssz)
             .await
             .unwrap();
     }
 
     #[test_log::test(tokio::test)]
     #[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
-    async fn invalidate_header_with_invalid_number() {
-        let hwp_ssz = get_hwp_ssz();
-        let mut header = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header");
+    async fn invalidate_header_by_hash_with_invalid_number() {
+        let header_with_proof_ssz = get_header_with_proof_ssz();
+        let mut header =
+            HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
 
         // set invalid block height
         header.header.number = 669052;
@@ -183,9 +186,10 @@ mod tests {
 
     #[test_log::test(tokio::test)]
     #[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
-    async fn invalidate_header_with_invalid_gaslimit() {
-        let hwp_ssz = get_hwp_ssz();
-        let mut header = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header");
+    async fn invalidate_header_by_hash_with_invalid_gaslimit() {
+        let header_with_proof_ssz = get_header_with_proof_ssz();
+        let mut header =
+            HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
 
         // set invalid block gaslimit
         // valid gaslimit = 3141592
@@ -203,6 +207,67 @@ mod tests {
             .unwrap();
     }
 
+    #[test_log::test(tokio::test)]
+    async fn validate_header_by_number() {
+        let header_with_proof_ssz = get_header_with_proof_ssz();
+        let header_with_proof =
+            HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
+        let header_oracle = default_header_oracle();
+        let chain_history_validator = ChainHistoryValidator { header_oracle };
+        let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey {
+            block_number: header_with_proof.header.number,
+        });
+        chain_history_validator
+            .validate_content(&content_key, &header_with_proof_ssz)
+            .await
+            .unwrap();
+    }
+
+    #[test_log::test(tokio::test)]
+    #[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
+    async fn invalidate_header_by_number_with_invalid_number() {
+        let header_with_proof_ssz = get_header_with_proof_ssz();
+        let mut header =
+            HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
+
+        // set invalid block height
+        header.header.number = 669052;
+
+        let content_value = header.as_ssz_bytes();
+        let header_oracle = default_header_oracle();
+        let chain_history_validator = ChainHistoryValidator { header_oracle };
+        let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey {
+            block_number: header.header.number,
+        });
+        chain_history_validator
+            .validate_content(&content_key, &content_value)
+            .await
+            .unwrap();
+    }
+
+    #[test_log::test(tokio::test)]
+    #[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
+    async fn invalidate_header_by_number_with_invalid_gaslimit() {
+        let header_with_proof_ssz: Vec<u8> = get_header_with_proof_ssz();
+        let mut header =
+            HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
+
+        // set invalid block gaslimit
+        // valid gaslimit = 3141592
+        header.header.gas_limit = U256::from(3141591);
+
+        let content_value = header.as_ssz_bytes();
+        let header_oracle = default_header_oracle();
+        let chain_history_validator = ChainHistoryValidator { header_oracle };
+        let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey {
+            block_number: header.header.number,
+        });
+        chain_history_validator
+            .validate_content(&content_key, &content_value)
+            .await
+            .unwrap();
+    }
+
     fn default_header_oracle() -> Arc<RwLock<HeaderOracle>> {
         Arc::new(RwLock::new(HeaderOracle::default()))
     }
diff --git a/trin-validation/src/header_validator.rs b/trin-validation/src/header_validator.rs
index d4002ef63..41664dd98 100644
--- a/trin-validation/src/header_validator.rs
+++ b/trin-validation/src/header_validator.rs
@@ -239,8 +239,59 @@ mod test {
             },
         },
         utils::bytes::{hex_decode, hex_encode},
+        HistoryContentKey, RawContentKey,
     };
 
+    #[rstest]
+    #[case(1_000_001)]
+    #[case(1_000_002)]
+    #[case(1_000_003)]
+    #[case(1_000_004)]
+    #[case(1_000_005)]
+    #[case(1_000_006)]
+    #[case(1_000_007)]
+    #[case(1_000_008)]
+    #[case(1_000_009)]
+    #[case(1_000_010)]
+    #[tokio::test]
+    async fn generate_and_verify_header_with_proofs(#[case] block_number: u64) {
+        // Use fluffy's proofs as test data to validate that trin
+        // - generates proofs which match fluffy's
+        // - validates hwps
+        let file = fs::read_to_string("./src/assets/fluffy/header_with_proofs.json").unwrap();
+        let json: Value = serde_json::from_str(&file).unwrap();
+        let hwps = json.as_object().unwrap();
+        let header_validator = get_mainnet_header_validator();
+        let obj = hwps.get(&block_number.to_string()).unwrap();
+        // Validate content_key decodes
+        let raw_ck = obj.get("content_key").unwrap().as_str().unwrap();
+        let raw_ck = RawContentKey::from_str(raw_ck).unwrap();
+        let ck = HistoryContentKey::try_from(raw_ck).unwrap();
+        match ck {
+            HistoryContentKey::BlockHeaderByHash(_) => (),
+            _ => panic!("Invalid test, content key decoded improperly"),
+        }
+        let raw_fluffy_hwp = obj.get("value").unwrap().as_str().unwrap();
+        let fluffy_hwp =
+            HeaderWithProof::from_ssz_bytes(&hex_decode(raw_fluffy_hwp).unwrap()).unwrap();
+        let header = get_header(block_number);
+        let epoch_accumulator = read_epoch_accumulator_122();
+        let trin_proof = PreMergeAccumulator::construct_proof(&header, &epoch_accumulator).unwrap();
+        let fluffy_proof = match fluffy_hwp.proof {
+            BlockHeaderProof::PreMergeAccumulatorProof(val) => val,
+            _ => panic!("test reached invalid state"),
+        };
+        assert_eq!(trin_proof, fluffy_proof.proof);
+        let hwp = HeaderWithProof {
+            header,
+            proof: BlockHeaderProof::PreMergeAccumulatorProof(PreMergeAccumulatorProof {
+                proof: trin_proof,
+            }),
+        };
+        header_validator.validate_header_with_proof(&hwp).unwrap();
+    }
+
     #[rstest]
     #[case(HEADER_RLP_15_537_392, HWP_TEST_VECTOR_15_537_392, 15_537_392)]
     #[case(HEADER_RLP_15_537_393, HWP_TEST_VECTOR_15_537_393, 15_537_393)]
@@ -267,6 +318,24 @@ mod test {
         assert_eq!(encoded_hwp, test_vector);
     }
 
+    #[tokio::test]
+    async fn invalidate_invalid_proofs() {
+        let header_validator = get_mainnet_header_validator();
+        let header = get_header(1_000_001);
+        let epoch_accumulator = read_epoch_accumulator_122();
+        let mut proof = PreMergeAccumulator::construct_proof(&header, &epoch_accumulator).unwrap();
+        proof.swap(0, 1);
+        let hwp = HeaderWithProof {
+            header,
+            proof: BlockHeaderProof::PreMergeAccumulatorProof(PreMergeAccumulatorProof { proof }),
+        };
+        assert!(header_validator
+            .validate_header_with_proof(&hwp)
+            .unwrap_err()
+            .to_string()
+            .contains("Merkle proof validation failed"));
+    }
+
     #[tokio::test]
     #[should_panic(expected = "Missing accumulator proof for pre-merge header.")]
     async fn header_validator_cannot_validate_merge_header_missing_proof() {
@@ -440,6 +509,14 @@ mod test {
         }
     }
 
+    fn read_epoch_accumulator_122() -> EpochAccumulator {
+        let epoch_acc_bytes = fs::read(
+            "../portal-spec-tests/tests/mainnet/history/accumulator/epoch-record-00122.ssz",
+        )
+        .unwrap();
+        EpochAccumulator::from_ssz_bytes(&epoch_acc_bytes).unwrap()
+    }
+
     const HEADER_RLP_15_537_392: &str = "0xf90218a02f1dc309c7cc0a5a2e3b3dd9315fea0ffbc53c56f9237f3ca11b20de0232f153a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ea674fdde714fd979de3edf0f56aa9716b898ec8a0fee48a40a2765ab31fcd06ab6956341d13dc2c4b9762f2447aa425bb1c089b30a082864b3a65d1ac1917c426d48915dca0fc966fbf3f30fd051659f35dc3fd9be1a013c10513b52358022f800e2f9f1c50328798427b1b4a1ebbbd20b7417fb9719db90100ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff872741c5e4f6c39283ed14f08401c9c3808401c9a028846322c95c8f617369612d65617374322d31763932a02df332ffb74ecd15c9873d3f6153b878e1c514495dfb6e89ad88e574582b02a488232b0043952c93d98508fb17c6ee";
 
     const HEADER_RLP_15_537_393: &str =
"0xf9021ba02b3ea3cd4befcab070812443affb08bf17a91ce382c714a536ca3cacab82278ba01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794829bd824b016326a401d083b33d092293333a830a04919dafa6ac8becfbbd0c2808f6c9511a057c21e42839caff5dfb6d3ef514951a0dd5eec02b019ff76e359b09bfa19395a2a0e97bc01e70d8d5491e640167c96a8a0baa842cfd552321a9c2450576126311e071680a1258032219c6490b663c1dab8b90100000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000080000000000000000000000000000000000000000000000000200000000000000000008000000000040000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000084000000000010020000000000000000000000000000000000020000000200000000200000000000000000000000000000000000000000400000000000000000000000008727472e1db3626a83ed14f18401c9c3808401c9a205846322c96292e4b883e5bda9e7a59ee4bb99e9b1bc460021a04cbec03dddd4b939730a7fe6048729604d4266e82426d472a2b2024f3cc4043f8862a3ee77461d4fc9850a1a4e5f06";