fix: resolve PR concerns
KolbyML committed Sep 16, 2024
1 parent d091da2 commit c5af146
Showing 7 changed files with 179 additions and 104 deletions.
71 changes: 8 additions & 63 deletions ethportal-peertest/src/scenarios/gossip.rs
@@ -192,63 +192,35 @@ pub async fn test_gossip_dropped_with_offer(peertest: &Peertest, target: &Client
.unwrap();

// check that the fresh target has stored block_2
assert_eq!(
header_value_2,
wait_for_history_content(&fresh_target, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(&fresh_target, body_key_2.clone()).await
);
// check that the target has block_1 and block_2
assert_eq!(
header_value_2,
wait_for_history_content(target, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(target, body_key_2.clone()).await
);
assert_eq!(
header_value_1,
wait_for_history_content(target, header_key_1.clone()).await
);
assert_eq!(
receipts_value_1,
wait_for_history_content(target, receipts_key_1.clone()).await
);

// check that the peertest bootnode has block_1 and block_2
assert_eq!(
header_value_2,
wait_for_history_content(&peertest.bootnode.ipc_client, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await
);
assert_eq!(
header_value_1,
wait_for_history_content(&peertest.bootnode.ipc_client, header_key_1.clone()).await
);
assert_eq!(
receipts_value_1,
wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await
);

// check that the peertest node has block_1 and block_2
assert_eq!(
header_value_2,
wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2.clone()).await
);
assert_eq!(
header_value_1,
wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_1.clone()).await
);
assert_eq!(
receipts_value_1,
wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1.clone()).await
@@ -277,17 +249,8 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target:
.await
.unwrap();

// Store receipts_1 locally in client that is not connected to the network
let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040641();
// Store receipts_1 locally in client, without validation, that is not connected to the network
let (receipts_key_1, receipts_value_1) = fixture_receipts_15040641();
let store_result = HistoryNetworkApiClient::store(
&fresh_target,
header_key_1.clone(),
header_value_1.encode(),
)
.await
.unwrap();
assert!(store_result);
let store_result = HistoryNetworkApiClient::store(
&fresh_target,
receipts_key_1.clone(),
@@ -297,9 +260,15 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target:
.unwrap();
assert!(store_result);

// Store body_2 locally in target
// Store header_1, header_2, body_2 locally in target
let (header_key_1, header_value_1) = fixture_header_by_hash_with_proof_15040641();
let (header_key_2, header_value_2) = fixture_header_by_hash_with_proof_15040708();
let (body_key_2, body_value_2) = fixture_block_body_15040708();
let store_result =
HistoryNetworkApiClient::store(target, header_key_1.clone(), header_value_1.encode())
.await
.unwrap();
assert!(store_result);
let store_result =
HistoryNetworkApiClient::store(target, header_key_2.clone(), header_value_2.encode())
.await
@@ -338,53 +307,29 @@ pub async fn test_gossip_dropped_with_find_content(peertest: &Peertest, target:
wait_for_history_content(&fresh_target, body_key_2.clone()).await
);
// check that the target has block_1 and block_2
assert_eq!(
header_value_2,
wait_for_history_content(target, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(target, body_key_2.clone()).await
);
assert_eq!(
header_value_1,
wait_for_history_content(target, header_key_1.clone()).await
);
assert_eq!(
receipts_value_1,
wait_for_history_content(target, receipts_key_1.clone()).await
);
// check that the peertest bootnode has block_1 and block_2
assert_eq!(
header_value_2,
wait_for_history_content(&peertest.bootnode.ipc_client, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(&peertest.bootnode.ipc_client, body_key_2.clone()).await
);
assert_eq!(
header_value_1,
wait_for_history_content(&peertest.bootnode.ipc_client, header_key_1.clone()).await
);
assert_eq!(
receipts_value_1,
wait_for_history_content(&peertest.bootnode.ipc_client, receipts_key_1.clone()).await
);

// check that the peertest node has block_1 and block_2
assert_eq!(
header_value_2,
wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_2.clone()).await
);
assert_eq!(
body_value_2,
wait_for_history_content(&peertest.nodes[0].ipc_client, body_key_2.clone()).await
);
assert_eq!(
header_value_1,
wait_for_history_content(&peertest.nodes[0].ipc_client, header_key_1.clone()).await
);
assert_eq!(
receipts_value_1,
wait_for_history_content(&peertest.nodes[0].ipc_client, receipts_key_1.clone()).await
7 changes: 4 additions & 3 deletions portal-bridge/src/api/execution.rs
@@ -69,15 +69,16 @@ impl ExecutionApi {
})
}

/// Return a validated FullHeader & content key / value pair for the given header.
/// Return a validated FullHeader & content by hash and number key / value pair for the given
/// header.
pub async fn get_header(
&self,
height: u64,
epoch_acc: Option<Arc<EpochAccumulator>>,
) -> anyhow::Result<(
FullHeader,
HistoryContentKey,
HistoryContentKey,
HistoryContentKey, // BlockHeaderByHash
HistoryContentKey, // BlockHeaderByNumber
HistoryContentValue,
)> {
// Geth requires block numbers to be formatted using the following padding.
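For context, a sketch of how a bridge caller might consume the widened `get_header` return value. This is a hypothetical call site, not part of the commit: `api`, `height`, and `epoch_acc` are assumed to exist in the surrounding bridge code, matching the parameters and tuple order shown in the diff above.

```rust
// Hypothetical call site (assumed bindings: `api: ExecutionApi`,
// `height: u64`, `epoch_acc: Option<Arc<EpochAccumulator>>`).
let (full_header, header_by_hash_key, header_by_number_key, header_value) =
    api.get_header(height, epoch_acc.clone()).await?;

// The same HistoryContentValue backs both keys, so a bridge can offer the
// identical encoded header under the by-hash key and the by-number key alike.
```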
2 changes: 1 addition & 1 deletion portal-bridge/src/bridge/era1.rs
@@ -258,7 +258,7 @@ impl Era1Bridge {
panic!("Failed to get epoch from era1 file: {e}");
}
};
let (_, epoch_acc) = match lookup_epoch_acc(
let epoch_acc = match lookup_epoch_acc(
epoch_index,
&self.header_oracle.header_validator.pre_merge_acc,
&self.epoch_acc_path,
26 changes: 7 additions & 19 deletions portal-bridge/src/bridge/history.rs
@@ -189,11 +189,14 @@ impl HistoryBridge {
// look up the epoch acc on a header by header basis
if height <= MERGE_BLOCK_NUMBER && current_epoch_index != height / EPOCH_SIZE {
current_epoch_index = height / EPOCH_SIZE;
epoch_acc = match self
.construct_and_gossip_epoch_acc(current_epoch_index)
.await
epoch_acc = match lookup_epoch_acc(
current_epoch_index,
&self.header_oracle.header_validator.pre_merge_acc,
&self.epoch_acc_path,
)
.await
{
Ok(val) => Some(val),
Ok(epoch_acc) => Some(Arc::new(epoch_acc)),
Err(msg) => {
warn!("Unable to find epoch acc for gossip range: {current_epoch_index}. Skipping iteration: {msg:?}");
continue;
@@ -349,21 +352,6 @@ impl HistoryBridge {
Ok(())
}

/// Attempt to lookup an epoch accumulator from local portal-accumulators path provided via cli
/// arg. Gossip the epoch accumulator if found.
async fn construct_and_gossip_epoch_acc(
&self,
epoch_index: u64,
) -> anyhow::Result<Arc<EpochAccumulator>> {
let (_, epoch_acc) = lookup_epoch_acc(
epoch_index,
&self.header_oracle.header_validator.pre_merge_acc,
&self.epoch_acc_path,
)
.await?;
Ok(Arc::new(epoch_acc))
}

async fn construct_and_gossip_receipt(
full_header: &FullHeader,
portal_client: &HttpClient,
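The inlined `lookup_epoch_acc` call above only fires when a header crosses into a new pre-merge epoch. Below is a self-contained sketch of that trigger; the constants are assumptions for illustration (Portal pre-merge epochs hold 8192 headers, and 15,537,393 is the last proof-of-work block) and trin defines its own, possibly differently named, constants for them.

```rust
// Self-contained sketch of the epoch-boundary trigger used in the bridge loop.
// EPOCH_SIZE and LAST_PRE_MERGE_BLOCK are assumed values, not taken from this diff.
const EPOCH_SIZE: u64 = 8192;
const LAST_PRE_MERGE_BLOCK: u64 = 15_537_393;

/// Epoch index for a pre-merge header, or `None` once past the merge.
fn epoch_index_for(height: u64) -> Option<u64> {
    (height <= LAST_PRE_MERGE_BLOCK).then(|| height / EPOCH_SIZE)
}

fn main() {
    // Start with an impossible index so the first pre-merge header always
    // triggers an accumulator lookup, mirroring the bridge loop above.
    let mut current_epoch_index = u64::MAX;
    for height in [0, 8191, 8192, 8193, LAST_PRE_MERGE_BLOCK + 1] {
        match epoch_index_for(height) {
            Some(idx) if idx != current_epoch_index => {
                current_epoch_index = idx;
                println!("height {height}: entered epoch {idx}, look up its accumulator");
            }
            Some(idx) => println!("height {height}: still in epoch {idx}, reuse it"),
            None => println!("height {height}: post-merge, no epoch accumulator needed"),
        }
    }
}
```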
5 changes: 2 additions & 3 deletions portal-bridge/src/bridge/utils.rs
@@ -1,4 +1,3 @@
use alloy_primitives::B256;
use anyhow::anyhow;
use ethportal_api::{types::execution::accumulator::EpochAccumulator, utils::bytes::hex_encode};
use ssz::Decode;
@@ -10,7 +9,7 @@ pub async fn lookup_epoch_acc(
epoch_index: u64,
pre_merge_acc: &PreMergeAccumulator,
epoch_acc_path: &Path,
) -> anyhow::Result<(B256, EpochAccumulator)> {
) -> anyhow::Result<EpochAccumulator> {
let epoch_hash = pre_merge_acc.historical_epochs[epoch_index as usize];
let epoch_hash_pretty = hex_encode(epoch_hash);
let epoch_hash_pretty = epoch_hash_pretty.trim_start_matches("0x");
@@ -26,5 +25,5 @@ pub async fn lookup_epoch_acc(
))
}
};
Ok((epoch_hash, epoch_acc))
Ok(epoch_acc)
}
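With the epoch hash dropped from the return value, a caller that still needs it can read the hash straight from the pre-merge accumulator, exactly as `lookup_epoch_acc` does internally. A hedged sketch follows; the `use` paths for `lookup_epoch_acc` and `PreMergeAccumulator` are assumptions, since the diff shows the function and field but not their modules.

```rust
use std::{path::Path, sync::Arc};

use alloy_primitives::B256;
use ethportal_api::types::execution::accumulator::EpochAccumulator;
use portal_bridge::bridge::utils::lookup_epoch_acc; // assumed public path
use trin_validation::accumulator::PreMergeAccumulator; // assumed path

/// Sketch: recover the epoch hash alongside the accumulator now that
/// `lookup_epoch_acc` returns only the `EpochAccumulator`.
async fn epoch_acc_with_hash(
    epoch_index: u64,
    pre_merge_acc: &PreMergeAccumulator,
    epoch_acc_path: &Path,
) -> anyhow::Result<(B256, Arc<EpochAccumulator>)> {
    // Same lookup the function performs internally for the file name.
    let epoch_hash = pre_merge_acc.historical_epochs[epoch_index as usize];
    let epoch_acc = lookup_epoch_acc(epoch_index, pre_merge_acc, epoch_acc_path).await?;
    Ok((epoch_hash, Arc::new(epoch_acc)))
}
```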
95 changes: 80 additions & 15 deletions trin-history/src/validation.rs
@@ -32,7 +32,7 @@ impl Validator<HistoryContentKey> for ChainHistoryValidator {
HistoryContentKey::BlockHeaderByHash(key) => {
let header_with_proof =
HeaderWithProof::from_ssz_bytes(content).map_err(|err| {
anyhow!("Header with proof content has invalid encoding: {err:?}")
anyhow!("Header by hash content has invalid encoding: {err:?}")
})?;
let header_hash = header_with_proof.header.hash();
ensure!(
@@ -51,7 +51,7 @@
HistoryContentKey::BlockHeaderByNumber(key) => {
let header_with_proof =
HeaderWithProof::from_ssz_bytes(content).map_err(|err| {
anyhow!("Header with proof content has invalid encoding: {err:?}")
anyhow!("Header by number content has invalid encoding: {err:?}")
})?;
let header_number = header_with_proof.header.number;
ensure!(
@@ -131,10 +131,11 @@ mod tests {
use ssz::Encode;

use ethportal_api::{
types::content_key::history::BlockHeaderByHashKey, utils::bytes::hex_decode,
types::content_key::history::{BlockHeaderByHashKey, BlockHeaderByNumberKey},
utils::bytes::hex_decode,
};

fn get_hwp_ssz() -> Vec<u8> {
fn get_header_with_proof_ssz() -> Vec<u8> {
let file =
fs::read_to_string("../trin-validation/src/assets/fluffy/header_with_proofs.json")
.unwrap();
@@ -146,25 +147,27 @@ }
}

#[test_log::test(tokio::test)]
async fn validate_header() {
let hwp_ssz = get_hwp_ssz();
let hwp = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header");
async fn validate_header_by_hash() {
let header_with_proof_ssz = get_header_with_proof_ssz();
let header_with_proof =
HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
let header_oracle = default_header_oracle();
let chain_history_validator = ChainHistoryValidator { header_oracle };
let content_key = HistoryContentKey::BlockHeaderByHash(BlockHeaderByHashKey {
block_hash: hwp.header.hash().0,
block_hash: header_with_proof.header.hash().0,
});
chain_history_validator
.validate_content(&content_key, &hwp_ssz)
.validate_content(&content_key, &header_with_proof_ssz)
.await
.unwrap();
}

#[test_log::test(tokio::test)]
#[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
async fn invalidate_header_with_invalid_number() {
let hwp_ssz = get_hwp_ssz();
let mut header = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header");
async fn invalidate_header_by_hash_with_invalid_number() {
let header_with_proof_ssz = get_header_with_proof_ssz();
let mut header =
HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");

// set invalid block height
header.header.number = 669052;
@@ -183,9 +186,10 @@

#[test_log::test(tokio::test)]
#[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
async fn invalidate_header_with_invalid_gaslimit() {
let hwp_ssz = get_hwp_ssz();
let mut header = HeaderWithProof::from_ssz_bytes(&hwp_ssz).expect("error decoding header");
async fn invalidate_header_by_hash_with_invalid_gaslimit() {
let header_with_proof_ssz = get_header_with_proof_ssz();
let mut header =
HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");

// set invalid block gaslimit
// valid gaslimit = 3141592
@@ -203,6 +207,67 @@
.unwrap();
}

#[test_log::test(tokio::test)]
async fn validate_header_by_number() {
let header_with_proof_ssz = get_header_with_proof_ssz();
let header_with_proof =
HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");
let header_oracle = default_header_oracle();
let chain_history_validator = ChainHistoryValidator { header_oracle };
let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey {
block_number: header_with_proof.header.number,
});
chain_history_validator
.validate_content(&content_key, &header_with_proof_ssz)
.await
.unwrap();
}

#[test_log::test(tokio::test)]
#[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
async fn invalidate_header_by_number_with_invalid_number() {
let header_with_proof_ssz = get_header_with_proof_ssz();
let mut header =
HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");

// set invalid block height
header.header.number = 669052;

let content_value = header.as_ssz_bytes();
let header_oracle = default_header_oracle();
let chain_history_validator = ChainHistoryValidator { header_oracle };
let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey {
block_number: header.header.number,
});
chain_history_validator
.validate_content(&content_key, &content_value)
.await
.unwrap();
}

#[test_log::test(tokio::test)]
#[should_panic(expected = "Merkle proof validation failed for pre-merge header")]
async fn invalidate_header_by_number_with_invalid_gaslimit() {
let header_with_proof_ssz: Vec<u8> = get_header_with_proof_ssz();
let mut header =
HeaderWithProof::from_ssz_bytes(&header_with_proof_ssz).expect("error decoding header");

// set invalid block gaslimit
// valid gaslimit = 3141592
header.header.gas_limit = U256::from(3141591);

let content_value = header.as_ssz_bytes();
let header_oracle = default_header_oracle();
let chain_history_validator = ChainHistoryValidator { header_oracle };
let content_key = HistoryContentKey::BlockHeaderByNumber(BlockHeaderByNumberKey {
block_number: header.header.number,
});
chain_history_validator
.validate_content(&content_key, &content_value)
.await
.unwrap();
}

fn default_header_oracle() -> Arc<RwLock<HeaderOracle>> {
Arc::new(RwLock::new(HeaderOracle::default()))
}
