Skip to content

Commit

Permalink
eth-sparse-mpt: verify proof consistency
Browse files Browse the repository at this point in the history
  • Loading branch information
dvush committed Feb 21, 2025
1 parent b5b5a87 commit 69ee27b
Show file tree
Hide file tree
Showing 7 changed files with 88 additions and 8 deletions.
11 changes: 11 additions & 0 deletions crates/eth-sparse-mpt/src/reth_sparse_trie/shared_cache.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ use crate::{
utils::HashMap,
};
use alloy_primitives::Bytes;
use alloy_primitives::B256;
use alloy_trie::Nibbles;

/// SparseTrieSharedCache is a storage for fetched parts of the ethereum tries
Expand All @@ -15,6 +16,16 @@ pub struct SparseTrieSharedCache {
internal: Arc<RwLock<RethSparseTrieShareCacheInternal>>,
}

impl SparseTrieSharedCache {
    /// Builds a shared cache whose account trie will verify the first
    /// inserted proof against `parent_root_hash`.
    pub fn new_with_parent_hash(parent_root_hash: B256) -> Self {
        let internal = {
            let mut cache = RethSparseTrieShareCacheInternal::default();
            cache.account_trie.expected_root_hash = parent_root_hash;
            cache
        };
        Self {
            internal: Arc::new(RwLock::new(internal)),
        }
    }
}

pub struct StorageTrieInfo {}

#[derive(Default, Debug)]
Expand Down
39 changes: 34 additions & 5 deletions crates/eth-sparse-mpt/src/sparse_mpt/fixed_trie.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
use crate::utils::{hash_map_with_capacity, HashMap, HashSet};
use crate::utils::fast_hash;
use crate::utils::{hash_map_with_capacity, HashMap};
use alloy_primitives::keccak256;
use alloy_primitives::Bytes;
use alloy_rlp::Decodable;
use alloy_trie::nodes::{
BranchNode as AlloyBranchNode, ExtensionNode as AlloyExtensionNode, LeafNode as AlloyLeafNode,
TrieNode as AlloyTrieNode,
};
use reth_trie::Nibbles;
use revm_primitives::B256;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, Seq};
use smallvec::SmallVec;
Expand All @@ -18,6 +21,9 @@ use super::{
DiffTrieNode, DiffTrieNodeKind, NodeCursor,
};

/// RLP encoding of the empty/null node (`0x80`); inserted as the root of an empty trie.
const NULL_NODE_BYTES: Bytes = Bytes::from_static(&[0x80]);
/// Empty nibble path addressing the head (root) node of the trie.
const HEAD_NODE_PATH: Nibbles = Nibbles::new();

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum FixedTrieNode {
Leaf(Arc<FixedLeafNode>),
Expand All @@ -40,6 +46,9 @@ pub enum AddNodeError {
/// parent must be added before children
#[error("Invalid input")]
InvalidInput,
/// This happens when proofs added are not from the same trie.
#[error("Inconsistent proofs added")]
InconsistentProofs,
}

impl FixedTrieNode {
Expand Down Expand Up @@ -136,11 +145,15 @@ impl From<AlloyExtensionNode> for FixedExtensionNode {
#[serde_as]
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct FixedTrie {
    /// If set to a non-zero value, the first proof inserted is verified
    /// against this expected root hash for consistency.
    #[serde(default)]
    pub expected_root_hash: B256,
    #[serde_as(as = "Seq<(_, _)>")]
    pub nodes: HashMap<u64, FixedTrieNode>,
    // ptr (key into `nodes`) of the head (root) node
    pub head: u64,
    // monotonically increasing counter used to allocate new node ptrs
    pub ptrs: u64,
    // used to verify proof consistency: stores fast_hash(path) -> fast_hash(node_from_proof)
    pub nodes_inserted: HashMap<u64, u64>,
    // used for preallocations; a wrong value will not influence correctness
    pub height: usize,
}
Expand Down Expand Up @@ -206,17 +219,33 @@ impl FixedTrie {
/// nodes must be sorted by key
/// nodes must be empty if and only if trie is empty
pub fn add_nodes(&mut self, nodes: &[(Nibbles, Bytes)]) -> Result<(), AddNodeError> {
// this is not a critical error
debug_assert!(nodes.iter().is_sorted_by_key(|(path, _)| path));

// when adding an empty proof we init the trie to be empty
if nodes.is_empty() && self.nodes.is_empty() {
self.nodes.insert(0, FixedTrieNode::Null);
self.head = 0;
self.ptrs = 0;
self.nodes_inserted.insert(Nibbles::new());
self.nodes_inserted
.insert(fast_hash(&HEAD_NODE_PATH), fast_hash(&NULL_NODE_BYTES));
}

for (path, node) in nodes {
if self.nodes_inserted.contains(path) {
let path_hash = fast_hash(&path);
let node_hash = fast_hash(&node);

if let Some(inserted_node_hash) = self.nodes_inserted.get(&path_hash) {
if *inserted_node_hash != node_hash {
return Err(AddNodeError::InconsistentProofs);
}
continue;
} else if path == &HEAD_NODE_PATH && !self.expected_root_hash.is_zero() {
// here we verify the first proof that we insert and compare the head node hash to the root hash provided from outside
let proof_root_hash = keccak256(node);
if proof_root_hash != self.expected_root_hash {
return Err(AddNodeError::InconsistentProofs);
}
}

let alloy_trie_node = AlloyTrieNode::decode(&mut node.as_ref())?;
Expand Down Expand Up @@ -296,7 +325,7 @@ impl FixedTrie {
}
self.height = max(self.height, height);

self.nodes_inserted.insert(path.clone());
self.nodes_inserted.insert(path_hash, node_hash);
let ptr = get_new_ptr(&mut self.ptrs);
self.nodes.insert(ptr, fixed_trie_node);

Expand Down
10 changes: 9 additions & 1 deletion crates/eth-sparse-mpt/src/utils.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
use std::hash::{Hash, Hasher};

use alloy_primitives::{keccak256, Bytes};
use alloy_rlp::{length_of_length, BufMut, Encodable, Header, EMPTY_STRING_CODE};
use alloy_trie::{
nodes::{ExtensionNodeRef, LeafNodeRef},
Nibbles,
};
use reth_trie::RlpNode;
use rustc_hash::FxBuildHasher;
use rustc_hash::{FxBuildHasher, FxHasher};

pub type HashMap<K, V> = std::collections::HashMap<K, V, FxBuildHasher>;
pub type HashSet<K> = std::collections::HashSet<K, FxBuildHasher>;
Expand All @@ -14,6 +16,12 @@ pub fn hash_map_with_capacity<K, V>(capacity: usize) -> HashMap<K, V> {
HashMap::with_capacity_and_hasher(capacity, FxBuildHasher)
}

/// Computes a fast, non-cryptographic 64-bit hash of `value` using `FxHasher`.
///
/// Suitable for internal dedup/consistency keys; not collision-resistant
/// against adversarial inputs.
pub fn fast_hash<H: Hash>(value: &H) -> u64 {
    let mut state = FxHasher::default();
    Hash::hash(value, &mut state);
    Hasher::finish(&state)
}

pub fn rlp_pointer(rlp_encode: Bytes) -> Bytes {
if rlp_encode.len() < 32 {
rlp_encode
Expand Down
1 change: 1 addition & 0 deletions crates/rbuilder/src/building/testing/test_chain_state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,7 @@ impl TestChainState {

let root_hasher = Arc::from(RootHasherImpl::new(
genesis_header.num_hash(),
None,
RootHashContext::new(true, false, None),
provider_factory.clone(),
provider_factory.clone(),
Expand Down
9 changes: 9 additions & 0 deletions crates/rbuilder/src/provider/reth_prov.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ use alloy_primitives::{BlockHash, BlockNumber, B256};
use reth_errors::ProviderResult;
use reth_provider::{BlockReader, DatabaseProviderFactory, HeaderProvider};
use reth_provider::{StateCommitmentProvider, StateProviderBox};
use tracing::error;

use super::{RootHasher, StateProviderFactory};

Expand Down Expand Up @@ -68,8 +69,16 @@ where

fn root_hasher(&self, parent_num_hash: BlockNumHash) -> ProviderResult<Box<dyn RootHasher>> {
let hasher = self.history_by_block_hash(parent_num_hash.hash)?;
let parent_state_root = self
.provider
.header_by_hash_or_number(parent_num_hash.hash.into())?
.map(|h| h.state_root);
if parent_state_root.is_none() {
error!("Parent hash is not found (for root_hasher)");
}
Ok(Box::new(RootHasherImpl::new(
parent_num_hash,
parent_state_root,
self.root_hash_context.clone(),
self.provider.clone(),
hasher,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ use reth_provider::{
StateProviderBox,
};
use revm_primitives::B256;
use tracing::error;

use crate::{
building::builders::mock_block_building_helper::MockRootHasher, roothash::RootHashContext,
Expand Down Expand Up @@ -72,8 +73,16 @@ where

fn root_hasher(&self, parent_num_hash: BlockNumHash) -> ProviderResult<Box<dyn RootHasher>> {
Ok(if let Some(root_hash_context) = &self.root_hash_context {
let parent_state_root = self
.provider
.header_by_hash_or_number(parent_num_hash.hash.into())?
.map(|h| h.state_root);
if parent_state_root.is_none() {
error!("Parent hash is not found (for root_hasher)");
}
Box::new(RootHasherImpl::new(
parent_num_hash,
parent_state_root,
root_hash_context.clone(),
self.provider.clone(),
self.provider.clone(),
Expand Down
17 changes: 15 additions & 2 deletions crates/rbuilder/src/utils/provider_factory_reopen.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ use std::ops::DerefMut;
use std::{path::PathBuf, sync::Arc};
use tokio::sync::broadcast;
use tokio_util::sync::CancellationToken;
use tracing::debug;
use tracing::{debug, error};

/// This struct is used as a workaround for https://github.com/paradigmxyz/reth/issues/7836
/// it shares one instance of the provider factory that is recreated when inconsistency is detected.
Expand Down Expand Up @@ -247,8 +247,15 @@ where
.check_consistency_and_reopen_if_needed()
.map_err(|e| ProviderError::Database(DatabaseError::Other(e.to_string())))
.unwrap();
let parent_state_root = provider
.header_by_hash_or_number(parent_num_hash.hash.into())?
.map(|h| h.state_root);
if parent_state_root.is_none() {
error!("Parent hash is not found (for root_hasher)");
}
Box::new(RootHasherImpl::new(
parent_num_hash,
parent_state_root,
root_hash_config.clone(),
provider.clone(),
provider,
Expand All @@ -270,16 +277,22 @@ pub struct RootHasherImpl<T, HasherType> {
impl<T, HasherType> RootHasherImpl<T, HasherType> {
    /// Creates a root hasher for the block that follows `parent_num_hash`.
    ///
    /// When `parent_state_root` is known, the sparse-trie shared cache is
    /// seeded with it so fetched proofs can be verified for consistency;
    /// otherwise a default (non-verifying) cache is used.
    pub fn new(
        parent_num_hash: BlockNumHash,
        parent_state_root: Option<B256>,
        config: RootHashContext,
        provider: T,
        hasher: HasherType,
    ) -> Self {
        let sparse_trie_shared_cache = parent_state_root
            .map(SparseTrieSharedCache::new_with_parent_hash)
            .unwrap_or_default();
        Self {
            parent_num_hash,
            provider,
            hasher,
            config,
            sparse_trie_shared_cache,
        }
    }
}
Expand Down

0 comments on commit 69ee27b

Please sign in to comment.