5 changes: 2 additions & 3 deletions crates/era-utils/src/export.rs
@@ -1,6 +1,7 @@
//! Logic to export era1 block history from the database
//! and inject it into era1 files with `Era1Writer`.

use crate::calculate_td_by_number;
use alloy_consensus::BlockHeader;
use alloy_primitives::{BlockNumber, B256, U256};
use eyre::{eyre, Result};
@@ -114,9 +115,7 @@ where

let mut total_difficulty = if config.first_block_number > 0 {
let prev_block_number = config.first_block_number - 1;
provider
.header_td_by_number(prev_block_number)?
.ok_or_else(|| eyre!("Total difficulty not found for block {prev_block_number}"))?
calculate_td_by_number(provider, prev_block_number)?
} else {
U256::ZERO
};
49 changes: 32 additions & 17 deletions crates/era-utils/src/history.rs
@@ -1,3 +1,4 @@
use alloy_consensus::BlockHeader;
use alloy_primitives::{BlockHash, BlockNumber, U256};
use futures_util::{Stream, StreamExt};
use reth_db_api::{
@@ -19,15 +20,15 @@ use reth_etl::Collector;
use reth_fs_util as fs;
use reth_primitives_traits::{Block, FullBlockBody, FullBlockHeader, NodePrimitives};
use reth_provider::{
providers::StaticFileProviderRWRefMut, BlockWriter, ProviderError, StaticFileProviderFactory,
providers::StaticFileProviderRWRefMut, BlockReader, BlockWriter, StaticFileProviderFactory,
StaticFileSegment, StaticFileWriter,
};
use reth_stages_types::{
CheckpointBlockRange, EntitiesCheckpoint, HeadersCheckpoint, StageCheckpoint, StageId,
};
use reth_storage_api::{
errors::ProviderResult, DBProvider, DatabaseProviderFactory, HeaderProvider,
NodePrimitivesProvider, StageCheckpointWriter,
errors::ProviderResult, DBProvider, DatabaseProviderFactory, NodePrimitivesProvider,
StageCheckpointWriter,
};
use std::{
collections::Bound,
@@ -82,11 +83,6 @@
.get_highest_static_file_block(StaticFileSegment::Headers)
.unwrap_or_default();

// Find the latest total difficulty
let mut td = static_file_provider
.header_td_by_number(height)?
.ok_or(ProviderError::TotalDifficultyNotFound(height))?;

while let Some(meta) = rx.recv()? {
let from = height;
let provider = provider_factory.database_provider_rw()?;
@@ -96,7 +92,6 @@
&mut static_file_provider.latest_writer(StaticFileSegment::Headers)?,
&provider,
hash_collector,
&mut td,
height..,
)?;

@@ -146,7 +141,7 @@

/// Extracts block headers and bodies from `meta` and appends them using `writer` and `provider`.
///
/// Adds on to `total_difficulty` and collects hash to height using `hash_collector`.
/// Collects hash to height using `hash_collector`.
///
/// Skips all blocks below the [`start_bound`] of `block_numbers` and stops when reaching past the
/// [`end_bound`] or the end of the file.
@@ -160,7 +155,6 @@ pub fn process<Era, P, B, BB, BH>(
writer: &mut StaticFileProviderRWRefMut<'_, <P as NodePrimitivesProvider>::Primitives>,
provider: &P,
hash_collector: &mut Collector<BlockHash, BlockNumber>,
total_difficulty: &mut U256,
block_numbers: impl RangeBounds<BlockNumber>,
) -> eyre::Result<BlockNumber>
where
@@ -182,7 +176,7 @@ where
as Box<dyn Fn(Result<BlockTuple, E2sError>) -> eyre::Result<(BH, BB)>>);
let iter = ProcessIter { iter, era: meta };

process_iter(iter, writer, provider, hash_collector, total_difficulty, block_numbers)
process_iter(iter, writer, provider, hash_collector, block_numbers)
}

type ProcessInnerIter<R, BH, BB> =
@@ -271,7 +265,6 @@ pub fn process_iter<P, B, BB, BH>(
writer: &mut StaticFileProviderRWRefMut<'_, <P as NodePrimitivesProvider>::Primitives>,
provider: &P,
hash_collector: &mut Collector<BlockHash, BlockNumber>,
total_difficulty: &mut U256,
block_numbers: impl RangeBounds<BlockNumber>,
) -> eyre::Result<BlockNumber>
where
@@ -311,11 +304,8 @@ where
let hash = header.hash_slow();
last_header_number = number;

// Increase total difficulty
*total_difficulty += header.difficulty();

// Append to Headers segment
writer.append_header(&header, *total_difficulty, &hash)?;
writer.append_header(&header, U256::ZERO, &hash)?;

// Write bodies to database.
provider.append_block_bodies(vec![(header.number(), Some(body))])?;
@@ -382,3 +372,28 @@

Ok(())
}

/// Calculates the total difficulty for a given block number by summing the difficulty
/// of all blocks from genesis to the given block.
///
/// Very expensive - iterates through all blocks in batches of 1000.
///
/// Returns an error if any block is missing.
pub fn calculate_td_by_number<P>(provider: &P, num: BlockNumber) -> eyre::Result<U256>
where
P: BlockReader,
{
let mut total_difficulty = U256::ZERO;
let mut start = 0;

while start <= num {
let end = (start + 1000 - 1).min(num);

total_difficulty +=
provider.headers_range(start..=end)?.iter().map(|h| h.difficulty()).sum::<U256>();

start = end + 1;
}

Ok(total_difficulty)
}
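
As a usage note for the new helper (a minimal sketch, not part of this diff): callers that need the total difficulty at an export boundary can mirror the export.rs change above, falling back to zero when starting from genesis. The `boundary_td` wrapper below is hypothetical; `calculate_td_by_number` and the `BlockReader` bound come from this PR.

```rust
use alloy_primitives::{BlockNumber, U256};
use reth_era_utils::calculate_td_by_number;
use reth_provider::BlockReader;

/// Hypothetical helper mirroring the export.rs call site above: total
/// difficulty of the block preceding `first_block_number`, or zero when
/// starting from genesis.
fn boundary_td<P: BlockReader>(
    provider: &P,
    first_block_number: BlockNumber,
) -> eyre::Result<U256> {
    if first_block_number > 0 {
        // Sums header difficulties from genesis upward in batches of 1000 blocks.
        calculate_td_by_number(provider, first_block_number - 1)
    } else {
        Ok(U256::ZERO)
    }
}
```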
3 changes: 2 additions & 1 deletion crates/era-utils/src/lib.rs
@@ -14,5 +14,6 @@ pub use export::{export, ExportConfig};

/// Imports history from ERA files.
pub use history::{
build_index, decode, import, open, process, process_iter, save_stage_checkpoints, ProcessIter,
build_index, calculate_td_by_number, decode, import, open, process, process_iter,
save_stage_checkpoints, ProcessIter,
};
10 changes: 2 additions & 8 deletions crates/node/core/src/node_config.rs
@@ -10,7 +10,7 @@ use crate::{
};
use alloy_consensus::BlockHeader;
use alloy_eips::BlockHashOrNumber;
use alloy_primitives::{BlockNumber, B256};
use alloy_primitives::{BlockNumber, B256, U256};
use eyre::eyre;
use reth_chainspec::{ChainSpec, EthChainSpec, MAINNET};
use reth_config::config::PruneConfig;
@@ -330,12 +330,6 @@ impl<ChainSpec> NodeConfig<ChainSpec> {
.header_by_number(head)?
.expect("the header for the latest block is missing, database is corrupt");

let total_difficulty = provider
.header_td_by_number(head)?
// total difficulty is effectively deprecated, but still required in some places, e.g.
// p2p
.unwrap_or_default();

let hash = provider
.block_hash(head)?
.expect("the hash for the latest block is missing, database is corrupt");
@@ -344,7 +338,7 @@ impl<ChainSpec> NodeConfig<ChainSpec> {
number: head,
hash,
difficulty: header.difficulty(),
total_difficulty,
total_difficulty: U256::ZERO,
@joshieDo (Collaborator, Author) commented on Oct 20, 2025:
I'm guessing so, but is this okay?

@Rjected (Member) replied on Oct 20, 2025:
I think this should be fine; in p2p this is deprecated as of eth/69, and the node builder only uses the Head to get the timestamp.

timestamp: header.timestamp(),
})
}
1 change: 0 additions & 1 deletion crates/rpc/rpc-eth-types/src/error/mod.rs
@@ -462,7 +462,6 @@ impl From<reth_errors::ProviderError> for EthApiError {
}
ProviderError::BestBlockNotFound => Self::HeaderNotFound(BlockId::latest()),
ProviderError::BlockNumberForTransactionIndexNotFound => Self::UnknownBlockOrTxIndex,
ProviderError::TotalDifficultyNotFound(num) => Self::HeaderNotFound(num.into()),
ProviderError::FinalizedBlockNotFound => Self::HeaderNotFound(BlockId::finalized()),
ProviderError::SafeBlockNotFound => Self::HeaderNotFound(BlockId::safe()),
err => Self::Internal(err.into()),
20 changes: 3 additions & 17 deletions crates/stages/stages/src/stages/era.rs
@@ -10,12 +10,11 @@ use reth_era_utils as era;
use reth_etl::Collector;
use reth_primitives_traits::{FullBlockBody, FullBlockHeader, NodePrimitives};
use reth_provider::{
BlockReader, BlockWriter, DBProvider, HeaderProvider, StageCheckpointWriter,
StaticFileProviderFactory, StaticFileWriter,
BlockReader, BlockWriter, DBProvider, StageCheckpointWriter, StaticFileProviderFactory,
StaticFileWriter,
};
use reth_stages_api::{ExecInput, ExecOutput, Stage, StageError, UnwindInput, UnwindOutput};
use reth_static_file_types::StaticFileSegment;
use reth_storage_errors::ProviderError;
use std::{
fmt::{Debug, Formatter},
iter,
@@ -176,11 +175,6 @@
.get_highest_static_file_block(StaticFileSegment::Headers)
.unwrap_or_default();

// Find the latest total difficulty
let mut td = static_file_provider
.header_td_by_number(last_header_number)?
.ok_or(ProviderError::TotalDifficultyNotFound(last_header_number))?;

// Although headers were downloaded in reverse order, the collector iterates it in
// ascending order
let mut writer = static_file_provider.latest_writer(StaticFileSegment::Headers)?;
@@ -190,7 +184,6 @@
&mut writer,
provider,
&mut self.hash_collector,
&mut td,
last_header_number..=input.target(),
)
.map_err(|e| StageError::Fatal(e.into()))?;
@@ -336,7 +329,7 @@ mod tests {
};
use reth_ethereum_primitives::TransactionSigned;
use reth_primitives_traits::{SealedBlock, SealedHeader};
use reth_provider::{BlockNumReader, TransactionsProvider};
use reth_provider::{BlockNumReader, HeaderProvider, TransactionsProvider};
use reth_testing_utils::generators::{
random_block_range, random_signed_tx, BlockRangeParams,
};
@@ -447,9 +440,6 @@
match output {
Some(output) if output.checkpoint.block_number > initial_checkpoint => {
let provider = self.db.factory.provider()?;
let mut td = provider
.header_td_by_number(initial_checkpoint.saturating_sub(1))?
.unwrap_or_default();

for block_num in initial_checkpoint..
output
@@ -469,10 +459,6 @@
assert!(header.is_some());
let header = SealedHeader::seal_slow(header.unwrap());
assert_eq!(header.hash(), hash);

// validate the header total difficulty
td += header.difficulty;
assert_eq!(provider.header_td_by_number(block_num)?, Some(td));
}

self.validate_db_blocks(
39 changes: 7 additions & 32 deletions crates/stages/stages/src/stages/headers.rs
@@ -1,5 +1,5 @@
use alloy_consensus::BlockHeader;
use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256};
use alloy_primitives::{BlockHash, BlockNumber, Bytes, B256, U256};
use futures_util::StreamExt;
use reth_config::config::EtlConfig;
use reth_db_api::{
@@ -16,15 +16,14 @@ use reth_network_p2p::headers::{
};
use reth_primitives_traits::{serde_bincode_compat, FullBlockHeader, NodePrimitives, SealedHeader};
use reth_provider::{
providers::StaticFileWriter, BlockHashReader, DBProvider, HeaderProvider,
HeaderSyncGapProvider, StaticFileProviderFactory,
providers::StaticFileWriter, BlockHashReader, DBProvider, HeaderSyncGapProvider,
StaticFileProviderFactory,
};
use reth_stages_api::{
CheckpointBlockRange, EntitiesCheckpoint, ExecInput, ExecOutput, HeadersCheckpoint, Stage,
StageCheckpoint, StageError, StageId, UnwindInput, UnwindOutput,
};
use reth_static_file_types::StaticFileSegment;
use reth_storage_errors::provider::ProviderError;
use std::task::{ready, Context, Poll};

use tokio::sync::watch;
@@ -107,11 +106,6 @@
.get_highest_static_file_block(StaticFileSegment::Headers)
.unwrap_or_default();

// Find the latest total difficulty
let mut td = static_file_provider
.header_td_by_number(last_header_number)?
.ok_or(ProviderError::TotalDifficultyNotFound(last_header_number))?;

// Although headers were downloaded in reverse order, the collector iterates it in ascending
// order
let mut writer = static_file_provider.latest_writer(StaticFileSegment::Headers)?;
@@ -134,11 +128,8 @@
}
last_header_number = header.number();

// Increase total difficulty
td += header.difficulty();

// Append to Headers segment
writer.append_header(header, td, header_hash)?;
writer.append_header(header, U256::ZERO, header_hash)?;
A Member commented:
Do we ever call this with something nonzero after this PR? If not, then maybe we should just remove the arg?

A Collaborator replied:
Yeah, I think we should remove this from the function arg entirely.

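A rough sketch of what that follow-up could look like (illustrative only, assuming the writer's other responsibilities stay unchanged); the trait and two-argument signature below are stand-ins, not the existing reth writer API:

```rust
use alloy_primitives::BlockHash;
use reth_storage_api::errors::ProviderResult;

/// Illustrative stand-in for the suggestion above: once every caller passes
/// `U256::ZERO`, the total-difficulty parameter can be dropped so appending
/// takes only the header and its hash. Hypothetical, not part of reth.
trait HeaderWriterSketch<H> {
    /// Appends a header and its hash to the Headers static file segment.
    fn append_header(&mut self, header: &H, hash: &BlockHash) -> ProviderResult<()>;
}
```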
}

info!(target: "sync::stages::headers", total = total_headers, "Writing headers hash index");
@@ -399,7 +390,7 @@ mod tests {
use crate::test_utils::{
stage_test_suite, ExecuteStageTestRunner, StageTestRunner, UnwindStageTestRunner,
};
use alloy_primitives::B256;
use alloy_primitives::{B256, U256};
use assert_matches::assert_matches;
use reth_provider::{DatabaseProviderFactory, ProviderFactory, StaticFileProviderFactory};
use reth_stages_api::StageUnitCheckpoint;
@@ -415,7 +406,7 @@
ReverseHeadersDownloader, ReverseHeadersDownloaderBuilder,
};
use reth_network_p2p::test_utils::{TestHeaderDownloader, TestHeadersClient};
use reth_provider::{test_utils::MockNodeTypesWithDB, BlockNumReader};
use reth_provider::{test_utils::MockNodeTypesWithDB, BlockNumReader, HeaderProvider};
use tokio::sync::watch;

pub(crate) struct HeadersTestRunner<D: HeaderDownloader> {
@@ -493,9 +484,6 @@
match output {
Some(output) if output.checkpoint.block_number > initial_checkpoint => {
let provider = self.db.factory.provider()?;
let mut td = provider
.header_td_by_number(initial_checkpoint.saturating_sub(1))?
.unwrap_or_default();

for block_num in initial_checkpoint..output.checkpoint.block_number {
// look up the header hash
@@ -509,10 +497,6 @@
assert!(header.is_some());
let header = SealedHeader::seal_slow(header.unwrap());
assert_eq!(header.hash(), hash);

// validate the header total difficulty
td += header.difficulty;
assert_eq!(provider.header_td_by_number(block_num)?, Some(td));
}
}
_ => self.check_no_header_entry_above(initial_checkpoint)?,
@@ -635,16 +619,7 @@
let static_file_provider = provider.static_file_provider();
let mut writer = static_file_provider.latest_writer(StaticFileSegment::Headers).unwrap();
for header in sealed_headers {
let ttd = if header.number() == 0 {
header.difficulty()
} else {
let parent_block_number = header.number() - 1;
let parent_ttd =
provider.header_td_by_number(parent_block_number).unwrap().unwrap_or_default();
parent_ttd + header.difficulty()
};

writer.append_header(header.header(), ttd, &header.hash()).unwrap();
writer.append_header(header.header(), U256::ZERO, &header.hash()).unwrap();
}
drop(writer);

3 changes: 0 additions & 3 deletions crates/storage/errors/src/provider.rs
@@ -58,9 +58,6 @@ pub enum ProviderError {
/// The account address.
address: Address,
},
/// The total difficulty for a block is missing.
#[error("total difficulty not found for block #{_0}")]
TotalDifficultyNotFound(BlockNumber),
/// When required header related data was not found but was required.
#[error("no header found for {_0:?}")]
HeaderNotFound(BlockHashOrNumber),