1 change: 0 additions & 1 deletion Cargo.lock


7 changes: 0 additions & 7 deletions crates/prune/prune/src/segments/static_file/headers.rs
@@ -68,9 +68,6 @@ impl<Provider: StaticFileProviderFactory + DBProvider<Tx: DbTxMut>> Segment<Prov
let mut canonical_headers_cursor =
provider.tx_ref().cursor_write::<tables::CanonicalHeaders>()?;

// Note: We no longer prune HeaderTerminalDifficulties table after Paris/Merge
// as it's read-only and kept for backward compatibility

let mut limiter = input.limiter.floor_deleted_entries_limit_to_multiple_of(
NonZeroUsize::new(HEADER_TABLES_TO_PRUNE).unwrap(),
);
@@ -217,8 +214,6 @@ mod tests {

assert_eq!(db.table::<tables::CanonicalHeaders>().unwrap().len(), headers.len());
assert_eq!(db.table::<tables::Headers>().unwrap().len(), headers.len());
// Note: HeaderTerminalDifficulties table is read-only in database after Paris/Merge
// so we don't check its length as it's not being written to

let test_prune = |to_block: BlockNumber, expected_result: (PruneProgress, usize)| {
let segment = super::Headers::new(db.factory.static_file_provider());
@@ -282,8 +277,6 @@ mod tests {
db.table::<tables::Headers>().unwrap().len(),
headers.len() - (last_pruned_block_number + 1) as usize
);
// Note: HeaderTerminalDifficulties table is read-only in database after
// Paris/Merge so we don't check its length as it's not being written to
assert_eq!(
db.factory.provider().unwrap().get_prune_checkpoint(PruneSegment::Headers).unwrap(),
Some(PruneCheckpoint {
9 changes: 1 addition & 8 deletions crates/stages/stages/src/stages/headers.rs
@@ -343,8 +343,7 @@ where
(input.unwind_to + 1)..,
)?;
provider.tx_ref().unwind_table_by_num::<tables::CanonicalHeaders>(input.unwind_to)?;
// Note: We no longer unwind HeaderTerminalDifficulties table after Paris/Merge
// as it's read-only and kept for backward compatibility

let unfinalized_headers_unwound =
provider.tx_ref().unwind_table_by_num::<tables::Headers>(input.unwind_to)?;

@@ -572,12 +571,6 @@ mod tests {
self.db.ensure_no_entry_above::<tables::CanonicalHeaders, _>(block, |key| key)?;
self.db.ensure_no_entry_above::<tables::Headers, _>(block, |key| key)?;

// Note: We no longer unwind HeaderTerminalDifficulties table after Paris/Merge, so
// we don't need to ensure entry above
// self.db.ensure_no_entry_above::<tables::HeaderTerminalDifficulties, _>(
// block,
// |num| num,
// )?;
Ok(())
}

7 changes: 0 additions & 7 deletions crates/stages/stages/src/test_utils/test_db.rs
@@ -162,14 +162,7 @@ impl TestStageDB {
writer.append_header(header.header(), td, &header.hash())?;
} else {
tx.put::<tables::CanonicalHeaders>(header.number, header.hash())?;
// Note: HeaderTerminalDifficulties table is read-only in database after
// Paris/Merge but still written to static files for historical data
tx.put::<tables::Headers>(header.number, header.header().clone())?;

// Only insert into HeaderTerminalDifficulties for pre-merge blocks
if !MAINNET.is_paris_active_at_block(header.number) {
tx.put::<tables::HeaderTerminalDifficulties>(header.number, td.into())?;
}
}

tx.put::<tables::HeaderNumbers>(header.hash(), header.number)?;
1 change: 0 additions & 1 deletion crates/static-file/static-file/Cargo.toml
@@ -22,7 +22,6 @@ reth-prune-types.workspace = true
reth-primitives-traits.workspace = true
reth-static-file-types.workspace = true
reth-stages-types.workspace = true
reth-chainspec.workspace = true

alloy-primitives.workspace = true

43 changes: 9 additions & 34 deletions crates/static-file/static-file/src/segments/headers.rs
Contributor Author

@mattsse Could you clarify how this approach would work if we attempt to backfill post-merge ranges from the live DB? Wouldn’t we encounter missing HTD (HeaderTerminalDifficulties) values, given we don’t store them anymore post-merge?
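
For context on the question above: the code being removed in the headers segment diff below branched on the merge block, using the chain spec's final Paris total difficulty for post-merge blocks and the stored table entry for pre-merge blocks. A minimal, self-contained sketch of that decision follows; the plain integer types, the helper name, and `main` are illustrative placeholders rather than reth APIs, with `stored_td` standing in for a `tables::HeaderTerminalDifficulties` lookup and `final_paris_td` for `chain_spec.final_paris_total_difficulty()`.

```rust
/// Hypothetical helper mirroring the reverted fallback: resolve the total
/// difficulty to write into the static file for a given block.
fn resolve_total_difficulty(
    block_is_post_merge: bool,    // cf. chain_spec.is_paris_active_at_block(block)
    stored_td: Option<u128>,      // entry from the terminal-difficulties table, if any
    final_paris_td: Option<u128>, // cf. chain_spec.final_paris_total_difficulty()
) -> Result<u128, &'static str> {
    if block_is_post_merge {
        // Post-merge: nothing is stored, so fall back to the chain-spec constant.
        Ok(final_paris_td.unwrap_or_default())
    } else {
        // Pre-merge: a stored entry is expected; a missing one is an error.
        stored_td.ok_or("missing terminal difficulty for pre-merge block")
    }
}

fn main() {
    // Pre-merge block with a stored total difficulty: the stored value wins.
    assert_eq!(resolve_total_difficulty(false, Some(17_000), None), Ok(17_000));
    // Post-merge block: nothing stored, the final Paris difficulty is used instead.
    assert_eq!(resolve_total_difficulty(true, None, Some(58_750)), Ok(58_750));
}
```

The restored path below, by contrast, zips the headers walker with the `HeaderTerminalDifficulties` walker, so it assumes the table holds an entry for every block in the requested range; that assumption is what the backfill question above is probing.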

@@ -1,6 +1,5 @@
use crate::segments::Segment;
use alloy_primitives::BlockNumber;
use reth_chainspec::{EthChainSpec, EthereumHardforks};
use reth_codecs::Compact;
use reth_db_api::{cursor::DbCursorRO, table::Value, tables, transaction::DbTx};
use reth_primitives_traits::NodePrimitives;
@@ -19,7 +18,6 @@ impl<Provider> Segment<Provider> for Headers
where
Provider: StaticFileProviderFactory<Primitives: NodePrimitives<BlockHeader: Compact + Value>>
+ DBProvider
+ ChainSpecProvider<ChainSpec: EthereumHardforks>,
{
fn segment(&self) -> StaticFileSegment {
StaticFileSegment::Headers
@@ -42,46 +40,23 @@ where

let mut header_td_cursor =
provider.tx_ref().cursor_read::<tables::HeaderTerminalDifficulties>()?;
let header_td_walker = header_td_cursor.walk_range(block_range.clone())?;

let mut canonical_headers_cursor =
provider.tx_ref().cursor_read::<tables::CanonicalHeaders>()?;
let canonical_headers_walker = canonical_headers_cursor.walk_range(block_range.clone())?;
let canonical_headers_walker = canonical_headers_cursor.walk_range(block_range)?;

// Get the final Paris difficulty for post-merge blocks
let final_paris_difficulty = provider.chain_spec().final_paris_total_difficulty();

let header_td_walker = header_td_cursor.walk_range(block_range)?;
let mut header_td_iter = header_td_walker.peekable();

for (header_entry, canonical_header_entry) in headers_walker.zip(canonical_headers_walker) {
for ((header_entry, header_td_entry), canonical_header_entry) in
headers_walker.zip(header_td_walker).zip(canonical_headers_walker)
{
let (header_block, header) = header_entry?;
let (header_td_block, header_td) = header_td_entry?;
let (canonical_header_block, canonical_header) = canonical_header_entry?;

debug_assert_eq!(header_block, canonical_header_block);

// For post-merge blocks, use final Paris difficulty
// For pre-merge blocks, get the stored difficulty from the iterator
let total_difficulty = if provider.chain_spec().is_paris_active_at_block(header_block) {
final_paris_difficulty.unwrap_or_default()
} else {
// For pre-merge blocks, we expect an entry in the terminal difficulties table
// Check if we have a matching entry in our iterator
match header_td_iter.peek() {
Some(Ok((td_block, _))) if *td_block == header_block => {
// We have a matching entry, consume it
let (_, header_td) = header_td_iter.next().unwrap()?;
header_td.0
}
_ => {
// No matching entry for this pre-merge block - this shouldn't happen
return Err(reth_storage_errors::provider::ProviderError::HeaderNotFound(
header_block.into(),
));
}
}
};
debug_assert_eq!(header_block, header_td_block);
debug_assert_eq!(header_td_block, canonical_header_block);

static_file_writer.append_header(&header, total_difficulty, &canonical_header)?;
static_file_writer.append_header(&header, header_td.0, &canonical_header)?;
}

Ok(())
2 changes: 0 additions & 2 deletions crates/static-file/static-file/src/static_file_producer.rs
@@ -4,7 +4,6 @@ use crate::{segments, segments::Segment, StaticFileProducerEvent};
use alloy_primitives::BlockNumber;
use parking_lot::Mutex;
use rayon::prelude::*;
use reth_chainspec::EthereumHardforks;
use reth_codecs::Compact;
use reth_db_api::table::Value;
use reth_primitives_traits::NodePrimitives;
@@ -98,7 +97,6 @@ where
>,
> + StageCheckpointReader
+ BlockReader
+ ChainSpecProvider<ChainSpec: EthereumHardforks>,
>,
{
/// Listen for events on the `static_file_producer`.
2 changes: 0 additions & 2 deletions crates/storage/provider/src/providers/database/provider.rs
@@ -2992,8 +2992,6 @@ impl<TX: DbTxMut + DbTx + 'static, N: NodeTypesForProvider + 'static> BlockWrite
// this table in `canonical_hashes_range`.
self.remove::<tables::CanonicalHeaders>(block + 1..)?;
self.remove::<tables::Headers<HeaderTy<N>>>(block + 1..)?;
// Note: HeaderTerminalDifficulties table is read-only in the database after
// Paris/Merge, so we do not remove entries from it here.

// First transaction to be removed
let unwind_tx_from = self