1 change: 0 additions & 1 deletion crates/cli/commands/src/stage/drop.rs
@@ -70,7 +70,6 @@ impl<C: ChainSpecParser> Command<C> {
StageEnum::Headers => {
tx.clear::<tables::CanonicalHeaders>()?;
tx.clear::<tables::Headers<HeaderTy<N>>>()?;
tx.clear::<tables::HeaderTerminalDifficulties>()?;
tx.clear::<tables::HeaderNumbers>()?;
reset_stage_checkpoint(tx, StageId::Headers)?;

7 changes: 0 additions & 7 deletions crates/cli/commands/src/stage/dump/execution.rs
@@ -69,13 +69,6 @@ fn import_tables_with_range<N: NodeTypesWithDB>(
to,
)
})??;
output_db.update(|tx| {
tx.import_table_with_range::<tables::HeaderTerminalDifficulties, _>(
&db_tool.provider_factory.db_ref().tx()?,
Some(from),
to,
)
})??;
output_db.update(|tx| {
tx.import_table_with_range::<tables::Headers, _>(
&db_tool.provider_factory.db_ref().tx()?,
1 change: 0 additions & 1 deletion crates/cli/commands/src/test_vectors/tables.rs
@@ -69,7 +69,6 @@ pub fn generate_vectors(mut tables: Vec<String>) -> Result<()> {

generate!([
(CanonicalHeaders, PER_TABLE, TABLE),
(HeaderTerminalDifficulties, PER_TABLE, TABLE),
(HeaderNumbers, PER_TABLE, TABLE),
(Headers<Header>, PER_TABLE, TABLE),
(BlockBodyIndices, PER_TABLE, TABLE),
45 changes: 16 additions & 29 deletions crates/prune/prune/src/segments/static_file/headers.rs
@@ -21,7 +21,9 @@ use std::num::NonZeroUsize;
use tracing::trace;

/// Number of header tables to prune in one step
const HEADER_TABLES_TO_PRUNE: usize = 3;
///
/// Note: `HeaderTerminalDifficulties` is no longer pruned after Paris/Merge as it's read-only
const HEADER_TABLES_TO_PRUNE: usize = 2;

#[derive(Debug)]
pub struct Headers<N> {
@@ -72,9 +74,6 @@
.tx_ref()
.cursor_write::<tables::Headers<<Provider::Primitives as NodePrimitives>::BlockHeader>>(
)?;

let mut header_tds_cursor =
provider.tx_ref().cursor_write::<tables::HeaderTerminalDifficulties>()?;
let mut canonical_headers_cursor =
provider.tx_ref().cursor_write::<tables::CanonicalHeaders>()?;

@@ -86,7 +85,6 @@
provider,
&mut limiter,
headers_cursor.walk_range(range.clone())?,
header_tds_cursor.walk_range(range.clone())?,
canonical_headers_cursor.walk_range(range)?,
);

@@ -111,6 +109,7 @@
})
}
}

type Walker<'a, Provider, T> =
RangeWalker<'a, T, <<Provider as DBProvider>::Tx as DbTxMut>::CursorMut<T>>;

@@ -127,7 +126,6 @@
Provider,
tables::Headers<<Provider::Primitives as NodePrimitives>::BlockHeader>,
>,
header_tds_walker: Walker<'a, Provider, tables::HeaderTerminalDifficulties>,
canonical_headers_walker: Walker<'a, Provider, tables::CanonicalHeaders>,
}

@@ -149,10 +147,9 @@
Provider,
tables::Headers<<Provider::Primitives as NodePrimitives>::BlockHeader>,
>,
header_tds_walker: Walker<'a, Provider, tables::HeaderTerminalDifficulties>,
canonical_headers_walker: Walker<'a, Provider, tables::CanonicalHeaders>,
) -> Self {
Self { provider, limiter, headers_walker, header_tds_walker, canonical_headers_walker }
Self { provider, limiter, headers_walker, canonical_headers_walker }
}
}

@@ -168,7 +165,6 @@
}

let mut pruned_block_headers = None;
let mut pruned_block_td = None;
let mut pruned_block_canonical = None;

if let Err(err) = self.provider.tx_ref().prune_table_with_range_step(
@@ -180,15 +176,6 @@
return Some(Err(err.into()))
}

if let Err(err) = self.provider.tx_ref().prune_table_with_range_step(
&mut self.header_tds_walker,
self.limiter,
&mut |_| false,
&mut |row| pruned_block_td = Some(row.0),
) {
return Some(Err(err.into()))
}

if let Err(err) = self.provider.tx_ref().prune_table_with_range_step(
&mut self.canonical_headers_walker,
self.limiter,
@@ -198,7 +185,7 @@
return Some(Err(err.into()))
}

if ![pruned_block_headers, pruned_block_td, pruned_block_canonical].iter().all_equal() {
if ![pruned_block_headers, pruned_block_canonical].iter().all_equal() {
return Some(Err(PrunerError::InconsistentData(
"All headers-related tables should be pruned up to the same height",
)))
@@ -216,7 +203,7 @@ mod tests {
static_file::headers::HEADER_TABLES_TO_PRUNE, PruneInput, PruneLimiter, Segment,
SegmentOutput,
};
use alloy_primitives::{BlockNumber, B256, U256};
use alloy_primitives::{BlockNumber, B256};
use assert_matches::assert_matches;
use reth_db_api::{tables, transaction::DbTx};
use reth_provider::{
@@ -241,18 +228,17 @@
let headers = random_header_range(&mut rng, 0..100, B256::ZERO);
let tx = db.factory.provider_rw().unwrap().into_tx();
for header in &headers {
TestStageDB::insert_header(None, &tx, header, U256::ZERO).unwrap();
TestStageDB::insert_header(None, &tx, header).unwrap();
}
tx.commit().unwrap();

assert_eq!(db.table::<tables::CanonicalHeaders>().unwrap().len(), headers.len());
assert_eq!(db.table::<tables::Headers>().unwrap().len(), headers.len());
assert_eq!(db.table::<tables::HeaderTerminalDifficulties>().unwrap().len(), headers.len());

let test_prune = |to_block: BlockNumber, expected_result: (PruneProgress, usize)| {
let segment = super::Headers::new(db.factory.static_file_provider());
let prune_mode = PruneMode::Before(to_block);
let mut limiter = PruneLimiter::default().set_deleted_entries_limit(10);
let mut limiter = PruneLimiter::default().set_deleted_entries_limit(6);
let input = PruneInput {
previous_checkpoint: db
.factory
@@ -311,10 +297,6 @@
db.table::<tables::Headers>().unwrap().len(),
headers.len() - (last_pruned_block_number + 1) as usize
);
assert_eq!(
db.table::<tables::HeaderTerminalDifficulties>().unwrap().len(),
headers.len() - (last_pruned_block_number + 1) as usize
);
assert_eq!(
db.factory.provider().unwrap().get_prune_checkpoint(PruneSegment::Headers).unwrap(),
Some(PruneCheckpoint {
@@ -325,11 +307,16 @@
);
};

// First test: Prune with limit of 6 entries
// This will prune blocks 0-2 (3 blocks × 2 tables = 6 entries)
test_prune(
3,
(PruneProgress::HasMoreData(PruneInterruptReason::DeletedEntriesLimitReached), 9),
(PruneProgress::HasMoreData(PruneInterruptReason::DeletedEntriesLimitReached), 6),
);
test_prune(3, (PruneProgress::Finished, 3));

// Second test: Prune remaining blocks
// This will prune block 3 (1 block × 2 tables = 2 entries)
test_prune(3, (PruneProgress::Finished, 2));
}

#[test]
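
Note on the updated expectations in the test above: with `HEADER_TABLES_TO_PRUNE` dropping from 3 to 2, each pruned block now removes one row from `Headers` and one from `CanonicalHeaders`, so a deleted-entries limit of 6 covers exactly blocks 0-2 and the follow-up run deletes only 2 entries for block 3. A minimal standalone sketch of that arithmetic (the helper name is illustrative, not part of the crate):

// Illustrative only: the limit math behind the new test expectations.
const HEADER_TABLES_TO_PRUNE: usize = 2;

/// Whole blocks that fit under a deleted-entries limit when every block
/// contributes one row per pruned header table.
fn blocks_prunable(deleted_entries_limit: usize) -> usize {
    deleted_entries_limit / HEADER_TABLES_TO_PRUNE
}

fn main() {
    assert_eq!(blocks_prunable(6), 3); // first run: blocks 0..=2, 6 entries deleted
    assert_eq!(blocks_prunable(2), 1); // second run: block 3, 2 entries deleted
}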
3 changes: 1 addition & 2 deletions crates/prune/types/src/segment.rs
@@ -24,8 +24,7 @@ pub enum PruneSegment {
AccountHistory,
/// Prune segment responsible for the `StorageChangeSets` and `StoragesHistory` tables.
StorageHistory,
/// Prune segment responsible for the `CanonicalHeaders`, `Headers` and
/// `HeaderTerminalDifficulties` tables.
/// Prune segment responsible for the `CanonicalHeaders` and `Headers` tables.
Headers,
/// Prune segment responsible for the `Transactions` table.
Transactions,
14 changes: 1 addition & 13 deletions crates/rpc/rpc-engine-api/src/error.rs
@@ -1,4 +1,4 @@
use alloy_primitives::{B256, U256};
use alloy_primitives::B256;
use alloy_rpc_types_engine::{
ForkchoiceUpdateError, INVALID_FORK_CHOICE_STATE_ERROR, INVALID_FORK_CHOICE_STATE_ERROR_MSG,
INVALID_PAYLOAD_ATTRIBUTES_ERROR, INVALID_PAYLOAD_ATTRIBUTES_ERROR_MSG,
@@ -59,17 +59,6 @@ pub enum EngineApiError {
/// Requested number of items
count: u64,
},
/// Terminal total difficulty mismatch during transition configuration exchange.
#[error(
"invalid transition terminal total difficulty: \
execution: {execution}, consensus: {consensus}"
)]
TerminalTD {
/// Execution terminal total difficulty value.
execution: U256,
/// Consensus terminal total difficulty value.
consensus: U256,
},
/// Terminal block hash mismatch during transition configuration exchange.
#[error(
"invalid transition terminal block hash: \
@@ -202,7 +191,6 @@ impl From<EngineApiError> for jsonrpsee_types::error::ErrorObject<'static> {
}
},
// Any other server error
EngineApiError::TerminalTD { .. } |
EngineApiError::TerminalBlockHash { .. } |
EngineApiError::NewPayload(_) |
EngineApiError::Internal(_) |
14 changes: 1 addition & 13 deletions crates/stages/stages/benches/setup/mod.rs
@@ -1,12 +1,7 @@
#![expect(unreachable_pub)]
use alloy_primitives::{Address, B256, U256};
use alloy_primitives::{Address, B256};
use itertools::concat;
use reth_db::{test_utils::TempDatabase, Database, DatabaseEnv};
use reth_db_api::{
cursor::DbCursorRO,
tables,
transaction::{DbTx, DbTxMut},
};
use reth_primitives_traits::{Account, SealedBlock, SealedHeader};
use reth_provider::{
test_utils::MockNodeTypesWithDB, DBProvider, DatabaseProvider, DatabaseProviderFactory,
@@ -198,13 +193,6 @@ pub(crate) fn txs_testdata(num_blocks: u64) -> TestStageDB {
);

db.insert_blocks(blocks.iter(), StorageKind::Static).unwrap();

// initialize TD
db.commit(|tx| {
let (head, _) = tx.cursor_read::<tables::Headers>()?.first()?.unwrap_or_default();
Ok(tx.put::<tables::HeaderTerminalDifficulties>(head, U256::from(0).into())?)
})
.unwrap();
}

db
2 changes: 1 addition & 1 deletion crates/stages/stages/src/stages/bodies.rs
@@ -580,7 +580,7 @@ mod tests {
..Default::default()
},
);
self.db.insert_headers_with_td(blocks.iter().map(|block| block.sealed_header()))?;
self.db.insert_headers(blocks.iter().map(|block| block.sealed_header()))?;
if let Some(progress) = blocks.get(start as usize) {
// Insert last progress data
{
6 changes: 1 addition & 5 deletions crates/stages/stages/src/stages/era.rs
@@ -384,7 +384,7 @@ mod tests {
..Default::default()
},
);
self.db.insert_headers_with_td(blocks.iter().map(|block| block.sealed_header()))?;
self.db.insert_headers(blocks.iter().map(|block| block.sealed_header()))?;
if let Some(progress) = blocks.get(start as usize) {
// Insert last progress data
{
@@ -499,10 +499,6 @@
.ensure_no_entry_above_by_value::<tables::HeaderNumbers, _>(block, |val| val)?;
self.db.ensure_no_entry_above::<tables::CanonicalHeaders, _>(block, |key| key)?;
self.db.ensure_no_entry_above::<tables::Headers, _>(block, |key| key)?;
self.db.ensure_no_entry_above::<tables::HeaderTerminalDifficulties, _>(
block,
|num| num,
)?;
Ok(())
}

1 change: 0 additions & 1 deletion crates/stages/stages/src/stages/execution.rs
@@ -39,7 +39,6 @@ use super::missing_static_data_error;
/// Input tables:
/// - [`tables::CanonicalHeaders`] get next block to execute.
/// - [`tables::Headers`] get for revm environment variables.
/// - [`tables::HeaderTerminalDifficulties`]
/// - [`tables::BlockBodyIndices`] to get tx number
/// - [`tables::Transactions`] to execute
///
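
As the doc comment above now reads, the execution stage's per-block inputs no longer include total difficulty: it looks up the canonical hash, the header for the revm environment, and the body indices for the transaction range. A hedged sketch of those reads, assuming a transaction type implementing `reth_db_api::transaction::DbTx` (the function name is illustrative, not the stage's actual code):

// Sketch only: the per-block lookups listed in the doc comment, with
// `HeaderTerminalDifficulties` gone. Error handling is simplified.
use reth_db_api::{tables, transaction::DbTx};

fn execution_stage_inputs<Tx: DbTx>(tx: &Tx, number: u64) {
    // canonical hash of the next block to execute
    let _hash = tx.get::<tables::CanonicalHeaders>(number).expect("db read");
    // header used to build the revm environment
    let _header = tx.get::<tables::Headers>(number).expect("db read");
    // body indices give the transaction-number range to execute
    let _indices = tx.get::<tables::BlockBodyIndices>(number).expect("db read");
}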
4 changes: 2 additions & 2 deletions crates/stages/stages/src/stages/finish.rs
@@ -72,7 +72,7 @@ mod tests {
let start = input.checkpoint().block_number;
let mut rng = generators::rng();
let head = random_header(&mut rng, start, None);
self.db.insert_headers_with_td(std::iter::once(&head))?;
self.db.insert_headers(std::iter::once(&head))?;

// use previous progress as seed size
let end = input.target.unwrap_or_default() + 1;
Expand All @@ -82,7 +82,7 @@ mod tests {
}

let mut headers = random_header_range(&mut rng, start + 1..end, head.hash());
self.db.insert_headers_with_td(headers.iter())?;
self.db.insert_headers(headers.iter())?;
headers.insert(0, head);
Ok(headers)
}
9 changes: 1 addition & 8 deletions crates/stages/stages/src/stages/headers.rs
@@ -333,9 +333,6 @@ where
(input.unwind_to + 1)..,
)?;
provider.tx_ref().unwind_table_by_num::<tables::CanonicalHeaders>(input.unwind_to)?;
provider
.tx_ref()
.unwind_table_by_num::<tables::HeaderTerminalDifficulties>(input.unwind_to)?;
let unfinalized_headers_unwound =
provider.tx_ref().unwind_table_by_num::<tables::Headers>(input.unwind_to)?;

@@ -460,7 +457,7 @@ mod tests {
let start = input.checkpoint().block_number;
let headers = random_header_range(&mut rng, 0..start + 1, B256::ZERO);
let head = headers.last().cloned().unwrap();
self.db.insert_headers_with_td(headers.iter())?;
self.db.insert_headers(headers.iter())?;

// use previous checkpoint as seed size
let end = input.target.unwrap_or_default() + 1;
@@ -551,10 +548,6 @@
.ensure_no_entry_above_by_value::<tables::HeaderNumbers, _>(block, |val| val)?;
self.db.ensure_no_entry_above::<tables::CanonicalHeaders, _>(block, |key| key)?;
self.db.ensure_no_entry_above::<tables::Headers, _>(block, |key| key)?;
self.db.ensure_no_entry_above::<tables::HeaderTerminalDifficulties, _>(
block,
|num| num,
)?;
Ok(())
}
