diff --git a/Cargo.lock b/Cargo.lock
index c3bfe9bb938..65fa7f81ab5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -129,7 +129,7 @@ dependencies = [
  "hashbrown 0.16.0",
  "indexmap 2.12.0",
  "itoa",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "paste",
  "rand 0.9.2",
  "ruint",
@@ -1634,7 +1634,7 @@ dependencies = [
  "coins-core",
  "digest 0.10.7",
  "hmac",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "serde",
  "sha2",
  "thiserror 1.0.69",
@@ -2703,25 +2703,12 @@ dependencies = [
  "der",
  "digest 0.10.7",
  "elliptic-curve",
- "rfc6979 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rfc6979",
  "serdect",
  "signature",
  "spki",
 ]

-[[package]]
-name = "ecdsa"
-version = "0.16.9"
-source = "git+https://github.com/sp1-patches/signatures.git?tag=patch-16.9-sp1-4.1.0#1880299a48fe7ef249edaa616fd411239fb5daf1"
-dependencies = [
- "der",
- "digest 0.10.7",
- "elliptic-curve",
- "rfc6979 0.4.0 (git+https://github.com/sp1-patches/signatures.git?tag=patch-16.9-sp1-4.1.0)",
- "signature",
- "spki",
-]
-
 [[package]]
 name = "educe"
 version = "0.6.0"
@@ -2758,7 +2745,6 @@ dependencies = [
  "ff 0.13.1",
  "generic-array 0.14.9",
  "group 0.13.0",
- "hkdf",
  "pem-rfc7468",
  "pkcs8",
  "rand_core 0.6.4",
@@ -2813,7 +2799,7 @@ dependencies = [
  "base64 0.21.7",
  "bytes",
  "hex",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "log",
  "rand 0.8.5",
  "rlp 0.5.2",
@@ -3126,7 +3112,7 @@ dependencies = [
  "elliptic-curve",
  "ethabi",
  "generic-array 0.14.9",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "num_enum 0.7.5",
  "once_cell",
  "open-fastrlp",
@@ -3558,7 +3544,7 @@ dependencies = [
  "ethrex-crypto",
  "ethrex-rlp",
  "hex",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "lambdaworks-math 0.13.0",
  "lazy_static",
  "malachite 0.6.1",
@@ -4681,15 +4667,6 @@ version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"

-[[package]]
-name = "hkdf"
-version = "0.12.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7"
-dependencies = [
- "hmac",
-]
-
 [[package]]
 name = "hmac"
 version = "0.12.1"
@@ -5468,7 +5445,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b"
 dependencies = [
  "cfg-if 1.0.4",
- "ecdsa 0.16.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ecdsa",
  "elliptic-curve",
  "once_cell",
  "serdect",
@@ -5476,21 +5453,6 @@ dependencies = [
  "signature",
 ]

-[[package]]
-name = "k256"
-version = "0.13.4"
-source = "git+https://github.com/sp1-patches/elliptic-curves?tag=patch-k256-13.4-sp1-5.0.0#f7d8998e05d8cbcbd8e543eba1030a7385011fa8"
-dependencies = [
- "cfg-if 1.0.4",
- "ecdsa 0.16.9 (git+https://github.com/sp1-patches/signatures.git?tag=patch-16.9-sp1-4.1.0)",
- "elliptic-curve",
- "hex",
- "once_cell",
- "sha2",
- "signature",
- "sp1-lib",
-]
-
 [[package]]
 name = "keccak"
 version = "0.1.5"
@@ -6733,7 +6695,7 @@ version = "0.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b"
 dependencies = [
- "ecdsa 0.16.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ecdsa",
  "elliptic-curve",
  "primeorder",
  "sha2",
@@ -8221,15 +8183,6 @@ dependencies = [
  "subtle",
 ]

-[[package]]
-name = "rfc6979"
-version = "0.4.0"
-source = "git+https://github.com/sp1-patches/signatures.git?tag=patch-16.9-sp1-4.1.0#1880299a48fe7ef249edaa616fd411239fb5daf1"
-dependencies = [
- "hmac",
- "subtle",
-]
-
 [[package]]
 name = "rgb"
 version = "0.8.52"
@@ -9257,19 +9210,19 @@ dependencies = [

 [[package]]
 name = "secp256k1"
 version = "0.30.0"
-source = "git+https://github.com/sp1-patches/rust-secp256k1?tag=patch-0.30.0-sp1-5.0.0#04d87db04bcc2dc5dd8e1ab3f046cc655440d07a"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252"
 dependencies = [
  "bitcoin_hashes",
- "cfg-if 1.0.4",
- "k256 0.13.4 (git+https://github.com/sp1-patches/elliptic-curves?tag=patch-k256-13.4-sp1-5.0.0)",
  "rand 0.8.5",
  "secp256k1-sys",
 ]

 [[package]]
 name = "secp256k1-sys"
-version = "0.10.0"
-source = "git+https://github.com/sp1-patches/rust-secp256k1?tag=patch-0.30.0-sp1-5.0.0#04d87db04bcc2dc5dd8e1ab3f046cc655440d07a"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9"
 dependencies = [
  "cc",
 ]
@@ -9785,7 +9738,7 @@ dependencies = [
  "hashbrown 0.14.5",
  "hex",
  "itertools 0.13.0",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "num",
  "num_cpus",
  "p256",
@@ -9853,7 +9806,7 @@ dependencies = [
  "elliptic-curve",
  "generic-array 1.1.0",
  "itertools 0.13.0",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "num",
  "p256",
  "p3-field",
@@ -9881,7 +9834,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fce8ad0f153443d09d398eccb650a0b2dcbf829470e394e4bf60ec4379c7af93"
 dependencies = [
  "bincode",
- "elliptic-curve",
  "serde",
  "sp1-primitives",
 ]
@@ -10106,7 +10058,7 @@ dependencies = [
  "hex",
  "indicatif",
  "itertools 0.13.0",
- "k256 0.13.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "k256",
  "p3-baby-bear",
  "p3-field",
  "p3-fri",
diff --git a/crates/l2/sequencer/l1_committer.rs b/crates/l2/sequencer/l1_committer.rs
index 27bdb0889d0..1410b217c5e 100644
--- a/crates/l2/sequencer/l1_committer.rs
+++ b/crates/l2/sequencer/l1_committer.rs
@@ -125,10 +125,6 @@ pub struct L1Committer {
     /// It is used to ensure state availability for batch preparation and
     /// witness generation.
     current_checkpoint_store: Store,
-    /// Blockchain instance using the current checkpoint store.
-    ///
-    /// It is used for witness generation.
-    current_checkpoint_blockchain: Arc<Blockchain>,
     /// Network genesis.
     ///
     /// It is used for creating checkpoints.
@@ -180,14 +176,13 @@ impl L1Committer {
             get_last_committed_batch(&eth_client, committer_config.on_chain_proposer_address)
                 .await?;

-        let (current_checkpoint_store, current_checkpoint_blockchain) =
-            Self::get_checkpoint_from_path(
-                genesis.clone(),
-                blockchain.options.clone(),
-                &checkpoints_dir.join(batch_checkpoint_name(last_committed_batch)),
-                &rollup_store,
-            )
-            .await?;
+        let (current_checkpoint_store, _) = Self::get_checkpoint_from_path(
+            genesis.clone(),
+            blockchain.options.clone(),
+            &checkpoints_dir.join(batch_checkpoint_name(last_committed_batch)),
+            &rollup_store,
+        )
+        .await?;

         Ok(Self {
             eth_client,
@@ -212,7 +207,6 @@ impl L1Committer {
             elasticity_multiplier: proposer_config.elasticity_multiplier,
             git_commit_hash: get_git_commit_hash(),
             current_checkpoint_store,
-            current_checkpoint_blockchain,
             genesis,
             checkpoints_dir,
         })
     }
@@ -266,12 +260,18 @@ impl L1Committer {
         let l1_fork = get_l1_active_fork(&self.eth_client, self.osaka_activation_time)
             .await
             .map_err(CommitterError::EthClientError)?;
+
         let batch = match self
             .rollup_store
             .get_batch(batch_to_commit, l1_fork)
             .await?
         {
-            Some(batch) => batch,
+            Some(batch) => {
+                // If the batch is already sealed, we need to ensure its
+                // checkpoint is available.
+                self.check_current_checkpoint(&batch).await?;
+                batch
+            }
             None => {
                 let Some(batch) = self.produce_batch(batch_to_commit).await? else {
                     // The batch is empty (there are no new blocks since the last batch)
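Note (editor's sketch, not part of the diff): the new `Some(batch)` branch above handles restart recovery — a batch can already be sealed in the rollup store while the checkpoint directory backing it was never promoted or has since been deleted. The decision made by `check_current_checkpoint` (added in the next hunk), condensed, with names taken from the diff:

    // Hedged sketch of the three states check_current_checkpoint distinguishes:
    let expected = self.checkpoints_dir.join(batch_checkpoint_name(batch.number));
    if self.current_checkpoint_store.get_store_directory()? == expected {
        // 1. The expected checkpoint is already loaded: nothing to do.
    } else if expected.exists() {
        // 2. It exists on disk but is not loaded: load it via
        //    Self::get_checkpoint_from_path and make it current.
    } else {
        // 3. It is missing: rebuild it by re-executing the batch's blocks
        //    against a one-time copy of the current checkpoint.
    }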
Error: {}", e.to_string() + ) + }); + } + Ok(()) + } + + /// Ensure the checkpoint for the given batch is available locally + /// If not, generate it by re-executing the blocks in the batch + async fn check_current_checkpoint(&mut self, batch: &Batch) -> Result<(), CommitterError> { + info!("Checking checkpoint for batch {}", batch.number); + let batch_checkpoint_name = batch_checkpoint_name(batch.number); + let expected_checkpoint_path = self.checkpoints_dir.join(&batch_checkpoint_name); + + let current_checkpoint_path = self.current_checkpoint_store.get_store_directory()?; + + if current_checkpoint_path == expected_checkpoint_path { + info!( + "Current checkpoint store is already at the expected path for batch {}: {:?}", + batch.number, expected_checkpoint_path + ); + return Ok(()); + } + + if !expected_checkpoint_path.exists() { + info!( + "Checkpoint for batch {} not found locally, generating it by re-executing the blocks in the batch", + batch.number + ); + self.current_checkpoint_store = self.generate_checkpoint_for_batch(batch).await?; + return Ok(()); + } + + info!( + "Checkpoint for batch {} is available at {:?}", + batch.number, expected_checkpoint_path + ); + + // At this step, the checkpoint is available + // We need to load it as the current checkpoint store + let (new_checkpoint_store, _) = Self::get_checkpoint_from_path( + self.genesis.clone(), + self.blockchain.options.clone(), + &expected_checkpoint_path, + &self.rollup_store, + ) + .await?; + + self.current_checkpoint_store = new_checkpoint_store; + + Ok(()) + } + + /// Generate the checkpoint for the given batch by re-executing the blocks in the batch + async fn generate_checkpoint_for_batch( + &mut self, + batch: &Batch, + ) -> Result { + let (one_time_checkpoint_path, one_time_checkpoint_store, one_time_checkpoint_blockchain) = + self.generate_one_time_checkpoint(batch.number).await?; + + self.execute_batch_to_generate_checkpoint( + batch, + one_time_checkpoint_store.clone(), + one_time_checkpoint_blockchain, + ) + .await + .inspect_err(|_| { + let _ = self.remove_one_time_checkpoint(&one_time_checkpoint_path); + })?; + + // Create the next checkpoint from the one-time checkpoint used + let new_checkpoint_path = self + .checkpoints_dir + .join(batch_checkpoint_name(batch.number)); + let (new_checkpoint, _) = self + .create_checkpoint( + &one_time_checkpoint_store, + &new_checkpoint_path, + &self.rollup_store, + ) + .await?; + + // Clean up one-time checkpoint + self.remove_one_time_checkpoint(&one_time_checkpoint_path)?; + Ok(new_checkpoint) + } + + async fn execute_batch_to_generate_checkpoint( + &mut self, + batch: &Batch, + one_time_checkpoint_store: Store, + one_time_checkpoint_blockchain: Arc, + ) -> Result<(), CommitterError> { + info!("Generating missing checkpoint for batch {}", batch.number); + + // Fetch the blocks in the batch along with their respective fee configs + let (blocks, fee_configs) = fetch_blocks_with_respective_fee_configs::( + batch, + &self.store, + &self.rollup_store, + ) + .await?; + + // Re-execute the blocks in the batch to recreate the checkpoint + for (i, block) in blocks.iter().enumerate() { + let fee_config = fee_configs.get(i).ok_or(ChainError::WitnessGeneration( + "FeeConfig not found for witness generation".to_string(), + ))?; + + let parent_header = self + .store + .get_block_header_by_hash(block.header.parent_hash)? + .ok_or(CommitterError::ChainError(ChainError::ParentNotFound))?; + + // Here we use the checkpoint store because we need the previous + // state available (i.e. 
not pruned) for re-execution. + let vm_db = StoreVmDatabase::new(one_time_checkpoint_store.clone(), parent_header); + + let mut vm = Evm::new_for_l2(vm_db, *fee_config)?; + + vm.execute_block(block)?; + + let account_updates = vm.get_state_transitions()?; + let account_updates_list = one_time_checkpoint_store + .apply_account_updates_batch(block.header.parent_hash, &account_updates)? + .ok_or(CommitterError::FailedToGetInformationFromStorage( + "no account updated".to_owned(), + ))?; + + let mut receipts = vec![]; + for (index, _) in block.body.transactions.iter().enumerate() { + let receipt = self + .store + .get_receipt(block.header.number, index.try_into()?) + .await? + .ok_or(CommitterError::RetrievalError( + "Transactions in a block should have a receipt".to_owned(), + ))?; + receipts.push(receipt); + } + + one_time_checkpoint_blockchain.store_block( + block.clone(), + account_updates_list, + BlockExecutionResult { + receipts, + requests: vec![], + }, + )?; + } + + Ok(()) + } + async fn produce_batch(&mut self, batch_number: u64) -> Result, CommitterError> { let last_committed_blocks = self .rollup_store @@ -337,12 +520,6 @@ impl L1Committer { )))?; let first_block_to_commit = last_block + 1; - // We need to guarantee that the checkpoint path is new - // to avoid causing a lock error under rocksdb feature. - let new_checkpoint_path = self - .checkpoints_dir - .join(batch_checkpoint_name(batch_number)); - // For re-execution we need to use a checkpoint to the previous state // (i.e. checkpoint of the state to the latest block from the previous // batch, or the state of the genesis if this is the first batch). @@ -350,24 +527,24 @@ impl L1Committer { // struct, but we need to create a one-time copy of it because // we still need to use the current checkpoint store later for witness // generation. 
- - let (new_checkpoint_store, new_checkpoint_blockchain) = self - .create_checkpoint( - &self.current_checkpoint_store, - &new_checkpoint_path, - &self.rollup_store, - ) - .await?; + let ( + one_time_checkpoint_path, + one_time_checkpoint_store, + one_time_new_checkpoint_blockchain, + ) = self.generate_one_time_checkpoint(batch_number).await?; // Try to prepare batch let result = self .prepare_batch_from_block( *last_block, batch_number, - new_checkpoint_store.clone(), - new_checkpoint_blockchain.clone(), + one_time_checkpoint_store.clone(), + one_time_new_checkpoint_blockchain, ) - .await?; + .await + .inspect_err(|_| { + let _ = self.remove_one_time_checkpoint(&one_time_checkpoint_path); + })?; let Some(( blobs_bundle, @@ -377,8 +554,7 @@ impl L1Committer { last_block_of_batch, )) = result else { - debug!("No new blocks to commit, skipping"); - + self.remove_one_time_checkpoint(&one_time_checkpoint_path)?; return Ok(None); }; @@ -394,15 +570,6 @@ impl L1Committer { verify_tx: None, }; - self.rollup_store.seal_batch(batch.clone()).await?; - - debug!( - first_block = batch.first_block, - last_block = batch.last_block, - "Batch {} stored in database", - batch.number - ); - info!( first_block = batch.first_block, last_block = batch.last_block, @@ -410,7 +577,23 @@ impl L1Committer { batch.number, ); - self.generate_and_store_batch_prover_input(&batch).await?; + let batch_prover_input = self.generate_batch_prover_input(&batch).await?; + + self.rollup_store + .seal_batch_with_prover_input(batch.clone(), &self.git_commit_hash, batch_prover_input) + .await?; + + // Create the next checkpoint from the one-time checkpoint used + let new_checkpoint_path = self + .checkpoints_dir + .join(batch_checkpoint_name(batch_number)); + let (new_checkpoint_store, _) = self + .create_checkpoint( + &one_time_checkpoint_store, + &new_checkpoint_path, + &self.rollup_store, + ) + .await?; // We need to update the current checkpoint after generating the witness // with it, and before sending the commitment. @@ -420,7 +603,8 @@ impl L1Committer { // but the directory is not deleted until the batch it serves in is verified // on L1. self.current_checkpoint_store = new_checkpoint_store; - self.current_checkpoint_blockchain = new_checkpoint_blockchain; + + self.remove_one_time_checkpoint(&one_time_checkpoint_path)?; Ok(Some(batch)) } @@ -721,56 +905,38 @@ impl L1Committer { ))) } - async fn generate_and_store_batch_prover_input( + async fn generate_batch_prover_input( &self, batch: &Batch, - ) -> Result<(), CommitterError> { - if self + ) -> Result { + if let Some(prover_input) = self .rollup_store .get_prover_input_by_batch_and_version(batch.number, &self.git_commit_hash) .await? 
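Note (editor's sketch, not part of the diff): `produce_batch` now drives every one-time checkpoint through the same lifecycle. Condensed, with error handling and unrelated steps elided and all names taken from the code above:

    // 1. Copy the current checkpoint into a random-suffixed temp directory.
    let (tmp_path, tmp_store, tmp_blockchain) =
        self.generate_one_time_checkpoint(batch_number).await?;
    // 2. Prepare the batch against the throwaway copy, deleting it on failure.
    let result = self
        .prepare_batch_from_block(*last_block, batch_number, tmp_store.clone(), tmp_blockchain)
        .await
        .inspect_err(|_| {
            let _ = self.remove_one_time_checkpoint(&tmp_path);
        })?;
    // 3. On success, promote the temp state into the durable per-batch
    //    directory and make it the current checkpoint store.
    let new_checkpoint_path = self.checkpoints_dir.join(batch_checkpoint_name(batch_number));
    let (new_checkpoint_store, _) = self
        .create_checkpoint(&tmp_store, &new_checkpoint_path, &self.rollup_store)
        .await?;
    self.current_checkpoint_store = new_checkpoint_store;
    // 4. Always delete the temp directory afterwards.
    self.remove_one_time_checkpoint(&tmp_path)?;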
@@ -721,56 +905,38 @@ impl L1Committer {
         )))
     }

-    async fn generate_and_store_batch_prover_input(
+    async fn generate_batch_prover_input(
         &self,
         batch: &Batch,
-    ) -> Result<(), CommitterError> {
-        if self
+    ) -> Result<ProverInputData, CommitterError> {
+        if let Some(prover_input) = self
             .rollup_store
             .get_prover_input_by_batch_and_version(batch.number, &self.git_commit_hash)
             .await?
-            .is_some()
         {
             info!(
                 "Prover input for batch {} and version {} already exists, skipping generation",
                 batch.number, self.git_commit_hash
            );
-            return Ok(());
+            return Ok(prover_input);
         }

         let (blocks, fee_configs) = fetch_blocks_with_respective_fee_configs::<CommitterError>(
-            batch.number,
+            batch,
             &self.store,
             &self.rollup_store,
         )
         .await?;

-        let rand_suffix: u32 = rand::thread_rng().r#gen();
-        let one_time_checkpoint_path = self.checkpoints_dir.join(format!(
-            "temp_checkpoint_witness_{}_{rand_suffix}",
-            batch.number
-        ));
-        // We need to create a one-time checkpoint copy because if witness generation fails the checkpoint would be modified
-        let (_, one_time_checkpoint_blockchain) = self
-            .create_checkpoint(
-                &self.current_checkpoint_store,
-                &one_time_checkpoint_path,
-                &self.rollup_store,
-            )
-            .await?;
+        let (one_time_checkpoint_path, _, one_time_checkpoint_blockchain) =
+            self.generate_one_time_checkpoint(batch.number).await?;

         let result = one_time_checkpoint_blockchain
             .generate_witness_for_blocks_with_fee_configs(&blocks, Some(&fee_configs))
             .await
             .map_err(CommitterError::FailedToGenerateBatchWitness);

-        if one_time_checkpoint_path.exists() {
-            let _ = remove_dir_all(&one_time_checkpoint_path).inspect_err(|e| {
-                error!(
-                    "Failed to remove one-time checkpoint directory at path {one_time_checkpoint_path:?}. Should be removed manually. Error: {}", e.to_string()
-                )
-            });
-        }
+        self.remove_one_time_checkpoint(&one_time_checkpoint_path)?;

         let batch_witness = result?;

@@ -827,15 +993,7 @@
             fee_configs,
         };

-        self.rollup_store
-            .store_prover_input_by_batch_and_version(
-                batch.number,
-                &self.git_commit_hash,
-                prover_input,
-            )
-            .await?;
-
-        Ok(())
+        Ok(prover_input)
     }

     /// Creates a checkpoint of the given store at the specified path.
@@ -917,7 +1075,7 @@
         let mut encoded_blocks: Vec<u8> = Vec::new();

         let (blocks, _) = fetch_blocks_with_respective_fee_configs::<CommitterError>(
-            batch.number,
+            batch,
             &self.store,
             &self.rollup_store,
         )
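Note (editor's sketch, not part of the diff): both call sites above now pass the whole `Batch` to `fetch_blocks_with_respective_fee_configs`, whose new signature appears in crates/l2/sequencer/utils.rs below. The helper derives the block range from the batch's own fields instead of reading the stored per-batch block list back from the rollup store, so it no longer depends on that lookup. A hypothetical call:

    let (blocks, fee_configs) = fetch_blocks_with_respective_fee_configs::<CommitterError>(
        &batch,          // &Batch instead of the old batch_number: u64
        &self.store,
        &self.rollup_store,
    )
    .await?;
    debug_assert_eq!(blocks.len(), fee_configs.len()); // one fee config per block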
diff --git a/crates/l2/sequencer/utils.rs b/crates/l2/sequencer/utils.rs
index 0b19e610d56..2af59ac5687 100644
--- a/crates/l2/sequencer/utils.rs
+++ b/crates/l2/sequencer/utils.rs
@@ -1,5 +1,6 @@
 use aligned_sdk::common::types::Network;
 use ethrex_common::types::Block;
+use ethrex_common::types::batch::Batch;
 use ethrex_common::types::fee_config::FeeConfig;
 use ethrex_common::utils::keccak;
 use ethrex_common::{Address, H256, types::TxType};
@@ -143,35 +144,28 @@ where
 }

 pub async fn fetch_blocks_with_respective_fee_configs<E>(
-    batch_number: u64,
+    batch: &Batch,
     store: &Store,
     rollup_store: &StoreRollup,
 ) -> Result<(Vec<Block>, Vec<FeeConfig>), E>
 where
     E: From<StoreError> + From<RollupStoreError>,
 {
-    let batch_blocks = rollup_store
-        .get_block_numbers_by_batch(batch_number)
-        .await?
-        .ok_or(RollupStoreError::Custom(
-            "failed to retrieve data from storage".to_string(),
-        ))?;
-
     let mut blocks = Vec::new();
     let mut fee_configs = vec![];

-    for block_number in batch_blocks {
+    for block_number in batch.first_block..=batch.last_block {
         let block_header = store
             .get_block_header(block_number)?
             .ok_or(StoreError::Custom(
-                "failed to retrieve data from storage".to_string(),
+                "failed to retrieve block header from storage".to_string(),
             ))?;

         let block_body = store
             .get_block_body(block_number)
             .await?
             .ok_or(StoreError::Custom(
-                "failed to retrieve data from storage".to_string(),
+                "failed to retrieve block body from storage".to_string(),
             ))?;

         let block = Block::new(block_header, block_body);
diff --git a/crates/l2/storage/src/api.rs b/crates/l2/storage/src/api.rs
index 1f489cc7ae4..37e761d0f55 100644
--- a/crates/l2/storage/src/api.rs
+++ b/crates/l2/storage/src/api.rs
@@ -60,6 +60,13 @@ pub trait StoreEngineRollup: Debug + Send + Sync {

     async fn seal_batch(&self, batch: Batch) -> Result<(), RollupStoreError>;

+    async fn seal_batch_with_prover_input(
+        &self,
+        batch: Batch,
+        prover_version: &str,
+        prover_input_data: ProverInputData,
+    ) -> Result<(), RollupStoreError>;
+
     async fn get_last_batch_number(&self) -> Result<Option<u64>, RollupStoreError>;

     async fn get_verify_tx_by_batch(
diff --git a/crates/l2/storage/src/store.rs b/crates/l2/storage/src/store.rs
index 99ba432091e..c8b9f3d977a 100644
--- a/crates/l2/storage/src/store.rs
+++ b/crates/l2/storage/src/store.rs
@@ -230,6 +230,18 @@ impl Store {
         self.engine.seal_batch(batch).await
     }

+    /// Seals a batch along with its prover input data in one atomic operation.
+    pub async fn seal_batch_with_prover_input(
+        &self,
+        batch: Batch,
+        prover_version: &str,
+        prover_input: ProverInputData,
+    ) -> Result<(), RollupStoreError> {
+        self.engine
+            .seal_batch_with_prover_input(batch, prover_version, prover_input)
+            .await
+    }
+
     pub async fn update_operations_count(
         &self,
         transaction_inc: u64,
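Note (editor's sketch, not part of the diff): the point of `seal_batch_with_prover_input` is to close a crash window that existed with two independent writes. A hedged before/after illustration using the API above:

    // Before (two writes; a crash between them left a sealed batch with no
    // prover input for the committed version):
    //     rollup_store.seal_batch(batch.clone()).await?;
    //     rollup_store
    //         .store_prover_input_by_batch_and_version(batch.number, &git_commit_hash, prover_input)
    //         .await?;
    //
    // After (single call; the SQL backend below runs both writes inside one
    // database transaction):
    rollup_store
        .seal_batch_with_prover_input(batch, &git_commit_hash, prover_input)
        .await?;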
diff --git a/crates/l2/storage/src/store_db/in_memory.rs b/crates/l2/storage/src/store_db/in_memory.rs
index ff588ca5b2d..d23717a2b68 100644
--- a/crates/l2/storage/src/store_db/in_memory.rs
+++ b/crates/l2/storage/src/store_db/in_memory.rs
@@ -325,6 +325,25 @@ impl StoreEngineRollup for Store {
         Ok(())
     }

+    async fn seal_batch_with_prover_input(
+        &self,
+        batch: Batch,
+        prover_version: &str,
+        prover_input_data: ProverInputData,
+    ) -> Result<(), RollupStoreError> {
+        let batch_number = batch.number;
+
+        // Performing these two operations non-atomically is fine here:
+        // the in-memory store loses all its data on restart anyway.
+        self.seal_batch(batch).await?;
+        self.store_prover_input_by_batch_and_version(
+            batch_number,
+            prover_version,
+            prover_input_data,
+        )
+        .await
+    }
+
     async fn delete_proof_by_batch_and_type(
         &self,
         batch_number: u64,
diff --git a/crates/l2/storage/src/store_db/sql.rs b/crates/l2/storage/src/store_db/sql.rs
index 9c96d8d6f76..2d7c11d67a1 100644
--- a/crates/l2/storage/src/store_db/sql.rs
+++ b/crates/l2/storage/src/store_db/sql.rs
@@ -304,6 +304,53 @@ impl SQLStore {
         self.execute_in_tx(queries, db_tx).await
     }
+
+    async fn seal_batch_in_tx(
+        &self,
+        batch: Batch,
+        transaction: &Transaction,
+    ) -> Result<(), RollupStoreError> {
+        let blocks: Vec<u64> = (batch.first_block..=batch.last_block).collect();
+        for block_number in blocks.iter() {
+            self.store_batch_number_by_block_in_tx(*block_number, batch.number, Some(transaction))
+                .await?;
+        }
+        self.store_block_numbers_by_batch_in_tx(batch.number, blocks, Some(transaction))
+            .await?;
+        self.store_message_hashes_by_batch_in_tx(
+            batch.number,
+            batch.message_hashes,
+            Some(transaction),
+        )
+        .await?;
+        self.store_privileged_transactions_hash_by_batch_number_in_tx(
+            batch.number,
+            batch.privileged_transactions_hash,
+            Some(transaction),
+        )
+        .await?;
+        self.store_blob_bundle_by_batch_number_in_tx(
+            batch.number,
+            batch.blobs_bundle.blobs,
+            Some(transaction),
+        )
+        .await?;
+        self.store_state_root_by_batch_number_in_tx(
+            batch.number,
+            batch.state_root,
+            Some(transaction),
+        )
+        .await?;
+        if let Some(commit_tx) = batch.commit_tx {
+            self.store_commit_tx_by_batch_in_tx(batch.number, commit_tx, Some(transaction))
+                .await?;
+        }
+        if let Some(verify_tx) = batch.verify_tx {
+            self.store_verify_tx_by_batch_in_tx(batch.number, verify_tx, Some(transaction))
+                .await?;
+        }
+        Ok(())
+    }
 }

 fn read_from_row_int(row: &Row, index: i32) -> Result<i64, RollupStoreError> {
@@ -656,48 +703,33 @@ impl StoreEngineRollup for SQLStore {
     }

     async fn seal_batch(&self, batch: Batch) -> Result<(), RollupStoreError> {
-        let blocks: Vec<u64> = (batch.first_block..=batch.last_block).collect();
         let conn = self.write_conn.lock().await;
         let transaction = conn.transaction().await?;
-        for block_number in blocks.iter() {
-            self.store_batch_number_by_block_in_tx(*block_number, batch.number, Some(&transaction))
-                .await?;
-        }
-        self.store_block_numbers_by_batch_in_tx(batch.number, blocks, Some(&transaction))
-            .await?;
-        self.store_message_hashes_by_batch_in_tx(
-            batch.number,
-            batch.message_hashes,
-            Some(&transaction),
-        )
-        .await?;
-        self.store_privileged_transactions_hash_by_batch_number_in_tx(
-            batch.number,
-            batch.privileged_transactions_hash,
-            Some(&transaction),
-        )
-        .await?;
-        self.store_blob_bundle_by_batch_number_in_tx(
-            batch.number,
-            batch.blobs_bundle.blobs,
-            Some(&transaction),
-        )
-        .await?;
-        self.store_state_root_by_batch_number_in_tx(
+        self.seal_batch_in_tx(batch, &transaction).await?;
+
+        transaction.commit().await.map_err(RollupStoreError::from)
+    }
+
+    async fn seal_batch_with_prover_input(
+        &self,
+        batch: Batch,
+        prover_version: &str,
+        prover_input: ProverInputData,
+    ) -> Result<(), RollupStoreError> {
+        let conn = self.write_conn.lock().await;
+        let transaction = conn.transaction().await?;
+
+        self.store_prover_input_by_batch_and_version_in_tx(
             batch.number,
-            batch.state_root,
+            prover_version,
+            prover_input,
             Some(&transaction),
         )
         .await?;
-        if let Some(commit_tx) = batch.commit_tx {
-            self.store_commit_tx_by_batch_in_tx(batch.number, commit_tx, Some(&transaction))
-                .await?;
-        }
-        if let Some(verify_tx) = batch.verify_tx {
-            self.store_verify_tx_by_batch_in_tx(batch.number, verify_tx, Some(&transaction))
-                .await?;
-        }
+
+        self.seal_batch_in_tx(batch, &transaction).await?;
+
         transaction.commit().await.map_err(RollupStoreError::from)
     }
diff --git a/crates/storage/api.rs b/crates/storage/api.rs
index 664f72612ae..22c9a1f3fdd 100644
--- a/crates/storage/api.rs
+++ b/crates/storage/api.rs
@@ -3,7 +3,7 @@ use ethrex_common::types::{
     Block, BlockBody, BlockHash, BlockHeader, BlockNumber, ChainConfig, Code, Index, Receipt,
     Transaction,
 };
-use std::path::Path;
+use std::path::{Path, PathBuf};
 use std::{fmt::Debug, panic::RefUnwindSafe};

 use crate::UpdateBatch;
@@ -381,6 +381,8 @@ pub trait StoreEngine: Debug + Send + Sync + RefUnwindSafe {

     async fn create_checkpoint(&self, path: &Path) -> Result<(), StoreError>;

+    fn get_store_directory(&self) -> Result<PathBuf, StoreError>;
+
     fn flatkeyvalue_computed(&self, _account: H256) -> Result<bool, StoreError> {
         Ok(false)
     }
diff --git a/crates/storage/store.rs b/crates/storage/store.rs
index c9743315474..a7ef658a509 100644
--- a/crates/storage/store.rs
+++ b/crates/storage/store.rs
@@ -18,7 +18,7 @@ use ethrex_crypto::keccak::keccak_hash;
 use ethrex_rlp::decode::RLPDecode;
 use ethrex_rlp::encode::RLPEncode;
 use ethrex_trie::{Nibbles, NodeRLP, Trie, TrieLogger, TrieNode, TrieWitness};
-use std::{collections::hash_map::Entry, sync::Arc};
+use std::{collections::hash_map::Entry, path::PathBuf, sync::Arc};
 use std::{
     collections::{BTreeMap, HashMap},
     sync::Mutex,
@@ -1365,6 +1365,10 @@ impl Store {
     pub async fn create_checkpoint(&self, path: impl AsRef<Path>) -> Result<(), StoreError> {
         self.engine.create_checkpoint(path.as_ref()).await
     }
+
+    pub fn get_store_directory(&self) -> Result<PathBuf, StoreError> {
+        self.engine.get_store_directory()
+    }
 }

 pub struct AccountProof {
diff --git a/crates/storage/store_db/in_memory.rs b/crates/storage/store_db/in_memory.rs
index 811a56328c3..eba25d5ab22 100644
--- a/crates/storage/store_db/in_memory.rs
+++ b/crates/storage/store_db/in_memory.rs
@@ -14,7 +14,7 @@ use ethrex_trie::{InMemoryTrieDB, Nibbles, Trie, db::NodeMap};
 use std::{
     collections::HashMap,
     fmt::Debug,
-    path::Path,
+    path::{Path, PathBuf},
     sync::{Arc, Mutex, MutexGuard},
 };

@@ -739,6 +739,10 @@ impl StoreEngine for Store {
         // Silently ignoring the request to create a checkpoint is harmless
         Ok(())
     }
+
+    fn get_store_directory(&self) -> Result<PathBuf, StoreError> {
+        Ok(PathBuf::from("in_memory_store"))
+    }
 }

 impl Debug for Store {
diff --git a/crates/storage/store_db/rocksdb.rs b/crates/storage/store_db/rocksdb.rs
index d2273cd3895..4792bd446fd 100644
--- a/crates/storage/store_db/rocksdb.rs
+++ b/crates/storage/store_db/rocksdb.rs
@@ -20,7 +20,7 @@ use rocksdb::{
 };
 use std::{
     collections::HashSet,
-    path::Path,
+    path::{Path, PathBuf},
     sync::{
         Arc, Mutex,
         mpsc::{SyncSender, sync_channel},
@@ -2029,6 +2029,11 @@ impl StoreEngine for Store {
         Ok(())
     }

+    fn get_store_directory(&self) -> Result<PathBuf, StoreError> {
+        let path = self.db.path();
+        Ok(path.to_path_buf())
+    }
+
     fn flatkeyvalue_computed(&self, account: H256) -> Result<bool, StoreError> {
         let account_nibbles = Nibbles::from_bytes(account.as_bytes());
         let last_computed_flatkeyvalue = self.last_written()?;
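Note (editor's sketch, not part of the diff): `get_store_directory` exists so the committer can compare where the currently loaded checkpoint store lives with the per-batch path it expects (see check_current_checkpoint in l1_committer.rs above). The RocksDB engine reports its live database directory; the in-memory engine returns the fixed placeholder "in_memory_store", which never equals a real checkpoint path and therefore always falls through to the load-or-rebuild logic. Hedged usage:

    let current = self.current_checkpoint_store.get_store_directory()?;
    let expected = self.checkpoints_dir.join(batch_checkpoint_name(batch.number));
    if current != expected {
        // Load the checkpoint at `expected` if it exists on disk, or rebuild
        // it by re-executing the batch's blocks.
    }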