3 changes: 3 additions & 0 deletions Cargo.lock

Generated file; diff not rendered.

6 changes: 6 additions & 0 deletions crates/l2/common/Cargo.toml
@@ -12,12 +12,18 @@ ethrex-rlp.workspace = true
ethrex-storage.workspace = true
ethrex-trie.workspace = true
ethrex-vm.workspace = true

bytes.workspace = true
thiserror.workspace = true
serde.workspace = true
lambdaworks-crypto.workspace = true
sha3.workspace = true
secp256k1.workspace = true
serde_with.workspace = true
# We do not need to enable the "unaligned" feature here since we are just using
# rkyv for ProverInputData, a data structure meant to be used in the host and not
# inside a guest program
rkyv.workspace = true

[lints.clippy]
unwrap_used = "deny"
18 changes: 18 additions & 0 deletions crates/l2/common/src/prover.rs
@@ -1,8 +1,26 @@
use ethrex_common::types::{
Block, blobs_bundle, block_execution_witness::ExecutionWitness, fee_config::FeeConfig,
};
use rkyv::{Archive, Deserialize as RDeserialize, Serialize as RSerialize};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use std::fmt::{Debug, Display};

use crate::calldata::Value;

#[serde_as]
#[derive(Serialize, Deserialize, RDeserialize, RSerialize, Archive)]
pub struct ProverInputData {
pub blocks: Vec<Block>,
pub execution_witness: ExecutionWitness,
pub elasticity_multiplier: u64,
#[serde_as(as = "[_; 48]")]
pub blob_commitment: blobs_bundle::Commitment,
#[serde_as(as = "[_; 48]")]
pub blob_proof: blobs_bundle::Proof,
pub fee_config: FeeConfig,
}
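The `#[serde_as(as = "[_; 48]")]` annotations handle the 48-byte commitment and proof arrays (the same size as the `[0; 48]` placeholders the committer uses for validium later in this PR), since serde's built-in array impls historically stop at length 32. A minimal, self-contained sketch of the same pattern; `serde_json` is used purely for illustration and is not a dependency this PR adds:

use serde::{Deserialize, Serialize};
use serde_with::serde_as;

// Stand-in for blobs_bundle::Commitment (a 48-byte array); Proof is analogous.
type Commitment = [u8; 48];

#[serde_as]
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct Example {
    // serde's derive alone does not cover 48-element arrays, so serde_with's
    // fixed-size array adapter takes care of this field.
    #[serde_as(as = "[_; 48]")]
    blob_commitment: Commitment,
}

fn main() -> Result<(), serde_json::Error> {
    let input = Example { blob_commitment: [7u8; 48] };
    let encoded = serde_json::to_string(&input)?;
    let decoded: Example = serde_json::from_str(&encoded)?;
    assert_eq!(input, decoded);
    Ok(())
}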

/// Enum used to identify the different proving systems.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub enum ProverType {
2 changes: 2 additions & 0 deletions crates/l2/prover/src/guest_program/src/risc0/Cargo.lock

Generated file; diff not rendered.

2 changes: 2 additions & 0 deletions crates/l2/prover/src/guest_program/src/sp1/Cargo.lock

Generated file; diff not rendered.

4 changes: 2 additions & 2 deletions crates/l2/prover/src/prover.rs
@@ -1,5 +1,5 @@
use crate::{backend::Backend, config::ProverConfig, prove, to_batch_proof};
use ethrex_l2::sequencer::proof_coordinator::{ProofData, get_commit_hash};
use ethrex_l2::sequencer::{proof_coordinator::ProofData, utils::get_git_commit_hash};
use ethrex_l2_common::prover::BatchProof;
use guest_program::input::ProgramInput;
use std::time::Duration;
@@ -38,7 +38,7 @@ impl Prover {
proof_coordinator_endpoints: cfg.proof_coordinators,
proving_time_ms: cfg.proving_time_ms,
aligned_mode: cfg.aligned_mode,
commit_hash: get_commit_hash(),
commit_hash: get_git_commit_hash(),
#[cfg(all(feature = "sp1", feature = "gpu"))]
sp1_server: cfg.sp1_server,
}
4 changes: 4 additions & 0 deletions crates/l2/sequencer/errors.rs
@@ -121,6 +121,8 @@ pub enum ProofCoordinatorError {
MissingTDXPrivateKey,
#[error("Metrics error")]
Metrics(#[from] MetricsError),
#[error("Missing prover input for batch {0} (version {1})")]
MissingBatchProverInput(u64, String),
}
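The `{0}`/`{1}` placeholders are thiserror positional formatting: `{0}` is the batch number and `{1}` the prover-input version, which this PR sets to the git commit hash. A standalone sketch of how the variant renders:

use thiserror::Error;

#[derive(Debug, Error)]
enum ExampleError {
    // Mirrors the new ProofCoordinatorError variant added above.
    #[error("Missing prover input for batch {0} (version {1})")]
    MissingBatchProverInput(u64, String),
}

fn main() {
    let err = ExampleError::MissingBatchProverInput(7, "abc1234".to_string());
    assert_eq!(
        err.to_string(),
        "Missing prover input for batch 7 (version abc1234)"
    );
}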

#[derive(Debug, thiserror::Error)]
@@ -261,6 +263,8 @@ pub enum CommitterError {
UnexpectedError(String),
#[error("Unreachable code reached: {0}")]
Unreachable(String),
#[error("Failed to generate batch witness: {0}")]
FailedToGenerateBatchWitness(#[source] ChainError),
}

#[derive(Debug, thiserror::Error)]
110 changes: 91 additions & 19 deletions crates/l2/sequencer/l1_committer.rs
@@ -1,14 +1,14 @@
use crate::{
CommitterConfig, EthConfig, SequencerConfig,
BlockProducerConfig, CommitterConfig, EthConfig, SequencerConfig,
based::sequencer_state::{SequencerState, SequencerStatus},
sequencer::{
errors::CommitterError,
utils::{self, system_now_ms},
utils::{self, fetch_batch_blocks, get_git_commit_hash, system_now_ms},
},
};

use bytes::Bytes;
use ethrex_blockchain::{Blockchain, vm::StoreVmDatabase};
use ethrex_blockchain::{Blockchain, BlockchainType, vm::StoreVmDatabase};
use ethrex_common::{
Address, H256, U256,
types::{
@@ -24,6 +24,7 @@ use ethrex_l2_common::{
PRIVILEGED_TX_BUDGET, compute_privileged_transactions_hash,
get_block_privileged_transactions,
},
prover::ProverInputData,
state_diff::{StateDiff, prepare_state_diff},
};
use ethrex_l2_rpc::signer::{Signer, SignerHealth};
@@ -103,6 +104,10 @@ pub struct L1Committer {
last_committed_batch: u64,
/// Cancellation token for the next inbound InMessage::Commit
cancellation_token: Option<CancellationToken>,
/// Elasticity multiplier for prover input generation
elasticity_multiplier: u64,
/// Git commit hash of the build
git_commit_hash: String,
}

#[derive(Clone, Serialize)]
@@ -125,6 +130,7 @@ impl L1Committer {
#[allow(clippy::too_many_arguments)]
pub async fn new(
committer_config: &CommitterConfig,
proposer_config: &BlockProducerConfig,
eth_config: &EthConfig,
blockchain: Arc<Blockchain>,
store: Store,
@@ -163,6 +169,8 @@
last_committed_batch_timestamp: 0,
last_committed_batch,
cancellation_token: None,
elasticity_multiplier: proposer_config.elasticity_multiplier,
git_commit_hash: get_git_commit_hash(),
})
}

@@ -175,6 +183,7 @@
) -> Result<GenServerHandle<L1Committer>, CommitterError> {
let state = Self::new(
&cfg.l1_committer,
&cfg.block_producer,
&cfg.eth,
blockchain,
store.clone(),
@@ -264,6 +273,15 @@
}
};

info!(
first_block = batch.first_block,
last_block = batch.last_block,
"Generating and storing witness for batch {}",
batch.number,
);

self.generate_and_store_batch_prover_input(&batch).await?;

info!(
first_block = batch.first_block,
last_block = batch.last_block,
@@ -536,6 +554,71 @@
))
}

async fn generate_and_store_batch_prover_input(
&self,
batch: &Batch,
) -> Result<(), CommitterError> {
let blocks =
fetch_batch_blocks::<CommitterError>(batch.number, &self.store, &self.rollup_store)
.await?;

let batch_witness = self
.blockchain
.generate_witness_for_blocks(&blocks)
.await
.map_err(CommitterError::FailedToGenerateBatchWitness)?;

// We still need to differentiate the validium case because for validium
// we are generating the BlobsBundle with BlobsBundle::default which
// sets the commitments and proofs to empty vectors.
let (blob_commitment, blob_proof) = if self.validium {
([0; 48], [0; 48])
} else {
let BlobsBundle {
commitments,
proofs,
..
} = &batch.blobs_bundle;

(
commitments
.first()
.cloned()
.ok_or(CommitterError::Unreachable(
"Blob commitment missing in batch blobs bundle".to_string(),
))?,
proofs.first().cloned().ok_or(CommitterError::Unreachable(
"Blob proof missing in batch blobs bundle".to_string(),
))?,
)
};

let BlockchainType::L2(fee_config) = self.blockchain.options.r#type else {
return Err(CommitterError::Unreachable(
"Batch witness generation is only supported for L2 blockchains".to_string(),
));
};

let prover_input = ProverInputData {
blocks,
execution_witness: batch_witness,
elasticity_multiplier: self.elasticity_multiplier,
blob_commitment,
blob_proof,
fee_config,
};

self.rollup_store
.store_prover_input_by_batch_and_version(
batch.number,
&self.git_commit_hash,
prover_input,
)
.await?;

Ok(())
}
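Prover inputs are stored keyed by batch number and version (the committer's git commit hash), which is what the new `MissingBatchProverInput(u64, String)` error reports when a lookup fails. A toy, self-contained sketch of that keying, with a `HashMap` standing in for the rollup store (the real `store_prover_input_by_batch_and_version` API is not shown in this diff):

use std::collections::HashMap;

// Toy stand-in for the rollup store's prover-input table: entries are keyed by
// (batch number, version), where this PR uses the git commit hash as version.
struct ProverInputStore {
    inputs: HashMap<(u64, String), Vec<u8>>, // serialized ProverInputData
}

impl ProverInputStore {
    fn store(&mut self, batch: u64, version: &str, data: Vec<u8>) {
        self.inputs.insert((batch, version.to_string()), data);
    }

    fn get(&self, batch: u64, version: &str) -> Option<&Vec<u8>> {
        self.inputs.get(&(batch, version.to_string()))
    }
}

fn main() {
    let mut store = ProverInputStore { inputs: HashMap::new() };
    store.store(42, "abc1234", vec![1, 2, 3]);
    // A prover built from a different commit asks for a different version and
    // finds nothing, which corresponds to MissingBatchProverInput(42, version).
    assert!(store.get(42, "def5678").is_none());
    assert!(store.get(42, "abc1234").is_some());
}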

async fn send_commitment(&mut self, batch: &Batch) -> Result<H256, CommitterError> {
let messages_merkle_root = compute_merkle_root(&batch.message_hashes);
let last_block_hash = get_last_block_hash(&self.store, batch.last_block)?;
Expand All @@ -551,22 +634,11 @@ impl L1Committer {
let (commit_function_signature, values) = if self.based {
let mut encoded_blocks: Vec<Bytes> = Vec::new();

for i in batch.first_block..=batch.last_block {
let block_header = self
.store
.get_block_header(i)
.map_err(CommitterError::from)?
.ok_or(CommitterError::FailedToRetrieveDataFromStorage)?;

let block_body = self
.store
.get_block_body(i)
.await
.map_err(CommitterError::from)?
.ok_or(CommitterError::FailedToRetrieveDataFromStorage)?;

let block = Block::new(block_header, block_body);
let blocks =
fetch_batch_blocks::<CommitterError>(batch.number, &self.store, &self.rollup_store)
.await?;

for block in blocks {
encoded_blocks.push(block.encode_to_vec().into());
}

@@ -747,7 +819,7 @@ impl GenServer for L1Committer {
let commit_time: u128 = self.commit_time_ms.into();
let should_send_commitment =
current_time - self.last_committed_batch_timestamp > commit_time;
#[allow(clippy::collapsible_if)]
#[expect(clippy::collapsible_if)]
if should_send_commitment {
if self
.commit_next_batch_to_l1()
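A side note on the lint change above: switching `#[allow(clippy::collapsible_if)]` to `#[expect(clippy::collapsible_if)]` (stable since Rust 1.81) makes clippy warn if the lint ever stops firing, so the suppression cannot silently go stale. A standalone illustration, not taken from this PR:

// #[expect] raises an "unfulfilled lint expectation" warning when the expected
// lint does not trigger; #[allow] would stay silent either way.
#[expect(clippy::collapsible_if)]
fn notify(ready: bool, enabled: bool) {
    if ready {
        if enabled {
            println!("sending commitment");
        }
    }
}

fn main() {
    notify(true, true);
}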
2 changes: 0 additions & 2 deletions crates/l2/sequencer/mod.rs
@@ -110,10 +110,8 @@ pub async fn start_l2(
error!("Error starting Committer: {err}");
});
let _ = ProofCoordinator::spawn(
store.clone(),
rollup_store.clone(),
cfg.clone(),
blockchain.clone(),
needed_proof_types.clone(),
)
.await