From bad5b08f122966b2ffcecf8847034a4e929bfc5e Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 4 Nov 2025 09:22:21 -0500 Subject: [PATCH 01/26] Update to v0.9.1 (#398) --- Cargo.lock | 20 ++++++++++---------- Cargo.toml | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f034c39c..5a943fe5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1575,7 +1575,7 @@ dependencies = [ [[package]] name = "cb-bench-pbs" -version = "0.9.0" +version = "0.9.1" dependencies = [ "alloy", "cb-common", @@ -1592,7 +1592,7 @@ dependencies = [ [[package]] name = "cb-cli" -version = "0.9.0" +version = "0.9.1" dependencies = [ "cb-common", "clap", @@ -1604,7 +1604,7 @@ dependencies = [ [[package]] name = "cb-common" -version = "0.9.0" +version = "0.9.1" dependencies = [ "aes 0.8.4", "alloy", @@ -1650,7 +1650,7 @@ dependencies = [ [[package]] name = "cb-metrics" -version = "0.9.0" +version = "0.9.1" dependencies = [ "axum 0.8.4", "cb-common", @@ -1663,7 +1663,7 @@ dependencies = [ [[package]] name = "cb-pbs" -version = "0.9.0" +version = "0.9.1" dependencies = [ "alloy", "async-trait", @@ -1689,7 +1689,7 @@ dependencies = [ [[package]] name = "cb-signer" -version = "0.9.0" +version = "0.9.1" dependencies = [ "alloy", "axum 0.8.4", @@ -1718,7 +1718,7 @@ dependencies = [ [[package]] name = "cb-tests" -version = "0.9.0" +version = "0.9.1" dependencies = [ "alloy", "axum 0.8.4", @@ -1877,7 +1877,7 @@ dependencies = [ [[package]] name = "commit-boost" -version = "0.9.0" +version = "0.9.1" dependencies = [ "cb-cli", "cb-common", @@ -2165,7 +2165,7 @@ dependencies = [ [[package]] name = "da_commit" -version = "0.9.0" +version = "0.9.1" dependencies = [ "alloy", "color-eyre", @@ -6076,7 +6076,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "status_api" -version = "0.9.0" +version = "0.9.1" dependencies = [ "async-trait", "axum 0.8.4", diff --git a/Cargo.toml b/Cargo.toml index 68cb9e27..e429fc86 
100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ resolver = "2" [workspace.package] edition = "2024" rust-version = "1.89" -version = "0.9.0" +version = "0.9.1" [workspace.dependencies] aes = "0.8" From 3e4a7da946f531a874c614c240de4372adb5e003 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 4 Nov 2025 16:47:39 -0500 Subject: [PATCH 02/26] Changed get_accept_type() to allow multiple types --- crates/common/src/utils.rs | 163 +++++++++++++++++++++----- crates/pbs/src/error.rs | 3 + crates/pbs/src/routes/get_header.rs | 74 +++++++----- crates/pbs/src/routes/submit_block.rs | 98 +++++++++------- tests/src/mock_relay.rs | 72 ++++++------ 5 files changed, 273 insertions(+), 137 deletions(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 291932d8..221ca4d1 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -1,6 +1,7 @@ #[cfg(feature = "testing-flags")] use std::cell::Cell; use std::{ + collections::HashSet, fmt::Display, net::Ipv4Addr, str::FromStr, @@ -433,36 +434,34 @@ pub fn get_user_agent_with_version(req_headers: &HeaderMap) -> eyre::Result eyre::Result { - let accept = Accept::from_str( - req_headers.get(ACCEPT).and_then(|value| value.to_str().ok()).unwrap_or(APPLICATION_JSON), - ) - .map_err(|e| eyre::eyre!("invalid accept header: {e}"))?; - - if accept.media_types().count() == 0 { - // No valid media types found, default to JSON - return Ok(EncodingType::Json); - } - - // Get the SSZ and JSON media types if present - let mut ssz_type = false; - let mut json_type = false; +pub fn get_accept_types(req_headers: &HeaderMap) -> eyre::Result> { + let mut accepted_types = HashSet::new(); let mut unsupported_type = false; - accept.media_types().for_each(|mt| match mt.essence().to_string().as_str() { - APPLICATION_OCTET_STREAM => ssz_type = true, - APPLICATION_JSON | WILDCARD => json_type = true, - _ => unsupported_type = true, - }); - - // If SSZ is present, prioritize it - if ssz_type { - return 
Ok(EncodingType::Ssz); + for header in req_headers.get_all(ACCEPT).iter() { + let accept = Accept::from_str(header.to_str()?) + .map_err(|e| eyre::eyre!("invalid accept header: {e}"))?; + for mt in accept.media_types() { + match mt.essence().to_string().as_str() { + APPLICATION_OCTET_STREAM => { + accepted_types.insert(EncodingType::Ssz); + } + APPLICATION_JSON | WILDCARD => { + accepted_types.insert(EncodingType::Json); + } + _ => unsupported_type = true, + }; + } } - // If there aren't any unsupported types, use JSON - if !unsupported_type { - return Ok(EncodingType::Json); + + if accepted_types.is_empty() { + if unsupported_type { + return Err(eyre::eyre!("unsupported accept type")); + } + + // No accept header so just return JSON + accepted_types.insert(EncodingType::Json); } - Err(eyre::eyre!("unsupported accept type")) + Ok(accepted_types) } /// Parse CONTENT TYPE header to get the encoding type of the body, defaulting @@ -490,7 +489,7 @@ pub fn get_consensus_version_header(req_headers: &HeaderMap) -> Option /// Enum for types that can be used to encode incoming request bodies or /// outgoing response bodies -#[derive(Debug, Clone, Copy, PartialEq)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum EncodingType { /// Body is UTF-8 encoded as JSON Json, @@ -636,8 +635,18 @@ pub fn bls_pubkey_from_hex_unchecked(hex: &str) -> BlsPublicKey { #[cfg(test)] mod test { + use axum::http::{HeaderMap, HeaderValue}; + use reqwest::header::ACCEPT; + use super::{create_jwt, decode_jwt, validate_jwt}; - use crate::types::{Jwt, ModuleId}; + use crate::{ + types::{Jwt, ModuleId}, + utils::{ + APPLICATION_JSON, APPLICATION_OCTET_STREAM, EncodingType, WILDCARD, get_accept_types, + }, + }; + + const APPLICATION_TEXT: &str = "application/text"; #[test] fn test_jwt_validation() { @@ -660,4 +669,100 @@ mod test { assert!(response.is_err()); assert_eq!(response.unwrap_err().to_string(), "InvalidSignature"); } + + /// Make sure a missing Accept header is interpreted as 
JSON + #[test] + fn test_missing_accept_header() { + let headers = HeaderMap::new(); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Json)); + } + + /// Test accepting JSON + #[test] + fn test_accept_header_json() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_JSON).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Json)); + } + + /// Test accepting SSZ + #[test] + fn test_accept_header_ssz() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test accepting wildcards + #[test] + fn test_accept_header_wildcard() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(WILDCARD).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 1); + assert!(result.contains(&EncodingType::Json)); + } + + /// Test accepting one header with multiple values + #[test] + fn test_accept_header_multiple_values() { + let header_string = format!("{APPLICATION_JSON}, {APPLICATION_OCTET_STREAM}"); + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.contains(&EncodingType::Json)); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test accepting multiple headers + #[test] + fn test_multiple_accept_headers() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_JSON).unwrap()); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_OCTET_STREAM).unwrap()); + let result = 
get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.contains(&EncodingType::Json)); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test accepting one header with multiple values, including a type that + /// can't be used + #[test] + fn test_accept_header_multiple_values_including_unknown() { + let header_string = + format!("{APPLICATION_JSON}, {APPLICATION_OCTET_STREAM}, {APPLICATION_TEXT}"); + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap()); + let result = get_accept_types(&headers).unwrap(); + assert_eq!(result.len(), 2); + assert!(result.contains(&EncodingType::Json)); + assert!(result.contains(&EncodingType::Ssz)); + } + + /// Test rejecting an unknown accept type + #[test] + fn test_invalid_accept_header_type() { + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(APPLICATION_TEXT).unwrap()); + let result = get_accept_types(&headers); + assert!(result.is_err()); + } + + /// Test accepting one header with multiple values + #[test] + fn test_accept_header_invalid_parse() { + let header_string = format!("{APPLICATION_JSON}, a?;ef)"); + let mut headers = HeaderMap::new(); + headers.append(ACCEPT, HeaderValue::from_str(&header_string).unwrap()); + let result = get_accept_types(&headers); + assert!(result.is_err()); + } } diff --git a/crates/pbs/src/error.rs b/crates/pbs/src/error.rs index 6c1c5c68..4ebdc18f 100644 --- a/crates/pbs/src/error.rs +++ b/crates/pbs/src/error.rs @@ -7,6 +7,7 @@ pub enum PbsClientError { NoPayload, Internal, DecodeError(String), + RelayError(String), } impl PbsClientError { @@ -16,6 +17,7 @@ impl PbsClientError { PbsClientError::NoPayload => StatusCode::BAD_GATEWAY, PbsClientError::Internal => StatusCode::INTERNAL_SERVER_ERROR, PbsClientError::DecodeError(_) => StatusCode::BAD_REQUEST, + PbsClientError::RelayError(_) => StatusCode::FAILED_DEPENDENCY, } } } @@ -27,6 +29,7 @@ impl IntoResponse for 
PbsClientError { PbsClientError::NoPayload => "no payload from relays".to_string(), PbsClientError::Internal => "internal server error".to_string(), PbsClientError::DecodeError(e) => format!("error decoding request: {e}"), + PbsClientError::RelayError(e) => format!("error processing relay response: {e}"), }; (self.status_code(), msg).into_response() diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index ca8d2d7c..fcb95c76 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -7,7 +7,7 @@ use axum::{ use cb_common::{ pbs::{GetHeaderInfo, GetHeaderParams}, utils::{ - CONSENSUS_VERSION_HEADER, EncodingType, get_accept_type, get_user_agent, ms_into_slot, + CONSENSUS_VERSION_HEADER, EncodingType, get_accept_types, get_user_agent, ms_into_slot, }, }; use reqwest::{StatusCode, header::CONTENT_TYPE}; @@ -35,14 +35,14 @@ pub async fn handle_get_header>( let ua = get_user_agent(&req_headers); let ms_into_slot = ms_into_slot(params.slot, state.config.chain); - let accept_type = get_accept_type(&req_headers).map_err(|e| { + let accept_types = get_accept_types(&req_headers).map_err(|e| { error!(%e, "error parsing accept header"); PbsClientError::DecodeError(format!("error parsing accept header: {e}")) }); - if let Err(e) = accept_type { + if let Err(e) = accept_types { return Ok((StatusCode::BAD_REQUEST, e).into_response()); } - let accept_type = accept_type.unwrap(); + let accept_types = accept_types.unwrap(); info!(ua, ms_into_slot, "new request"); @@ -52,30 +52,48 @@ pub async fn handle_get_header>( info!(value_eth = format_ether(*max_bid.data.message.value()), block_hash =% max_bid.block_hash(), "received header"); BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); - let response = match accept_type { - EncodingType::Ssz => { - let mut res = max_bid.data.as_ssz_bytes().into_response(); - let Ok(consensus_version_header) = - 
HeaderValue::from_str(&max_bid.version.to_string()) - else { - info!("sending response as JSON"); - return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); - }; - let Ok(content_type_header) = - HeaderValue::from_str(&format!("{}", EncodingType::Ssz)) - else { - info!("sending response as JSON"); - return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); - }; - res.headers_mut() - .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); - res.headers_mut().insert(CONTENT_TYPE, content_type_header); - info!("sending response as SSZ"); - res - } - EncodingType::Json => (StatusCode::OK, axum::Json(max_bid)).into_response(), - }; - Ok(response) + + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); + + // Handle SSZ + if accepts_ssz { + let mut res = max_bid.data.as_ssz_bytes().into_response(); + let consensus_version_header = match HeaderValue::from_str( + &max_bid.version.to_string(), + ) { + Ok(consensus_version_header) => Ok(consensus_version_header), + Err(e) => { + if accepts_json { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(max_bid)).into_response()); + } else { + return Err(PbsClientError::RelayError(format!( + "error decoding consensus version from relay payload: {e}" + ))); + } + } + }?; + + // This won't actually fail since the string is a const + let content_type_header = + HeaderValue::from_str(&EncodingType::Ssz.to_string()).unwrap(); + + res.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + res.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + return Ok(res); + } + + // Handle JSON + if accepts_json { + Ok((StatusCode::OK, axum::Json(max_bid)).into_response()) + } else { + // This shouldn't ever happen but the compiler needs it + Err(PbsClientError::DecodeError( + "no viable accept types in request".to_string(), + )) + } } else { // spec: return 204 if 
request is valid but no bid available info!("no header available for slot"); diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 1134b462..53b13811 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -1,7 +1,6 @@ use std::sync::Arc; use axum::{ - Json, extract::State, http::{HeaderMap, HeaderValue}, response::IntoResponse, @@ -9,8 +8,8 @@ use axum::{ use cb_common::{ pbs::{BuilderApiVersion, GetPayloadInfo}, utils::{ - CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, deserialize_body, get_accept_type, - get_user_agent, timestamp_of_slot_start_millis, utcnow_ms + CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, deserialize_body, get_accept_types, + get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, }, }; use reqwest::{StatusCode, header::CONTENT_TYPE}; @@ -47,8 +46,8 @@ async fn handle_submit_block_impl>( raw_request: RawRequest, api_version: BuilderApiVersion, ) -> Result { - let signed_blinded_block = Arc::new( - deserialize_body(&req_headers, raw_request.body_bytes).await.map_err(|e| { + let signed_blinded_block = + Arc::new(deserialize_body(&req_headers, raw_request.body_bytes).await.map_err(|e| { error!(%e, "failed to deserialize signed blinded block"); PbsClientError::DecodeError(format!("failed to deserialize body: {e}")) })?); @@ -66,14 +65,14 @@ async fn handle_submit_block_impl>( let block_hash = signed_blinded_block.block_hash(); let slot_start_ms = timestamp_of_slot_start_millis(slot.into(), state.config.chain); let ua = get_user_agent(&req_headers); - let response_type = get_accept_type(&req_headers).map_err(|e| { + let response_types = get_accept_types(&req_headers).map_err(|e| { error!(%e, "error parsing accept header"); PbsClientError::DecodeError(format!("error parsing accept header: {e}")) }); - if let Err(e) = response_type { - return Ok((StatusCode::BAD_REQUEST, e.into_response())); + if let Err(e) = response_types { + return 
Ok((StatusCode::BAD_REQUEST, e).into_response()); } - let response_type = response_type.unwrap(); + let response_types = response_types.unwrap(); info!(ua, ms_into_slot = now.saturating_sub(slot_start_ms), "new request"); @@ -86,41 +85,50 @@ async fn handle_submit_block_impl>( BEACON_NODE_STATUS .with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - let response = match response_type { - EncodingType::Json => { - info!("sending response as JSON"); - Json(payload_and_blobs).into_response() - } - EncodingType::Ssz => { - let mut response = payload_and_blobs.data.as_ssz_bytes().into_response(); - let Ok(consensus_version_header) = - HeaderValue::from_str(&payload_and_blobs.version.to_string()) - else { - info!("sending response as JSON"); - return Ok(( - StatusCode::OK, - axum::Json(payload_and_blobs).into_response(), - )); - }; - let Ok(content_type_header) = - HeaderValue::from_str(&EncodingType::Ssz.to_string()) - else { - info!("sending response as JSON"); - return Ok(( - StatusCode::OK, - axum::Json(payload_and_blobs).into_response(), - )); - }; - response - .headers_mut() - .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); - response.headers_mut().insert(CONTENT_TYPE, content_type_header); - info!("sending response as SSZ"); - response - } - }; - - Ok((StatusCode::OK, response)) + + let accepts_ssz = response_types.contains(&EncodingType::Ssz); + let accepts_json = response_types.contains(&EncodingType::Json); + + // Try SSZ + if accepts_ssz { + let mut response = payload_and_blobs.data.as_ssz_bytes().into_response(); + let consensus_version_header = + match HeaderValue::from_str(&payload_and_blobs.version.to_string()) { + Ok(consensus_version_header) => Ok(consensus_version_header), + Err(e) => { + if accepts_json { + info!("sending response as JSON"); + return Ok((StatusCode::OK, axum::Json(payload_and_blobs)) + .into_response()); + } else { + return Err(PbsClientError::RelayError(format!( + "error decoding consensus version from 
relay payload: {e}" + ))); + } + } + }?; + + // This won't actually fail since the string is a const + let content_type_header = + HeaderValue::from_str(&EncodingType::Ssz.to_string()).unwrap(); + + response + .headers_mut() + .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + response.headers_mut().insert(CONTENT_TYPE, content_type_header); + info!("sending response as SSZ"); + return Ok(response); + } + + // Handle JSON + if accepts_json { + Ok((StatusCode::OK, axum::Json(payload_and_blobs)).into_response()) + } else { + // This shouldn't ever happen but the compiler needs it + Err(PbsClientError::DecodeError( + "no viable accept types in request".to_string(), + )) + } } None => { info!("received unblinded block (v2)"); @@ -130,7 +138,7 @@ async fn handle_submit_block_impl>( BEACON_NODE_STATUS .with_label_values(&["202", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - Ok((StatusCode::ACCEPTED, "".into_response())) + Ok((StatusCode::ACCEPTED, "").into_response()) } }, diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 5611a1eb..c0a65a01 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -26,7 +26,7 @@ use cb_common::{ types::{BlsSecretKey, Chain}, utils::{ CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, TestRandomSeed, deserialize_body, - get_accept_type, get_consensus_version_header, timestamp_of_slot_start_sec, + get_accept_types, get_consensus_version_header, timestamp_of_slot_start_sec, }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK_RESPONSE; @@ -118,16 +118,16 @@ async fn handle_get_header( headers: HeaderMap, ) -> Response { state.received_get_header.fetch_add(1, Ordering::Relaxed); - let accept_type = get_accept_type(&headers) + let accept_types = get_accept_types(&headers) .map_err(|e| (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}"))); - if let Err(e) = accept_type { + if let Err(e) = accept_types { return e.into_response(); } - let accept_header = accept_type.unwrap(); + let accept_types = 
accept_types.unwrap(); let consensus_version_header = get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); - let data = match consensus_version_header { + let (data, accept_type) = match consensus_version_header { // Add Fusaka and other forks here when necessary ForkName::Electra => { let mut header = ExecutionPayloadHeaderElectra { @@ -150,16 +150,16 @@ async fn handle_get_header( let object_root = message.tree_hash_root(); let signature = sign_builder_root(state.chain, &state.signer, object_root); let response = SignedBuilderBid { message, signature }; - match accept_header { - EncodingType::Json => { - let versioned_response = GetHeaderResponse { - version: ForkName::Electra, - data: response, - metadata: Default::default(), - }; - serde_json::to_vec(&versioned_response).unwrap() - } - EncodingType::Ssz => response.as_ssz_bytes(), + if accept_types.contains(&EncodingType::Ssz) { + (response.as_ssz_bytes(), EncodingType::Ssz) + } else { + // Return JSON for everything else; this is fine for the mock + let versioned_response = GetHeaderResponse { + version: ForkName::Electra, + data: response, + metadata: Default::default(), + }; + (serde_json::to_vec(&versioned_response).unwrap(), EncodingType::Json) } } _ => { @@ -174,7 +174,7 @@ async fn handle_get_header( let mut response = (StatusCode::OK, data).into_response(); let consensus_version_header = HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); - let content_type_header = HeaderValue::from_str(&accept_header.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&accept_type.to_string()).unwrap(); response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); response.headers_mut().insert(CONTENT_TYPE, content_type_header); response @@ -205,18 +205,21 @@ async fn handle_submit_block_v1( raw_request: RawRequest, ) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); - let accept_header = get_accept_type(&headers); - 
if let Err(e) = accept_header { + let accept_types = get_accept_types(&headers); + if let Err(e) = accept_types { error!(%e, "error parsing accept header"); return (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}")) .into_response(); } - let accept_header = accept_header.unwrap(); + let accept_types = accept_types.unwrap(); let consensus_version_header = get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); - let data = if state.large_body() { - vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE] + let (data, accept_type) = if state.large_body() { + ( + vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE], + accept_types.iter().next().unwrap_or(&EncodingType::Json), + ) } else { let mut execution_payload = ExecutionPayloadElectra::test_random(); let submit_block = deserialize_body(&headers, raw_request.body_bytes).await.map_err(|e| { @@ -239,19 +242,10 @@ async fn handle_submit_block_v1( let response = PayloadAndBlobs { execution_payload: execution_payload.into(), blobs_bundle }; - match accept_header { - EncodingType::Json => { - // Response is versioned for JSON - let response = SubmitBlindedBlockResponse { - version: ForkName::Electra, - metadata: Default::default(), - data: response, - }; - serde_json::to_vec(&response).unwrap() - } - EncodingType::Ssz => match consensus_version_header { + if accept_types.contains(&EncodingType::Ssz) { + match consensus_version_header { // Response isn't versioned for SSZ - ForkName::Electra => response.as_ssz_bytes(), + ForkName::Electra => (response.as_ssz_bytes(), &EncodingType::Ssz), _ => { return ( StatusCode::BAD_REQUEST, @@ -259,14 +253,22 @@ async fn handle_submit_block_v1( ) .into_response(); } - }, + } + } else { + // Response is versioned for JSON + let response = SubmitBlindedBlockResponse { + version: ForkName::Electra, + metadata: Default::default(), + data: response, + }; + (serde_json::to_vec(&response).unwrap(), &EncodingType::Json) } }; let mut response = (StatusCode::OK, 
data).into_response(); let consensus_version_header = HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); - let content_type_header = HeaderValue::from_str(&accept_header.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&accept_type.to_string()).unwrap(); response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); response.headers_mut().insert(CONTENT_TYPE, content_type_header); response From 29398f6b130d2f3a59596fafa4c28299d356704a Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Wed, 5 Nov 2025 14:24:45 -0500 Subject: [PATCH 03/26] Update to v0.9.2 (#400) Co-authored-by: ltitanb <163874448+ltitanb@users.noreply.github.com> --- Cargo.lock | 20 ++++++++++---------- Cargo.toml | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5a943fe5..b7d77991 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1575,7 +1575,7 @@ dependencies = [ [[package]] name = "cb-bench-pbs" -version = "0.9.1" +version = "0.9.2" dependencies = [ "alloy", "cb-common", @@ -1592,7 +1592,7 @@ dependencies = [ [[package]] name = "cb-cli" -version = "0.9.1" +version = "0.9.2" dependencies = [ "cb-common", "clap", @@ -1604,7 +1604,7 @@ dependencies = [ [[package]] name = "cb-common" -version = "0.9.1" +version = "0.9.2" dependencies = [ "aes 0.8.4", "alloy", @@ -1650,7 +1650,7 @@ dependencies = [ [[package]] name = "cb-metrics" -version = "0.9.1" +version = "0.9.2" dependencies = [ "axum 0.8.4", "cb-common", @@ -1663,7 +1663,7 @@ dependencies = [ [[package]] name = "cb-pbs" -version = "0.9.1" +version = "0.9.2" dependencies = [ "alloy", "async-trait", @@ -1689,7 +1689,7 @@ dependencies = [ [[package]] name = "cb-signer" -version = "0.9.1" +version = "0.9.2" dependencies = [ "alloy", "axum 0.8.4", @@ -1718,7 +1718,7 @@ dependencies = [ [[package]] name = "cb-tests" -version = "0.9.1" +version = "0.9.2" dependencies = [ "alloy", "axum 0.8.4", @@ -1877,7 +1877,7 @@ dependencies = [ [[package]] 
name = "commit-boost" -version = "0.9.1" +version = "0.9.2" dependencies = [ "cb-cli", "cb-common", @@ -2165,7 +2165,7 @@ dependencies = [ [[package]] name = "da_commit" -version = "0.9.1" +version = "0.9.2" dependencies = [ "alloy", "color-eyre", @@ -6076,7 +6076,7 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "status_api" -version = "0.9.1" +version = "0.9.2" dependencies = [ "async-trait", "axum 0.8.4", diff --git a/Cargo.toml b/Cargo.toml index e429fc86..23679360 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ resolver = "2" [workspace.package] edition = "2024" rust-version = "1.89" -version = "0.9.1" +version = "0.9.2" [workspace.dependencies] aes = "0.8" From 7ab1f7ebb0477c0e59a6b0410aa9b5602bd09379 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Thu, 6 Nov 2025 16:45:49 -0500 Subject: [PATCH 04/26] get_header()'s impl now works with multiple types --- Cargo.lock | 1 + crates/common/src/pbs/error.rs | 3 + crates/common/src/pbs/mod.rs | 1 + crates/pbs/Cargo.toml | 1 + crates/pbs/src/mev_boost/get_header.rs | 82 ++++++++++++++++++++++---- tests/src/mock_relay.rs | 4 +- 6 files changed, 77 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6dd9c8ae..76063517 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1676,6 +1676,7 @@ dependencies = [ "ethereum_ssz", "eyre", "futures", + "headers", "lazy_static", "parking_lot", "prometheus", diff --git a/crates/common/src/pbs/error.rs b/crates/common/src/pbs/error.rs index 77d942cd..58066c4f 100644 --- a/crates/common/src/pbs/error.rs +++ b/crates/common/src/pbs/error.rs @@ -14,6 +14,9 @@ pub enum PbsError { #[error("json decode error: {err:?}, raw: {raw}")] JsonDecode { err: serde_json::Error, raw: String }, + #[error("error with request: {0}")] + GeneralRequest(String), + #[error("{0}")] ReadResponse(#[from] ResponseReadError), diff --git a/crates/common/src/pbs/mod.rs b/crates/common/src/pbs/mod.rs index af2c07b4..a1152b58 100644 --- 
a/crates/common/src/pbs/mod.rs +++ b/crates/common/src/pbs/mod.rs @@ -6,5 +6,6 @@ mod types; pub use builder::*; pub use constants::*; +pub use lh_types::ForkVersionDecode; pub use relay::*; pub use types::*; diff --git a/crates/pbs/Cargo.toml b/crates/pbs/Cargo.toml index 1c5c2f1f..b0c1585e 100644 --- a/crates/pbs/Cargo.toml +++ b/crates/pbs/Cargo.toml @@ -15,6 +15,7 @@ cb-metrics.workspace = true ethereum_ssz.workspace = true eyre.workspace = true futures.workspace = true +headers.workspace = true lazy_static.workspace = true parking_lot.workspace = true prometheus.workspace = true diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index 86743703..ebd454d1 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -12,20 +12,24 @@ use axum::http::{HeaderMap, HeaderValue}; use cb_common::{ constants::APPLICATION_BUILDER_DOMAIN, pbs::{ - EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, GetHeaderInfo, GetHeaderParams, - GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS, RelayClient, + EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, ForkVersionDecode, GetHeaderInfo, + GetHeaderParams, GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS, + RelayClient, SignedBuilderBid, error::{PbsError, ValidationError}, }, signature::verify_signed_message, types::{BlsPublicKey, BlsPublicKeyBytes, BlsSignature, Chain}, utils::{ - get_user_agent_with_version, ms_into_slot, read_chunked_body_with_max, - timestamp_of_slot_start_sec, utcnow_ms, + EncodingType, get_accept_types, get_consensus_version_header, get_user_agent_with_version, + ms_into_slot, read_chunked_body_with_max, timestamp_of_slot_start_sec, utcnow_ms, }, }; use futures::future::join_all; use parking_lot::RwLock; -use reqwest::{StatusCode, header::USER_AGENT}; +use reqwest::{ + StatusCode, + header::{CONTENT_TYPE, USER_AGENT}, +}; use tokio::time::sleep; use tracing::{Instrument, debug, error, warn}; use 
tree_hash::TreeHash; @@ -318,6 +322,13 @@ async fn send_one_get_header( // minimize timing games without losing the bid req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms)); + // Check which types this request is for + let accept_types = get_accept_types(&req_config.headers).map_err(|e| { + PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string()) + })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); + let start_request = Instant::now(); let res = match relay .client @@ -336,12 +347,37 @@ async fn send_one_get_header( } }; + // Make sure the response type is acceptable + let code = res.status(); + let content_type = match res.headers().get(CONTENT_TYPE) { + Some(header) => { + let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { + error_msg: format!("cannot decode content-type header: {e}").to_string(), + code: (code.as_u16()), + })?; + if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) && accepts_ssz { + EncodingType::Ssz + } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) && + accepts_json + { + EncodingType::Json + } else { + return Err(PbsError::GeneralRequest(format!( + "relay returned unsupported content type: {header_str}" + ))); + } + } + None => EncodingType::Json, // Default to JSON if no content type is provided + }; + + // Get the consensus fork version if provided (to avoid cloning later) + let fork = get_consensus_version_header(res.headers()); + let request_latency = start_request.elapsed(); RELAY_LATENCY .with_label_values(&[GET_HEADER_ENDPOINT_TAG, &relay.id]) .observe(request_latency.as_secs_f64()); - let code = res.status(); RELAY_STATUS_CODE.with_label_values(&[code.as_str(), GET_HEADER_ENDPOINT_TAG, &relay.id]).inc(); let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; @@ -363,14 +399,33 @@ async fn 
send_one_get_header( return Ok((start_request_time, None)); } - let get_header_response = match serde_json::from_slice::(&response_bytes) { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); + // Regenerate the header from the response + let get_header_response = match content_type { + EncodingType::Ssz => { + // Get the consensus fork version - this is required according to the spec + let fork = fork.ok_or(PbsError::RelayResponse { + error_msg: "relay did not provide consensus version header for ssz payload" + .to_string(), + code: code.as_u16(), + })?; + let data = + SignedBuilderBid::from_ssz_bytes_by_fork(&response_bytes, fork).map_err(|e| { + PbsError::RelayResponse { + error_msg: (format!("error decoding relay payload: {:?}", e)).to_string(), + code: (code.as_u16()), + } + })?; + GetHeaderResponse { version: fork, data, metadata: Default::default() } } + EncodingType::Json => match serde_json::from_slice::(&response_bytes) { + Ok(parsed) => parsed, + Err(err) => { + return Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(&response_bytes).into_owned(), + }); + } + }, }; debug!( @@ -380,6 +435,7 @@ async fn send_one_get_header( version =? 
get_header_response.version, value_eth = format_ether(*get_header_response.value()), block_hash = %get_header_response.block_hash(), + content_type = %content_type, "received new header" ); diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index c0a65a01..9ad427c9 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -127,7 +127,7 @@ async fn handle_get_header( let consensus_version_header = get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); - let (data, accept_type) = match consensus_version_header { + let (data, content_type) = match consensus_version_header { // Add Fusaka and other forks here when necessary ForkName::Electra => { let mut header = ExecutionPayloadHeaderElectra { @@ -174,7 +174,7 @@ async fn handle_get_header( let mut response = (StatusCode::OK, data).into_response(); let consensus_version_header = HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); - let content_type_header = HeaderValue::from_str(&accept_type.to_string()).unwrap(); + let content_type_header = HeaderValue::from_str(&content_type.to_string()).unwrap(); response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); response.headers_mut().insert(CONTENT_TYPE, content_type_header); response From bfbcfe4f1c6d74835c5c4e5d05faadbce0064247 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Fri, 7 Nov 2025 11:17:39 -0500 Subject: [PATCH 05/26] Added retry-different-accept-types thing to get_header --- crates/pbs/src/mev_boost/get_header.rs | 93 +++++++++++++++++++++----- 1 file changed, 75 insertions(+), 18 deletions(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index ebd454d1..592c660c 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -27,8 +27,8 @@ use cb_common::{ use futures::future::join_all; use parking_lot::RwLock; use reqwest::{ - StatusCode, - header::{CONTENT_TYPE, USER_AGENT}, + Response, 
StatusCode, + header::{ACCEPT, CONTENT_TYPE, USER_AGENT}, }; use tokio::time::sleep; use tracing::{Instrument, debug, error, warn}; @@ -305,13 +305,12 @@ struct ValidationContext { parent_block: Arc>>, } -async fn send_one_get_header( - params: GetHeaderParams, - relay: RelayClient, - chain: Chain, +async fn send_get_header_impl( + relay: &RelayClient, mut req_config: RequestContext, - validation: ValidationContext, -) -> Result<(u64, Option), PbsError> { + accepts_ssz: bool, + accepts_json: bool, +) -> Result<(Response, u64, EncodingType), PbsError> { // the timestamp in the header is the consensus block time which is fixed, // use the beginning of the request as proxy to make sure we use only the // last one received @@ -322,14 +321,6 @@ async fn send_one_get_header( // minimize timing games without losing the bid req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms)); - // Check which types this request is for - let accept_types = get_accept_types(&req_config.headers).map_err(|e| { - PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string()) - })?; - let accepts_ssz = accept_types.contains(&EncodingType::Ssz); - let accepts_json = accept_types.contains(&EncodingType::Json); - - let start_request = Instant::now(); let res = match relay .client .get(req_config.url) @@ -348,12 +339,11 @@ async fn send_one_get_header( }; // Make sure the response type is acceptable - let code = res.status(); let content_type = match res.headers().get(CONTENT_TYPE) { Some(header) => { let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { error_msg: format!("cannot decode content-type header: {e}").to_string(), - code: (code.as_u16()), + code: (res.status().as_u16()), })?; if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) && accepts_ssz { EncodingType::Ssz @@ -370,6 +360,73 @@ async fn send_one_get_header( None => EncodingType::Json, // Default to JSON if no content type is provided }; + Ok((res, 
start_request_time, content_type)) +} + +async fn send_one_get_header( + params: GetHeaderParams, + relay: RelayClient, + chain: Chain, + req_config: RequestContext, + validation: ValidationContext, +) -> Result<(u64, Option), PbsError> { + let mut original_headers = req_config.headers.clone(); + + // Check which types this request is for + let accept_types = get_accept_types(&req_config.headers).map_err(|e| { + PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string()) + })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); + + // Send the header request + let mut start_request = Instant::now(); + let config = RequestContext { + url: req_config.url.clone(), + timeout_ms: req_config.timeout_ms, + headers: req_config.headers, + }; + let (mut res, mut start_request_time, mut content_type) = + send_get_header_impl(&relay, config, accepts_ssz, accepts_json).await?; + let mut code = res.status(); + + // If the request only supports SSZ, but the relay only supports JSON, resubmit + // to the relay with JSON - we'll convert it ourselves + if code == StatusCode::NOT_ACCEPTABLE && accepts_ssz && !accepts_json { + debug!( + relay_id = relay.id.as_ref(), + "relay does not support SSZ, resubmitting request with JSON accept header" + ); + + // Make sure there's enough time left to resubmit + let elapsed = start_request.elapsed().as_millis() as u64; + if elapsed >= req_config.timeout_ms { + RELAY_STATUS_CODE + .with_label_values(&[TIMEOUT_ERROR_CODE_STR, GET_HEADER_ENDPOINT_TAG, &relay.id]) + .inc(); + return Err(PbsError::RelayResponse { + error_msg: "not enough time left to resubmit request with JSON accept header" + .to_string(), + code: TIMEOUT_ERROR_CODE, + }); + } + + // Resubmit the request with JSON accept header + // Also resets the start request timer + original_headers.remove(ACCEPT); + original_headers + .insert(ACCEPT, 
HeaderValue::from_str(&EncodingType::Json.to_string()).unwrap()); + let config = RequestContext { + url: req_config.url.clone(), + timeout_ms: req_config.timeout_ms - elapsed, + headers: original_headers, + }; + start_request = Instant::now(); + (res, start_request_time, content_type) = + send_get_header_impl(&relay, config, false, true).await?; + code = res.status(); + } + // Get the consensus fork version if provided (to avoid cloning later) let fork = get_consensus_version_header(res.headers()); From adfec629f35d4da63419e1a6711f3c9d02576908 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 10:59:38 -0500 Subject: [PATCH 06/26] Refactored and added some unit tests --- crates/pbs/src/mev_boost/get_header.rs | 107 +++++++++-------- tests/src/mock_relay.rs | 26 +++- tests/src/mock_validator.rs | 24 +++- tests/tests/pbs_get_header.rs | 160 ++++++++++++++++--------- tests/tests/pbs_mux.rs | 10 +- tests/tests/pbs_mux_refresh.rs | 19 +-- 6 files changed, 218 insertions(+), 128 deletions(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index 592c660c..fd0e13c9 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -101,6 +101,11 @@ pub async fn get_header( let mut send_headers = HeaderMap::new(); send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); + // Get the accept types from the request and forward them + for value in req_headers.get_all(ACCEPT).iter() { + send_headers.append(ACCEPT, value.clone()); + } + let mut handles = Vec::with_capacity(relays.len()); for relay in relays.iter() { handles.push( @@ -308,9 +313,7 @@ struct ValidationContext { async fn send_get_header_impl( relay: &RelayClient, mut req_config: RequestContext, - accepts_ssz: bool, - accepts_json: bool, -) -> Result<(Response, u64, EncodingType), PbsError> { +) -> Result<(Response, u64, Option), PbsError> { // the timestamp in the header is the consensus block time which 
is fixed, // use the beginning of the request as proxy to make sure we use only the // last one received @@ -338,28 +341,22 @@ async fn send_get_header_impl( } }; - // Make sure the response type is acceptable - let content_type = match res.headers().get(CONTENT_TYPE) { - Some(header) => { - let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { - error_msg: format!("cannot decode content-type header: {e}").to_string(), - code: (res.status().as_u16()), - })?; - if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) && accepts_ssz { - EncodingType::Ssz - } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) && - accepts_json - { - EncodingType::Json - } else { - return Err(PbsError::GeneralRequest(format!( - "relay returned unsupported content type: {header_str}" - ))); - } + // Get the content type; this is only really useful for OK responses, and + // doesn't handle encoding types besides SSZ and JSON + let mut content_type: Option = None; + if res.status() == StatusCode::OK && + let Some(header) = res.headers().get(CONTENT_TYPE) + { + let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { + error_msg: format!("cannot decode content-type header: {e}").to_string(), + code: (res.status().as_u16()), + })?; + if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) { + content_type = Some(EncodingType::Ssz) + } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) { + content_type = Some(EncodingType::Json) } - None => EncodingType::Json, // Default to JSON if no content type is provided - }; - + } Ok((res, start_request_time, content_type)) } @@ -387,7 +384,7 @@ async fn send_one_get_header( headers: req_config.headers, }; let (mut res, mut start_request_time, mut content_type) = - send_get_header_impl(&relay, config, accepts_ssz, accepts_json).await?; + send_get_header_impl(&relay, config).await?; let mut code = res.status(); // If the request only supports SSZ, but the 
relay only supports JSON, resubmit @@ -422,13 +419,13 @@ async fn send_one_get_header( headers: original_headers, }; start_request = Instant::now(); - (res, start_request_time, content_type) = - send_get_header_impl(&relay, config, false, true).await?; + (res, start_request_time, content_type) = send_get_header_impl(&relay, config).await?; code = res.status(); } // Get the consensus fork version if provided (to avoid cloning later) let fork = get_consensus_version_header(res.headers()); + let content_type_header = res.headers().get(CONTENT_TYPE).cloned(); let request_latency = start_request.elapsed(); RELAY_LATENCY @@ -457,33 +454,41 @@ async fn send_one_get_header( } // Regenerate the header from the response - let get_header_response = match content_type { - EncodingType::Ssz => { - // Get the consensus fork version - this is required according to the spec - let fork = fork.ok_or(PbsError::RelayResponse { - error_msg: "relay did not provide consensus version header for ssz payload" - .to_string(), - code: code.as_u16(), - })?; - let data = - SignedBuilderBid::from_ssz_bytes_by_fork(&response_bytes, fork).map_err(|e| { - PbsError::RelayResponse { + let get_header_response = + match content_type { + Some(EncodingType::Ssz) => { + // Get the consensus fork version - this is required according to the spec + let fork = fork.ok_or(PbsError::RelayResponse { + error_msg: "relay did not provide consensus version header for ssz payload" + .to_string(), + code: code.as_u16(), + })?; + let data = SignedBuilderBid::from_ssz_bytes_by_fork(&response_bytes, fork) + .map_err(|e| PbsError::RelayResponse { error_msg: (format!("error decoding relay payload: {:?}", e)).to_string(), code: (code.as_u16()), + })?; + GetHeaderResponse { version: fork, data, metadata: Default::default() } + } + Some(EncodingType::Json) => { + match serde_json::from_slice::(&response_bytes) { + Ok(parsed) => parsed, + Err(err) => { + return Err(PbsError::JsonDecode { + err, + raw: 
String::from_utf8_lossy(&response_bytes).into_owned(), + }); } - })?; - GetHeaderResponse { version: fork, data, metadata: Default::default() } - } - EncodingType::Json => match serde_json::from_slice::(&response_bytes) { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); + } } - }, - }; + None => { + let error_msg = match content_type_header { + None => "relay response missing content type header".to_string(), + Some(ct) => format!("relay response has unsupported content type {ct:?}"), + }; + return Err(PbsError::RelayResponse { error_msg, code: code.as_u16() }); + } + }; debug!( relay_id = relay.id.as_ref(), @@ -492,7 +497,7 @@ async fn send_one_get_header( version =? get_header_response.version, value_eth = format_ether(*get_header_response.value()), block_hash = %get_header_response.block_hash(), - content_type = %content_type, + content_type = ?content_type, "received new header" ); diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 9ad427c9..bcdd771c 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -1,4 +1,5 @@ use std::{ + collections::HashSet, net::SocketAddr, sync::{ Arc, RwLock, @@ -50,6 +51,7 @@ pub async fn start_mock_relay_service(state: Arc, port: u16) -> pub struct MockRelayState { pub chain: Chain, pub signer: BlsSecretKey, + pub supported_content_types: Arc>, large_body: bool, received_get_header: Arc, received_get_status: Arc, @@ -90,6 +92,9 @@ impl MockRelayState { received_register_validator: Default::default(), received_submit_block: Default::default(), response_override: RwLock::new(None), + supported_content_types: Arc::new( + [EncodingType::Json, EncodingType::Ssz].iter().cloned().collect(), + ), } } @@ -127,7 +132,20 @@ async fn handle_get_header( let consensus_version_header = get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); - let (data, content_type) = match consensus_version_header 
{ + let content_type = if state.supported_content_types.contains(&EncodingType::Ssz) && + accept_types.contains(&EncodingType::Ssz) + { + EncodingType::Ssz + } else if state.supported_content_types.contains(&EncodingType::Json) && + accept_types.contains(&EncodingType::Json) + { + EncodingType::Json + } else { + return (StatusCode::NOT_ACCEPTABLE, "No acceptable content type found".to_string()) + .into_response(); + }; + + let data = match consensus_version_header { // Add Fusaka and other forks here when necessary ForkName::Electra => { let mut header = ExecutionPayloadHeaderElectra { @@ -150,8 +168,8 @@ async fn handle_get_header( let object_root = message.tree_hash_root(); let signature = sign_builder_root(state.chain, &state.signer, object_root); let response = SignedBuilderBid { message, signature }; - if accept_types.contains(&EncodingType::Ssz) { - (response.as_ssz_bytes(), EncodingType::Ssz) + if content_type == EncodingType::Ssz { + response.as_ssz_bytes() } else { // Return JSON for everything else; this is fine for the mock let versioned_response = GetHeaderResponse { @@ -159,7 +177,7 @@ async fn handle_get_header( data: response, metadata: Default::default(), }; - (serde_json::to_vec(&versioned_response).unwrap(), EncodingType::Json) + serde_json::to_vec(&versioned_response).unwrap() } } _ => { diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index 80aed0c2..1bea491d 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -1,3 +1,5 @@ +use std::collections::HashSet; + use alloy::{primitives::B256, rpc::types::beacon::relay::ValidatorRegistration}; use cb_common::{ pbs::{BuilderApiVersion, RelayClient, SignedBlindedBeaconBlock}, @@ -27,7 +29,7 @@ impl MockValidator { pub async fn do_get_header( &self, pubkey: Option, - accept: Option, + accept: HashSet, fork_name: ForkName, ) -> eyre::Result { let default_pubkey = bls_pubkey_from_hex( @@ -35,14 +37,24 @@ impl MockValidator { )?; let url = 
self.comm_boost.get_header_url(0, &B256::ZERO, &pubkey.unwrap_or(default_pubkey))?; - let res = self + let accept = match accept.len() { + 0 => None, + 1 => Some(accept.into_iter().next().unwrap().to_string()), + _ => { + let accept_strings: Vec = + accept.into_iter().map(|e| e.to_string()).collect(); + Some(accept_strings.join(", ")) + } + }; + let mut res = self .comm_boost .client .get(url) - .header(ACCEPT, &accept.unwrap_or(EncodingType::Json).to_string()) - .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()) - .send() - .await?; + .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()); + if let Some(accept_header) = accept { + res = res.header(ACCEPT, accept_header); + } + let res = res.send().await?; Ok(res) } diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index eebb0113..435d82de 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, time::Duration}; +use std::{collections::HashSet, sync::Arc, time::Duration}; use alloy::primitives::{B256, U256}; use cb_common::{ @@ -15,23 +15,92 @@ use cb_tests::{ utils::{generate_mock_relay, get_pbs_static_config, setup_test_env, to_pbs_config}, }; use eyre::Result; -use lh_types::ForkVersionDecode; +use lh_types::{ForkVersionDecode, beacon_response::EmptyMetadata}; use reqwest::StatusCode; use tracing::info; use tree_hash::TreeHash; +/// Test requesting JSON when the relay supports JSON #[tokio::test] async fn test_get_header() -> Result<()> { + test_get_header_impl( + 3200, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + 1, + ) + .await +} + +/// Test requesting SSZ when the relay supports SSZ +#[tokio::test] +async fn test_get_header_ssz() -> Result<()> { + test_get_header_impl( + 3210, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + 1, + ) + .await +} + +/// Test requesting SSZ when the relay only supports JSON, which 
should cause +/// PBS to retry internally with JSON +#[tokio::test] +async fn test_get_header_ssz_into_json() -> Result<()> { + test_get_header_impl( + 3220, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + 2, + ) + .await +} + +/// Test requesting multiple types when the relay supports SSZ, which should +/// return SSZ +#[tokio::test] +async fn test_get_header_multitype_ssz() -> Result<()> { + test_get_header_impl( + 3230, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Ssz]), + 1, + ) + .await +} + +/// Test requesting multiple types when the relay supports JSON, which should +/// return JSON +#[tokio::test] +async fn test_get_header_multitype_json() -> Result<()> { + test_get_header_impl( + 3240, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Json]), + 1, + ) + .await +} + +/// Core implementation for get_header tests +async fn test_get_header_impl( + pbs_port: u16, + accept_types: HashSet, + relay_types: HashSet, + expected_try_count: u64, +) -> Result<()> { + // Setup test environment setup_test_env(); let signer = random_secret(); let pubkey = signer.public_key(); - let chain = Chain::Holesky; - let pbs_port = 3200; let relay_port = pbs_port + 1; // Run a mock relay - let mock_state = Arc::new(MockRelayState::new(chain, signer)); + let mut mock_state = MockRelayState::new(chain, signer); + mock_state.supported_content_types = Arc::new(relay_types); + let mock_state = Arc::new(mock_state); let mock_relay = generate_mock_relay(relay_port, pubkey)?; tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); @@ -43,14 +112,39 @@ async fn test_get_header() -> Result<()> { // leave some time to start servers tokio::time::sleep(Duration::from_millis(100)).await; + // Send the get_header request let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None, None, 
ForkName::Electra).await?; + let res = mock_validator.do_get_header(None, accept_types.clone(), ForkName::Electra).await?; assert_eq!(res.status(), StatusCode::OK); - let res = serde_json::from_slice::(&res.bytes().await?)?; - - assert_eq!(mock_state.received_get_header(), 1); + // Get the content type + let content_type = match res + .headers() + .get(reqwest::header::CONTENT_TYPE) + .and_then(|ct| ct.to_str().ok()) + .unwrap() + { + ct if ct == EncodingType::Ssz.to_string() => EncodingType::Ssz, + ct if ct == EncodingType::Json.to_string() => EncodingType::Json, + _ => panic!("unexpected content type"), + }; + assert!(accept_types.contains(&content_type)); + + // Get the data + let res = match content_type { + EncodingType::Json => serde_json::from_slice::(&res.bytes().await?)?, + EncodingType::Ssz => { + let fork = + get_consensus_version_header(res.headers()).expect("missing fork version header"); + assert_eq!(fork, ForkName::Electra); + let data = SignedBuilderBid::from_ssz_bytes_by_fork(&res.bytes().await?, fork).unwrap(); + GetHeaderResponse { version: fork, data, metadata: EmptyMetadata::default() } + } + }; + + // Validate the data + assert_eq!(mock_state.received_get_header(), expected_try_count); assert_eq!(res.version, ForkName::Electra); assert_eq!(res.data.message.header().block_hash().0[0], 1); assert_eq!(res.data.message.header().parent_hash().0, B256::ZERO); @@ -64,52 +158,6 @@ async fn test_get_header() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_get_header_ssz() -> Result<()> { - setup_test_env(); - let signer = random_secret(); - let pubkey = signer.public_key(); - - let chain = Chain::Holesky; - let pbs_port = 3210; - let relay_port = pbs_port + 1; - - // Run a mock relay - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - let mock_relay = generate_mock_relay(relay_port, pubkey)?; - tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); - - // Run the PBS service - let config = to_pbs_config(chain, 
get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); - let state = PbsState::new(config); - tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); - - // leave some time to start servers - tokio::time::sleep(Duration::from_millis(100)).await; - - let mock_validator = MockValidator::new(pbs_port)?; - info!("Sending get header"); - let res = - mock_validator.do_get_header(None, Some(EncodingType::Ssz), ForkName::Electra).await?; - assert_eq!(res.status(), StatusCode::OK); - - let fork = get_consensus_version_header(res.headers()).expect("missing fork version header"); - assert_eq!(fork, ForkName::Electra); - let data = SignedBuilderBid::from_ssz_bytes_by_fork(&res.bytes().await?, fork).unwrap(); - - assert_eq!(mock_state.received_get_header(), 1); - assert_eq!(data.message.header().block_hash().0[0], 1); - assert_eq!(data.message.header().parent_hash().0, B256::ZERO); - assert_eq!(*data.message.value(), U256::from(10)); - assert_eq!(*data.message.pubkey(), BlsPublicKeyBytes::from(mock_state.signer.public_key())); - assert_eq!(data.message.header().timestamp(), timestamp_of_slot_start_sec(0, chain)); - assert_eq!( - data.signature, - sign_builder_root(chain, &mock_state.signer, data.message.tree_hash_root()) - ); - Ok(()) -} - #[tokio::test] async fn test_get_header_returns_204_if_relay_down() -> Result<()> { setup_test_env(); @@ -137,7 +185,7 @@ async fn test_get_header_returns_204_if_relay_down() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = mock_validator.do_get_header(None, None, ForkName::Electra).await?; + let res = mock_validator.do_get_header(None, HashSet::new(), ForkName::Electra).await?; assert_eq!(res.status(), StatusCode::NO_CONTENT); // 204 error assert_eq!(mock_state.received_get_header(), 0); // no header received diff --git a/tests/tests/pbs_mux.rs b/tests/tests/pbs_mux.rs index 34bc76de..ad1d4c05 100644 --- a/tests/tests/pbs_mux.rs +++ b/tests/tests/pbs_mux.rs @@ -1,4 +1,8 
@@ -use std::{collections::HashMap, sync::Arc, time::Duration}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, + time::Duration, +}; use cb_common::{ config::{HTTP_TIMEOUT_SECONDS_DEFAULT, MUXER_HTTP_MAX_LENGTH, RuntimeMuxConfig}, @@ -196,7 +200,7 @@ async fn test_mux() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header with default"); assert_eq!( - mock_validator.do_get_header(None, None, ForkName::Electra).await?.status(), + mock_validator.do_get_header(None, HashSet::new(), ForkName::Electra).await?.status(), StatusCode::OK ); assert_eq!(mock_state.received_get_header(), 1); // only default relay was used @@ -205,7 +209,7 @@ async fn test_mux() -> Result<()> { info!("Sending get header with mux"); assert_eq!( mock_validator - .do_get_header(Some(validator_pubkey), None, ForkName::Electra) + .do_get_header(Some(validator_pubkey), HashSet::new(), ForkName::Electra) .await? .status(), StatusCode::OK diff --git a/tests/tests/pbs_mux_refresh.rs b/tests/tests/pbs_mux_refresh.rs index 1c8d3cdc..d4a5888a 100644 --- a/tests/tests/pbs_mux_refresh.rs +++ b/tests/tests/pbs_mux_refresh.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, time::Duration}; +use std::{collections::HashSet, sync::Arc, time::Duration}; use cb_common::{ config::{MuxConfig, MuxKeysLoader, PbsMuxes}, @@ -109,8 +109,9 @@ async fn test_auto_refresh() -> Result<()> { // relay only since it hasn't been seen in the mux yet let mock_validator = MockValidator::new(pbs_port)?; info!("Sending get header"); - let res = - mock_validator.do_get_header(Some(new_mux_pubkey.clone()), None, ForkName::Electra).await?; + let res = mock_validator + .do_get_header(Some(new_mux_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 1); // default relay was used assert_eq!(mux_relay_state.received_get_header(), 0); // mux relay was not used @@ -138,16 +139,18 @@ async fn 
test_auto_refresh() -> Result<()> { assert!(logs_contain(&format!("fetched 2 pubkeys for registry mux {mux_relay_id}"))); // Try to run a get_header on the new pubkey - now it should use the mux relay - let res = - mock_validator.do_get_header(Some(new_mux_pubkey.clone()), None, ForkName::Electra).await?; + let res = mock_validator + .do_get_header(Some(new_mux_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 1); // default relay was not used here assert_eq!(mux_relay_state.received_get_header(), 1); // mux relay was used // Now try to do a get_header with the old pubkey - it should only use the // default relay - let res = - mock_validator.do_get_header(Some(default_pubkey.clone()), None, ForkName::Electra).await?; + let res = mock_validator + .do_get_header(Some(default_pubkey.clone()), HashSet::new(), ForkName::Electra) + .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 2); // default relay was used assert_eq!(mux_relay_state.received_get_header(), 1); // mux relay was not used @@ -165,7 +168,7 @@ async fn test_auto_refresh() -> Result<()> { // Try to do a get_header with the removed pubkey - it should only use the // default relay let res = mock_validator - .do_get_header(Some(existing_mux_pubkey.clone()), None, ForkName::Electra) + .do_get_header(Some(existing_mux_pubkey.clone()), HashSet::new(), ForkName::Electra) .await?; assert_eq!(res.status(), StatusCode::OK); assert_eq!(default_relay_state.received_get_header(), 3); // default relay was used From b22eed81c4ba9e41a62fe219d44edd8117a541e7 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 11:22:05 -0500 Subject: [PATCH 07/26] Added explicit lowercase matching to EncodingType --- crates/common/src/utils.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs 
index 221ca4d1..5486bcb0 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -510,7 +510,7 @@ impl std::fmt::Display for EncodingType { impl FromStr for EncodingType { type Err = String; fn from_str(value: &str) -> Result { - match value { + match value.to_ascii_lowercase().as_str() { "application/json" | "" => Ok(EncodingType::Json), "application/octet-stream" => Ok(EncodingType::Ssz), _ => Err(format!("unsupported encoding type: {value}")), From 0155533716f49b74675a56b721d7001cdc5db00e Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 11:48:12 -0500 Subject: [PATCH 08/26] Added the Fulu fork slot for Mainnet --- crates/common/src/types.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/common/src/types.rs b/crates/common/src/types.rs index 077b4ccd..6d6d55f1 100644 --- a/crates/common/src/types.rs +++ b/crates/common/src/types.rs @@ -233,7 +233,8 @@ impl KnownChain { pub fn fulu_fork_slot(&self) -> u64 { match self { - KnownChain::Mainnet | KnownChain::Helder => u64::MAX, + KnownChain::Mainnet => 13164544, + KnownChain::Helder => u64::MAX, KnownChain::Holesky => 5283840, KnownChain::Sepolia => 8724480, KnownChain::Hoodi => 1622016, From 3ad487b6668f93662c3c07a721ad82a9b762bd8d Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 12:52:40 -0500 Subject: [PATCH 09/26] Added the Fulu fork slot for Mainnet (#402) --- crates/common/src/types.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/common/src/types.rs b/crates/common/src/types.rs index 077b4ccd..6d6d55f1 100644 --- a/crates/common/src/types.rs +++ b/crates/common/src/types.rs @@ -233,7 +233,8 @@ impl KnownChain { pub fn fulu_fork_slot(&self) -> u64 { match self { - KnownChain::Mainnet | KnownChain::Helder => u64::MAX, + KnownChain::Mainnet => 13164544, + KnownChain::Helder => u64::MAX, KnownChain::Holesky => 5283840, KnownChain::Sepolia => 8724480, KnownChain::Hoodi => 1622016, From 
8ebfbd039e4bf780e97b5483b6beeef32b078163 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 15:32:02 -0500 Subject: [PATCH 10/26] Cleaned up some error handling --- crates/pbs/src/routes/get_header.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index fcb95c76..44669ec1 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -38,11 +38,7 @@ pub async fn handle_get_header>( let accept_types = get_accept_types(&req_headers).map_err(|e| { error!(%e, "error parsing accept header"); PbsClientError::DecodeError(format!("error parsing accept header: {e}")) - }); - if let Err(e) = accept_types { - return Ok((StatusCode::BAD_REQUEST, e).into_response()); - } - let accept_types = accept_types.unwrap(); + })?; info!(ua, ms_into_slot, "new request"); From 2499bd5bb416667a77c71e5ba010a217085452a5 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 15:49:53 -0500 Subject: [PATCH 11/26] Made some strings static --- crates/common/src/utils.rs | 25 ++++++++++++++++--------- crates/pbs/src/mev_boost/get_header.rs | 2 +- crates/pbs/src/routes/get_header.rs | 2 +- crates/pbs/src/routes/submit_block.rs | 2 +- 4 files changed, 19 insertions(+), 12 deletions(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 5486bcb0..a680fa76 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -46,9 +46,9 @@ use crate::{ types::{BlsPublicKey, Chain, Jwt, JwtClaims, ModuleId}, }; -const APPLICATION_JSON: &str = "application/json"; -const APPLICATION_OCTET_STREAM: &str = "application/octet-stream"; -const WILDCARD: &str = "*/*"; +pub const APPLICATION_JSON: &str = "application/json"; +pub const APPLICATION_OCTET_STREAM: &str = "application/octet-stream"; +pub const WILDCARD: &str = "*/*"; const MILLIS_PER_SECOND: u64 = 1_000; pub const CONSENSUS_VERSION_HEADER: &str = "Eth-Consensus-Version"; 
@@ -498,21 +498,28 @@ pub enum EncodingType { Ssz, } -impl std::fmt::Display for EncodingType { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +impl EncodingType { + /// Get the content type string for the encoding type + pub fn content_type(&self) -> &str { match self { - EncodingType::Json => write!(f, "application/json"), - EncodingType::Ssz => write!(f, "application/octet-stream"), + EncodingType::Json => APPLICATION_JSON, + EncodingType::Ssz => APPLICATION_OCTET_STREAM, } } } +impl std::fmt::Display for EncodingType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.content_type()) + } +} + impl FromStr for EncodingType { type Err = String; fn from_str(value: &str) -> Result { match value.to_ascii_lowercase().as_str() { - "application/json" | "" => Ok(EncodingType::Json), - "application/octet-stream" => Ok(EncodingType::Ssz), + APPLICATION_JSON | "" => Ok(EncodingType::Json), + APPLICATION_OCTET_STREAM => Ok(EncodingType::Ssz), _ => Err(format!("unsupported encoding type: {value}")), } } diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index fd0e13c9..4962da35 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -412,7 +412,7 @@ async fn send_one_get_header( // Also resets the start request timer original_headers.remove(ACCEPT); original_headers - .insert(ACCEPT, HeaderValue::from_str(&EncodingType::Json.to_string()).unwrap()); + .insert(ACCEPT, HeaderValue::from_str(EncodingType::Json.content_type()).unwrap()); let config = RequestContext { url: req_config.url.clone(), timeout_ms: req_config.timeout_ms - elapsed, diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 44669ec1..1f29030e 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -73,7 +73,7 @@ pub async fn handle_get_header>( // This won't actually fail since the 
string is a const let content_type_header = - HeaderValue::from_str(&EncodingType::Ssz.to_string()).unwrap(); + HeaderValue::from_str(EncodingType::Ssz.content_type()).unwrap(); res.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); res.headers_mut().insert(CONTENT_TYPE, content_type_header); diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index 53b13811..f3498f97 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -110,7 +110,7 @@ async fn handle_submit_block_impl>( // This won't actually fail since the string is a const let content_type_header = - HeaderValue::from_str(&EncodingType::Ssz.to_string()).unwrap(); + HeaderValue::from_str(EncodingType::Ssz.content_type()).unwrap(); response .headers_mut() From 41d879e558d4e0b31032600f91084b4b37dec4ca Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 10 Nov 2025 16:38:27 -0500 Subject: [PATCH 12/26] PbsClientError can now be created from BodyDeserializeError --- crates/pbs/src/error.rs | 7 +++++++ crates/pbs/src/routes/get_header.rs | 4 +++- crates/pbs/src/routes/submit_block.rs | 9 ++++----- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/crates/pbs/src/error.rs b/crates/pbs/src/error.rs index 4ebdc18f..1214fd6a 100644 --- a/crates/pbs/src/error.rs +++ b/crates/pbs/src/error.rs @@ -1,4 +1,5 @@ use axum::{http::StatusCode, response::IntoResponse}; +use cb_common::utils::BodyDeserializeError; #[derive(Debug)] /// Errors that the PbsService returns to client @@ -22,6 +23,12 @@ impl PbsClientError { } } +impl From for PbsClientError { + fn from(e: BodyDeserializeError) -> Self { + PbsClientError::DecodeError(format!("failed to deserialize body: {e}")) + } +} + impl IntoResponse for PbsClientError { fn into_response(self) -> axum::response::Response { let msg = match &self { diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 1f29030e..896ab781 100644 ---
a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -58,7 +58,9 @@ pub async fn handle_get_header>( let consensus_version_header = match HeaderValue::from_str( &max_bid.version.to_string(), ) { - Ok(consensus_version_header) => Ok(consensus_version_header), + Ok(consensus_version_header) => { + Ok::(consensus_version_header) + } Err(e) => { if accepts_json { info!("sending response as JSON"); diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index f3498f97..b7b0c53a 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -47,10 +47,7 @@ async fn handle_submit_block_impl>( api_version: BuilderApiVersion, ) -> Result { let signed_blinded_block = - Arc::new(deserialize_body(&req_headers, raw_request.body_bytes).await.map_err(|e| { - error!(%e, "failed to deserialize signed blinded block"); - PbsClientError::DecodeError(format!("failed to deserialize body: {e}")) - })?); + Arc::new(deserialize_body(&req_headers, raw_request.body_bytes).await?); tracing::Span::current().record("slot", signed_blinded_block.slot().as_u64() as i64); tracing::Span::current() .record("block_hash", tracing::field::debug(signed_blinded_block.block_hash())); @@ -94,7 +91,9 @@ async fn handle_submit_block_impl>( let mut response = payload_and_blobs.data.as_ssz_bytes().into_response(); let consensus_version_header = match HeaderValue::from_str(&payload_and_blobs.version.to_string()) { - Ok(consensus_version_header) => Ok(consensus_version_header), + Ok(consensus_version_header) => { + Ok::(consensus_version_header) + } Err(e) => { if accepts_json { info!("sending response as JSON"); From 42b8060e40f7b432cc6db34ddeddbbb85dab4188 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 11 Nov 2025 10:03:53 -0500 Subject: [PATCH 13/26] Fix clippy --- crates/pbs/src/mev_boost/get_header.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs 
b/crates/pbs/src/mev_boost/get_header.rs index 4962da35..10fecd9e 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -465,7 +465,7 @@ async fn send_one_get_header( })?; let data = SignedBuilderBid::from_ssz_bytes_by_fork(&response_bytes, fork) .map_err(|e| PbsError::RelayResponse { - error_msg: (format!("error decoding relay payload: {:?}", e)).to_string(), + error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), code: (code.as_u16()), })?; GetHeaderResponse { version: fork, data, metadata: Default::default() } From a9680f764504cb60ea3d8249608bc185563fde89 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 11 Nov 2025 10:37:23 -0500 Subject: [PATCH 14/26] Removed consensus-version-header from submit_block response --- crates/pbs/src/routes/submit_block.rs | 25 ++----------------------- 1 file changed, 2 insertions(+), 23 deletions(-) diff --git a/crates/pbs/src/routes/submit_block.rs b/crates/pbs/src/routes/submit_block.rs index b7b0c53a..539ce631 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -8,8 +8,8 @@ use axum::{ use cb_common::{ pbs::{BuilderApiVersion, GetPayloadInfo}, utils::{ - CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, deserialize_body, get_accept_types, - get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, + EncodingType, RawRequest, deserialize_body, get_accept_types, get_user_agent, + timestamp_of_slot_start_millis, utcnow_ms, }, }; use reqwest::{StatusCode, header::CONTENT_TYPE}; @@ -89,31 +89,10 @@ async fn handle_submit_block_impl>( // Try SSZ if accepts_ssz { let mut response = payload_and_blobs.data.as_ssz_bytes().into_response(); - let consensus_version_header = - match HeaderValue::from_str(&payload_and_blobs.version.to_string()) { - Ok(consensus_version_header) => { - Ok::(consensus_version_header) - } - Err(e) => { - if accepts_json { - info!("sending response as JSON"); - return Ok((StatusCode::OK, 
axum::Json(payload_and_blobs)) - .into_response()); - } else { - return Err(PbsClientError::RelayError(format!( - "error decoding consensus version from relay payload: {e}" - ))); - } - } - }?; // This won't actually fail since the string is a const let content_type_header = HeaderValue::from_str(EncodingType::Ssz.content_type()).unwrap(); - - response - .headers_mut() - .insert(CONSENSUS_VERSION_HEADER, consensus_version_header); response.headers_mut().insert(CONTENT_TYPE, content_type_header); info!("sending response as SSZ"); return Ok(response); From d7bde7fea61e4cebde13769216b812391d8450f6 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 11 Nov 2025 12:10:56 -0500 Subject: [PATCH 15/26] Added multi-type support to submit_block --- crates/pbs/src/mev_boost/get_header.rs | 101 +++++++------- crates/pbs/src/mev_boost/submit_block.rs | 166 ++++++++++++++++++----- 2 files changed, 180 insertions(+), 87 deletions(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index 10fecd9e..a722654f 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -310,56 +310,6 @@ struct ValidationContext { parent_block: Arc>>, } -async fn send_get_header_impl( - relay: &RelayClient, - mut req_config: RequestContext, -) -> Result<(Response, u64, Option), PbsError> { - // the timestamp in the header is the consensus block time which is fixed, - // use the beginning of the request as proxy to make sure we use only the - // last one received - let start_request_time = utcnow_ms(); - req_config.headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); - - // The timeout header indicating how long a relay has to respond, so they can - // minimize timing games without losing the bid - req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms)); - - let res = match relay - .client - .get(req_config.url) - 
.timeout(Duration::from_millis(req_config.timeout_ms)) - .headers(req_config.headers) - .send() - .await - { - Ok(res) => res, - Err(err) => { - RELAY_STATUS_CODE - .with_label_values(&[TIMEOUT_ERROR_CODE_STR, GET_HEADER_ENDPOINT_TAG, &relay.id]) - .inc(); - return Err(err.into()); - } - }; - - // Get the content type; this is only really useful for OK responses, and - // doesn't handle encoding types besides SSZ and JSON - let mut content_type: Option = None; - if res.status() == StatusCode::OK && - let Some(header) = res.headers().get(CONTENT_TYPE) - { - let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { - error_msg: format!("cannot decode content-type header: {e}").to_string(), - code: (res.status().as_u16()), - })?; - if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) { - content_type = Some(EncodingType::Ssz) - } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) { - content_type = Some(EncodingType::Json) - } - } - Ok((res, start_request_time, content_type)) -} - async fn send_one_get_header( params: GetHeaderParams, relay: RelayClient, @@ -410,7 +360,6 @@ async fn send_one_get_header( // Resubmit the request with JSON accept header // Also resets the start request timer - original_headers.remove(ACCEPT); original_headers .insert(ACCEPT, HeaderValue::from_str(EncodingType::Json.content_type()).unwrap()); let config = RequestContext { @@ -579,6 +528,56 @@ async fn send_one_get_header( Ok((start_request_time, Some(get_header_response))) } +async fn send_get_header_impl( + relay: &RelayClient, + mut req_config: RequestContext, +) -> Result<(Response, u64, Option), PbsError> { + // the timestamp in the header is the consensus block time which is fixed, + // use the beginning of the request as proxy to make sure we use only the + // last one received + let start_request_time = utcnow_ms(); + req_config.headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); + + // The timeout 
header indicating how long a relay has to respond, so they can + // minimize timing games without losing the bid + req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms)); + + let res = match relay + .client + .get(req_config.url) + .timeout(Duration::from_millis(req_config.timeout_ms)) + .headers(req_config.headers) + .send() + .await + { + Ok(res) => res, + Err(err) => { + RELAY_STATUS_CODE + .with_label_values(&[TIMEOUT_ERROR_CODE_STR, GET_HEADER_ENDPOINT_TAG, &relay.id]) + .inc(); + return Err(err.into()); + } + }; + + // Get the content type; this is only really useful for OK responses, and + // doesn't handle encoding types besides SSZ and JSON + let mut content_type: Option = None; + if res.status() == StatusCode::OK && + let Some(header) = res.headers().get(CONTENT_TYPE) + { + let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { + error_msg: format!("cannot decode content-type header: {e}").to_string(), + code: (res.status().as_u16()), + })?; + if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) { + content_type = Some(EncodingType::Ssz) + } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) { + content_type = Some(EncodingType::Json) + } + } + Ok((res, start_request_time, content_type)) +} + struct HeaderData { block_hash: B256, parent_hash: B256, diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs index 2b10dcaa..d6a710bc 100644 --- a/crates/pbs/src/mev_boost/submit_block.rs +++ b/crates/pbs/src/mev_boost/submit_block.rs @@ -8,15 +8,21 @@ use alloy::{eips::eip7594::CELLS_PER_EXT_BLOB, primitives::B256}; use axum::http::{HeaderMap, HeaderValue}; use cb_common::{ pbs::{ - BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, ForkName, HEADER_CONSENSUS_VERSION, - HEADER_START_TIME_UNIX_MS, KzgCommitments, RelayClient, SignedBlindedBeaconBlock, - SubmitBlindedBlockResponse, + BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, 
ForkName, ForkVersionDecode, + HEADER_CONSENSUS_VERSION, HEADER_START_TIME_UNIX_MS, KzgCommitments, PayloadAndBlobs, + RelayClient, SignedBlindedBeaconBlock, SubmitBlindedBlockResponse, error::{PbsError, ValidationError}, }, - utils::{get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms}, + utils::{ + EncodingType, get_accept_types, get_user_agent_with_version, read_chunked_body_with_max, + utcnow_ms, + }, }; use futures::{FutureExt, future::select_ok}; -use reqwest::header::USER_AGENT; +use reqwest::{ + Response, StatusCode, + header::{ACCEPT, CONTENT_TYPE, USER_AGENT}, +}; use tracing::{debug, warn}; use url::Url; @@ -155,34 +161,54 @@ async fn send_submit_block( api_version: &BuilderApiVersion, fork_name: ForkName, ) -> Result, PbsError> { - let start_request = Instant::now(); - let res = match relay - .client - .post(url) - .timeout(Duration::from_millis(timeout_ms)) - .headers(headers) - .json(&signed_blinded_block) - .send() - .await - { - Ok(res) => res, - Err(err) => { - RELAY_STATUS_CODE - .with_label_values(&[ - TIMEOUT_ERROR_CODE_STR, - SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, - &relay.id, - ]) - .inc(); - return Err(err.into()); - } - }; + let mut original_headers = headers.clone(); + + // Check which types this request is for + let accept_types = get_accept_types(&headers).map_err(|e| { + PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string()) + })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); + + // Send the request + let mut start_request = Instant::now(); + let (mut res, mut content_type) = + send_submit_block_impl(url.clone(), signed_blinded_block, relay, headers, timeout_ms) + .await?; + let mut code = res.status(); + + // If the request only supports SSZ, but the relay only supports JSON, resubmit + // to the relay with JSON - we'll convert it ourselves + if code == StatusCode::NOT_ACCEPTABLE && accepts_ssz && !accepts_json { + 
debug!( + relay_id = relay.id.as_ref(), + "relay does not support SSZ, resubmitting request with JSON accept header" + ); + + // Resubmit the request with JSON accept header + let elapsed = start_request.elapsed().as_millis() as u64; + original_headers + .insert(ACCEPT, HeaderValue::from_str(EncodingType::Json.content_type()).unwrap()); + start_request = Instant::now(); + (res, content_type) = send_submit_block_impl( + url, + signed_blinded_block, + relay, + original_headers, + timeout_ms - elapsed, + ) + .await?; + code = res.status(); + } + + // Get the consensus fork version if provided (to avoid cloning later) + let content_type_header = res.headers().get(CONTENT_TYPE).cloned(); + let request_latency = start_request.elapsed(); RELAY_LATENCY .with_label_values(&[SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) .observe(request_latency.as_secs_f64()); - let code = res.status(); RELAY_STATUS_CODE .with_label_values(&[code.as_str(), SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) .inc(); @@ -211,14 +237,33 @@ async fn send_submit_block( return Ok(None); } - let block_response = match serde_json::from_slice::(&response_bytes) - { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); + // Regenerate the block from the response + let block_response = match content_type { + Some(EncodingType::Ssz) => { + let data = PayloadAndBlobs::from_ssz_bytes_by_fork(&response_bytes, fork_name) + .map_err(|e| PbsError::RelayResponse { + error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), + code: (code.as_u16()), + })?; + SubmitBlindedBlockResponse { version: fork_name, data, metadata: Default::default() } + } + Some(EncodingType::Json) => { + match serde_json::from_slice::(&response_bytes) { + Ok(parsed) => parsed, + Err(err) => { + return Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(&response_bytes).into_owned(), + }); + } + } + } + None => { + let 
error_msg = match content_type_header { + None => "relay response missing content type header".to_string(), + Some(ct) => format!("relay response has unsupported content type {ct:?}"), + }; + return Err(PbsError::RelayResponse { error_msg, code: code.as_u16() }); } }; @@ -269,6 +314,55 @@ async fn send_submit_block( Ok(Some(block_response)) } +async fn send_submit_block_impl( + url: Url, + signed_blinded_block: &SignedBlindedBeaconBlock, + relay: &RelayClient, + headers: HeaderMap, + timeout_ms: u64, +) -> Result<(Response, Option), PbsError> { + // Send the request + let res = match relay + .client + .post(url) + .timeout(Duration::from_millis(timeout_ms)) + .headers(headers) + .json(&signed_blinded_block) + .send() + .await + { + Ok(res) => res, + Err(err) => { + RELAY_STATUS_CODE + .with_label_values(&[ + TIMEOUT_ERROR_CODE_STR, + SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, + &relay.id, + ]) + .inc(); + return Err(err.into()); + } + }; + + // Get the content type; this is only really useful for OK responses, and + // doesn't handle encoding types besides SSZ and JSON + let mut content_type: Option = None; + if res.status() == StatusCode::OK && + let Some(header) = res.headers().get(CONTENT_TYPE) + { + let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { + error_msg: format!("cannot decode content-type header: {e}").to_string(), + code: (res.status().as_u16()), + })?; + if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) { + content_type = Some(EncodingType::Ssz) + } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) { + content_type = Some(EncodingType::Json) + } + } + Ok((res, content_type)) +} + fn validate_unblinded_block( expected_block_hash: B256, got_block_hash: B256, From ad3f01952c245d46165f67d7dd825f3849477cac Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 11 Nov 2025 14:44:08 -0500 Subject: [PATCH 16/26] Updated the mock relay with multi-type support on submit_block --- tests/src/mock_relay.rs | 51 
++++++++++++++++++----------------------- 1 file changed, 22 insertions(+), 29 deletions(-) diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index bcdd771c..1da110f7 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -223,21 +223,27 @@ async fn handle_submit_block_v1( raw_request: RawRequest, ) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); - let accept_types = get_accept_types(&headers); + let accept_types = get_accept_types(&headers) + .map_err(|e| (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}"))); if let Err(e) = accept_types { - error!(%e, "error parsing accept header"); - return (StatusCode::BAD_REQUEST, format!("error parsing accept header: {e}")) - .into_response(); + return e.into_response(); } let accept_types = accept_types.unwrap(); - let consensus_version_header = - get_consensus_version_header(&headers).unwrap_or(ForkName::Electra); + let content_type = if state.supported_content_types.contains(&EncodingType::Ssz) && + accept_types.contains(&EncodingType::Ssz) + { + EncodingType::Ssz + } else if state.supported_content_types.contains(&EncodingType::Json) && + accept_types.contains(&EncodingType::Json) + { + EncodingType::Json + } else { + return (StatusCode::NOT_ACCEPTABLE, "No acceptable content type found".to_string()) + .into_response(); + }; - let (data, accept_type) = if state.large_body() { - ( - vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE], - accept_types.iter().next().unwrap_or(&EncodingType::Json), - ) + let data = if state.large_body() { + vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE] } else { let mut execution_payload = ExecutionPayloadElectra::test_random(); let submit_block = deserialize_body(&headers, raw_request.body_bytes).await.map_err(|e| { @@ -260,34 +266,21 @@ async fn handle_submit_block_v1( let response = PayloadAndBlobs { execution_payload: execution_payload.into(), blobs_bundle }; - if accept_types.contains(&EncodingType::Ssz) { - match 
consensus_version_header { - // Response isn't versioned for SSZ - ForkName::Electra => (response.as_ssz_bytes(), &EncodingType::Ssz), - _ => { - return ( - StatusCode::BAD_REQUEST, - format!("Unsupported fork {consensus_version_header}"), - ) - .into_response(); - } - } + if content_type == EncodingType::Ssz { + response.as_ssz_bytes() } else { - // Response is versioned for JSON + // Return JSON for everything else; this is fine for the mock let response = SubmitBlindedBlockResponse { version: ForkName::Electra, metadata: Default::default(), data: response, }; - (serde_json::to_vec(&response).unwrap(), &EncodingType::Json) + serde_json::to_vec(&response).unwrap() } }; let mut response = (StatusCode::OK, data).into_response(); - let consensus_version_header = - HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); - let content_type_header = HeaderValue::from_str(&accept_type.to_string()).unwrap(); - response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + let content_type_header = HeaderValue::from_str(&content_type.to_string()).unwrap(); response.headers_mut().insert(CONTENT_TYPE, content_type_header); response } From 7dba8d174298d00ecd11ad573e5c36c0f048a819 Mon Sep 17 00:00:00 2001 From: pedro <30592532+PedroCM96@users.noreply.github.com> Date: Tue, 11 Nov 2025 21:09:57 +0100 Subject: [PATCH 17/26] Support lido modules (#392) Co-authored-by: Joe Clapis Co-authored-by: ltitanb <163874448+ltitanb@users.noreply.github.com> Co-authored-by: Nils Effinghausen --- Cargo.lock | 1 + config.example.toml | 2 +- crates/common/Cargo.toml | 1 + .../src/abi/LidoCSModuleNORegistry.json | 37 +++ crates/common/src/config/mux.rs | 207 ++++++------- crates/common/src/interop/lido/mod.rs | 2 + crates/common/src/interop/lido/types.rs | 15 + crates/common/src/interop/lido/utils.rs | 271 ++++++++++++++++++ crates/common/src/interop/mod.rs | 1 + crates/common/src/types.rs | 22 +- examples/configs/pbs_mux.toml | 2 +- 
tests/tests/pbs_mux_refresh.rs | 1 + 12 files changed, 445 insertions(+), 117 deletions(-) create mode 100644 crates/common/src/abi/LidoCSModuleNORegistry.json create mode 100644 crates/common/src/interop/lido/mod.rs create mode 100644 crates/common/src/interop/lido/types.rs create mode 100644 crates/common/src/interop/lido/utils.rs diff --git a/Cargo.lock b/Cargo.lock index b7d77991..83f908bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1625,6 +1625,7 @@ dependencies = [ "eyre", "futures", "jsonwebtoken", + "lazy_static", "pbkdf2 0.12.2", "rand 0.9.2", "rayon", diff --git a/config.example.toml b/config.example.toml index 2bcd0efe..6ea6a1b6 100644 --- a/config.example.toml +++ b/config.example.toml @@ -150,7 +150,7 @@ validator_pubkeys = [ # OPTIONAL loader = "./tests/data/mux_keys.example.json" # loader = { url = "http://localhost:8000/keys" } -# loader = { registry = "lido", node_operator_id = 8, enable_refreshing = false } +# loader = { registry = "lido", node_operator_id = 8, lido_module_id = 1, enable_refreshing = false } # loader = { registry = "ssv", node_operator_id = 8, enable_refreshing = false } late_in_slot_time_ms = 1500 timeout_get_header_ms = 900 diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 57a5fbb3..35367a12 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -26,6 +26,7 @@ ethereum_ssz_derive.workspace = true eyre.workspace = true futures.workspace = true jsonwebtoken.workspace = true +lazy_static.workspace = true lh_eth2.workspace = true lh_eth2_keystore.workspace = true lh_types.workspace = true diff --git a/crates/common/src/abi/LidoCSModuleNORegistry.json b/crates/common/src/abi/LidoCSModuleNORegistry.json new file mode 100644 index 00000000..a0b98aab --- /dev/null +++ b/crates/common/src/abi/LidoCSModuleNORegistry.json @@ -0,0 +1,37 @@ +[ + { + "constant": true, + "inputs": [ + { "name": "nodeOperatorId", "type": "uint256" } + ], + "name": "getNodeOperatorSummary", + "outputs": [ + { "name": 
"targetLimitMode", "type": "uint256" }, + { "name": "targetValidatorsCount", "type": "uint256" }, + { "name": "stuckValidatorsCount", "type": "uint256" }, + { "name": "refundedValidatorsCount", "type": "uint256" }, + { "name": "stuckPenaltyEndTimestamp", "type": "uint256" }, + { "name": "totalExitedValidators", "type": "uint256" }, + { "name": "totalDepositedValidators", "type": "uint256" }, + { "name": "depositableValidatorsCount", "type": "uint256" } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + }, + { + "constant": true, + "inputs": [ + { "name": "nodeOperatorId", "type": "uint256" }, + { "name": "startIndex", "type": "uint256" }, + { "name": "keysCount", "type": "uint256" } + ], + "name": "getSigningKeys", + "outputs": [ + { "name": "", "type": "bytes" } + ], + "payable": false, + "stateMutability": "view", + "type": "function" + } +] diff --git a/crates/common/src/config/mux.rs b/crates/common/src/config/mux.rs index 419a097b..27950d1c 100644 --- a/crates/common/src/config/mux.rs +++ b/crates/common/src/config/mux.rs @@ -7,10 +7,9 @@ use std::{ }; use alloy::{ - primitives::{Address, U256, address}, + primitives::{Address, Bytes, U256}, providers::ProviderBuilder, rpc::{client::RpcClient, types::beacon::constants::BLS_PUBLIC_KEY_BYTES_LEN}, - sol, transports::http::Http, }; use eyre::{Context, bail, ensure}; @@ -22,7 +21,7 @@ use url::Url; use super::{MUX_PATH_ENV, PbsConfig, RelayConfig, load_optional_env_var}; use crate::{ config::{remove_duplicate_keys, safe_read_http_response}, - interop::ssv::utils::fetch_ssv_pubkeys_from_url, + interop::{lido::utils::*, ssv::utils::*}, pbs::RelayClient, types::{BlsPublicKey, Chain}, utils::default_bool, @@ -193,6 +192,8 @@ pub enum MuxKeysLoader { Registry { registry: NORegistry, node_operator_id: u64, + #[serde(default)] + lido_module_id: Option, #[serde(default = "default_bool::")] enable_refreshing: bool, }, @@ -239,30 +240,33 @@ impl MuxKeysLoader { .wrap_err("failed to fetch mux keys 
from HTTP endpoint") } - Self::Registry { registry, node_operator_id, enable_refreshing: _ } => match registry { - NORegistry::Lido => { - let Some(rpc_url) = rpc_url else { - bail!("Lido registry requires RPC URL to be set in the PBS config"); - }; - - fetch_lido_registry_keys( - rpc_url, - chain, - U256::from(*node_operator_id), - http_timeout, - ) - .await - } - NORegistry::SSV => { - fetch_ssv_pubkeys( - ssv_api_url, - chain, - U256::from(*node_operator_id), - http_timeout, - ) - .await + Self::Registry { registry, node_operator_id, lido_module_id, enable_refreshing: _ } => { + match registry { + NORegistry::Lido => { + let Some(rpc_url) = rpc_url else { + bail!("Lido registry requires RPC URL to be set in the PBS config"); + }; + + fetch_lido_registry_keys( + rpc_url, + chain, + U256::from(*node_operator_id), + lido_module_id.unwrap_or(1), + http_timeout, + ) + .await + } + NORegistry::SSV => { + fetch_ssv_pubkeys( + ssv_api_url, + chain, + U256::from(*node_operator_id), + http_timeout, + ) + .await + } } - }, + } }?; // Remove duplicates @@ -285,63 +289,28 @@ fn get_mux_path(mux_id: &str) -> String { format!("/{mux_id}-mux_keys.json") } -sol! { - #[allow(missing_docs)] - #[sol(rpc)] - LidoRegistry, - "src/abi/LidoNORegistry.json" -} - -// Fetching Lido Curated Module -fn lido_registry_address(chain: Chain) -> eyre::Result
{ - match chain { - Chain::Mainnet => Ok(address!("55032650b14df07b85bF18A3a3eC8E0Af2e028d5")), - Chain::Holesky => Ok(address!("595F64Ddc3856a3b5Ff4f4CC1d1fb4B46cFd2bAC")), - Chain::Hoodi => Ok(address!("5cDbE1590c083b5A2A64427fAA63A7cfDB91FbB5")), - Chain::Sepolia => Ok(address!("33d6E15047E8644F8DDf5CD05d202dfE587DA6E3")), - _ => bail!("Lido registry not supported for chain: {chain:?}"), - } -} - -async fn fetch_lido_registry_keys( - rpc_url: Url, - chain: Chain, - node_operator_id: U256, - http_timeout: Duration, -) -> eyre::Result> { - debug!(?chain, %node_operator_id, "loading operator keys from Lido registry"); - - // Create an RPC provider with HTTP timeout support - let client = Client::builder().timeout(http_timeout).build()?; - let http = Http::with_client(client, rpc_url); - let is_local = http.guess_local(); - let rpc_client = RpcClient::new(http, is_local); - let provider = ProviderBuilder::new().connect_client(rpc_client); - - let registry_address = lido_registry_address(chain)?; - let registry = LidoRegistry::new(registry_address, provider); - - let total_keys = registry.getTotalSigningKeyCount(node_operator_id).call().await?.try_into()?; - +async fn collect_registry_keys( + total_keys: u64, + mut fetch_batch: F, +) -> eyre::Result> +where + F: FnMut(u64, u64) -> Fut, + Fut: std::future::Future>, +{ if total_keys == 0 { return Ok(Vec::new()); } - debug!("fetching {total_keys} total keys"); const CALL_BATCH_SIZE: u64 = 250u64; let mut keys = vec![]; - let mut offset = 0; + let mut offset: u64 = 0; while offset < total_keys { let limit = CALL_BATCH_SIZE.min(total_keys - offset); - let pubkeys = registry - .getSigningKeys(node_operator_id, U256::from(offset), U256::from(limit)) - .call() - .await? 
- .pubkeys; + let pubkeys = fetch_batch(offset, limit).await?; ensure!( pubkeys.len() % BLS_PUBLIC_KEY_BYTES_LEN == 0, @@ -368,6 +337,59 @@ async fn fetch_lido_registry_keys( Ok(keys) } +async fn fetch_lido_csm_registry_keys( + registry_address: Address, + rpc_client: RpcClient, + node_operator_id: U256, +) -> eyre::Result> { + let provider = ProviderBuilder::new().connect_client(rpc_client); + let registry = get_lido_csm_registry(registry_address, provider); + let total_keys = fetch_lido_csm_keys_total(®istry, node_operator_id).await?; + + collect_registry_keys(total_keys, |offset, limit| { + fetch_lido_csm_keys_batch(®istry, node_operator_id, offset, limit) + }) + .await +} + +async fn fetch_lido_module_registry_keys( + registry_address: Address, + rpc_client: RpcClient, + node_operator_id: U256, +) -> eyre::Result> { + let provider = ProviderBuilder::new().connect_client(rpc_client); + let registry = get_lido_module_registry(registry_address, provider); + let total_keys: u64 = fetch_lido_module_keys_total(®istry, node_operator_id).await?; + + collect_registry_keys(total_keys, |offset, limit| { + fetch_lido_module_keys_batch(®istry, node_operator_id, offset, limit) + }) + .await +} + +async fn fetch_lido_registry_keys( + rpc_url: Url, + chain: Chain, + node_operator_id: U256, + lido_module_id: u8, + http_timeout: Duration, +) -> eyre::Result> { + debug!(?chain, %node_operator_id, ?lido_module_id, "loading operator keys from Lido registry"); + + // Create an RPC provider with HTTP timeout support + let client = Client::builder().timeout(http_timeout).build()?; + let http = Http::with_client(client, rpc_url); + let is_local = http.guess_local(); + let rpc_client = RpcClient::new(http, is_local); + let registry_address = lido_registry_address(chain, lido_module_id)?; + + if is_csm_module(chain, lido_module_id) { + fetch_lido_csm_registry_keys(registry_address, rpc_client, node_operator_id).await + } else { + fetch_lido_module_registry_keys(registry_address, 
rpc_client, node_operator_id).await + } +} + async fn fetch_ssv_pubkeys( mut api_url: Url, chain: Chain, @@ -421,46 +443,3 @@ async fn fetch_ssv_pubkeys( Ok(pubkeys) } - -#[cfg(test)] -mod tests { - use alloy::{primitives::U256, providers::ProviderBuilder}; - use url::Url; - - use super::*; - - #[tokio::test] - async fn test_lido_registry_address() -> eyre::Result<()> { - let url = Url::parse("https://ethereum-rpc.publicnode.com")?; - let provider = ProviderBuilder::new().connect_http(url); - - let registry = - LidoRegistry::new(address!("55032650b14df07b85bF18A3a3eC8E0Af2e028d5"), provider); - - const LIMIT: usize = 3; - let node_operator_id = U256::from(1); - - let total_keys: u64 = - registry.getTotalSigningKeyCount(node_operator_id).call().await?.try_into()?; - - assert!(total_keys > LIMIT as u64); - - let pubkeys = registry - .getSigningKeys(node_operator_id, U256::ZERO, U256::from(LIMIT)) - .call() - .await? - .pubkeys; - - let mut vec = vec![]; - for chunk in pubkeys.chunks(BLS_PUBLIC_KEY_BYTES_LEN) { - vec.push( - BlsPublicKey::deserialize(chunk) - .map_err(|_| eyre::eyre!("invalid BLS public key"))?, - ); - } - - assert_eq!(vec.len(), LIMIT); - - Ok(()) - } -} diff --git a/crates/common/src/interop/lido/mod.rs b/crates/common/src/interop/lido/mod.rs new file mode 100644 index 00000000..b4ab6a6a --- /dev/null +++ b/crates/common/src/interop/lido/mod.rs @@ -0,0 +1,2 @@ +pub mod types; +pub mod utils; diff --git a/crates/common/src/interop/lido/types.rs b/crates/common/src/interop/lido/types.rs new file mode 100644 index 00000000..48aad122 --- /dev/null +++ b/crates/common/src/interop/lido/types.rs @@ -0,0 +1,15 @@ +use alloy::sol; + +sol! { + #[allow(missing_docs)] + #[sol(rpc)] + LidoRegistry, + "src/abi/LidoNORegistry.json" +} + +sol! 
{ + #[allow(missing_docs)] + #[sol(rpc)] + LidoCSMRegistry, + "src/abi/LidoCSModuleNORegistry.json" +} diff --git a/crates/common/src/interop/lido/utils.rs b/crates/common/src/interop/lido/utils.rs new file mode 100644 index 00000000..02ff7c42 --- /dev/null +++ b/crates/common/src/interop/lido/utils.rs @@ -0,0 +1,271 @@ +use std::collections::HashMap; + +use alloy::primitives::{Address, Bytes, U256, address}; +use eyre::Context; +use lazy_static::lazy_static; + +use crate::{ + interop::lido::types::{ + LidoCSMRegistry::{self, getNodeOperatorSummaryReturn}, + LidoRegistry, + }, + types::{Chain, HoleskyLidoModule, HoodiLidoModule, MainnetLidoModule}, +}; + +lazy_static! { + static ref LIDO_REGISTRY_ADDRESSES_BY_MODULE: HashMap> = { + let mut map: HashMap> = HashMap::new(); + + // --- Mainnet --- + let mut mainnet = HashMap::new(); + mainnet.insert( + MainnetLidoModule::Curated as u8, + address!("55032650b14df07b85bF18A3a3eC8E0Af2e028d5"), + ); + mainnet.insert( + MainnetLidoModule::SimpleDVT as u8, + address!("aE7B191A31f627b4eB1d4DaC64eaB9976995b433"), + ); + mainnet.insert( + MainnetLidoModule::CommunityStaking as u8, + address!("dA7dE2ECdDfccC6c3AF10108Db212ACBBf9EA83F"), + ); + map.insert(Chain::Mainnet, mainnet); + + // --- Holesky --- + let mut holesky = HashMap::new(); + holesky.insert( + HoleskyLidoModule::Curated as u8, + address!("595F64Ddc3856a3b5Ff4f4CC1d1fb4B46cFd2bAC"), + ); + holesky.insert( + HoleskyLidoModule::SimpleDVT as u8, + address!("11a93807078f8BB880c1BD0ee4C387537de4b4b6"), + ); + holesky.insert( + HoleskyLidoModule::Sandbox as u8, + address!("D6C2ce3BB8bea2832496Ac8b5144819719f343AC"), + ); + holesky.insert( + HoleskyLidoModule::CommunityStaking as u8, + address!("4562c3e63c2e586cD1651B958C22F88135aCAd4f"), + ); + map.insert(Chain::Holesky, holesky); + + // --- Hoodi --- + let mut hoodi = HashMap::new(); + hoodi.insert( + HoodiLidoModule::Curated as u8, + address!("5cDbE1590c083b5A2A64427fAA63A7cfDB91FbB5"), + ); + hoodi.insert( + 
HoodiLidoModule::SimpleDVT as u8, + address!("0B5236BECA68004DB89434462DfC3BB074d2c830"), + ); + hoodi.insert( + HoodiLidoModule::Sandbox as u8, + address!("682E94d2630846a503BDeE8b6810DF71C9806891"), + ); + hoodi.insert( + HoodiLidoModule::CommunityStaking as u8, + address!("79CEf36D84743222f37765204Bec41E92a93E59d"), + ); + map.insert(Chain::Hoodi, hoodi); + + // --- Sepolia -- + let mut sepolia = HashMap::new(); + sepolia.insert(1, address!("33d6E15047E8644F8DDf5CD05d202dfE587DA6E3")); + map.insert(Chain::Sepolia, sepolia); + + map + }; +} + +// Fetching appropiate registry address +pub fn lido_registry_address(chain: Chain, lido_module_id: u8) -> eyre::Result
{ + LIDO_REGISTRY_ADDRESSES_BY_MODULE + .get(&chain) + .ok_or_else(|| eyre::eyre!("Lido registry not supported for chain: {chain:?}"))? + .get(&lido_module_id) + .copied() + .ok_or_else(|| { + eyre::eyre!("Lido module id {:?} not found for chain: {chain:?}", lido_module_id) + }) +} + +pub fn is_csm_module(chain: Chain, module_id: u8) -> bool { + match chain { + Chain::Mainnet => module_id == MainnetLidoModule::CommunityStaking as u8, + Chain::Holesky => module_id == HoleskyLidoModule::CommunityStaking as u8, + Chain::Hoodi => module_id == HoodiLidoModule::CommunityStaking as u8, + _ => false, + } +} + +pub fn get_lido_csm_registry

( + registry_address: Address, + provider: P, +) -> LidoCSMRegistry::LidoCSMRegistryInstance

+where + P: Clone + Send + Sync + 'static + alloy::providers::Provider, +{ + LidoCSMRegistry::new(registry_address, provider) +} + +pub fn get_lido_module_registry

( + registry_address: Address, + provider: P, +) -> LidoRegistry::LidoRegistryInstance

+where + P: Clone + Send + Sync + 'static + alloy::providers::Provider, +{ + LidoRegistry::new(registry_address, provider) +} + +pub async fn fetch_lido_csm_keys_total

( + registry: &LidoCSMRegistry::LidoCSMRegistryInstance

, + node_operator_id: U256, +) -> eyre::Result +where + P: Clone + Send + Sync + 'static + alloy::providers::Provider, +{ + let summary: getNodeOperatorSummaryReturn = + registry.getNodeOperatorSummary(node_operator_id).call().await?; + + let total_u256 = summary.totalDepositedValidators + summary.depositableValidatorsCount; + + let total_u64 = u64::try_from(total_u256) + .wrap_err_with(|| format!("total keys ({total_u256}) does not fit into u64"))?; + + Ok(total_u64) +} + +pub async fn fetch_lido_module_keys_total

( + registry: &LidoRegistry::LidoRegistryInstance

, + node_operator_id: U256, +) -> eyre::Result +where + P: Clone + Send + Sync + 'static + alloy::providers::Provider, +{ + let total_keys: u64 = + registry.getTotalSigningKeyCount(node_operator_id).call().await?.try_into()?; + + Ok(total_keys) +} + +pub async fn fetch_lido_csm_keys_batch

( + registry: &LidoCSMRegistry::LidoCSMRegistryInstance

, + node_operator_id: U256, + offset: u64, + limit: u64, +) -> eyre::Result +where + P: Clone + Send + Sync + 'static + alloy::providers::Provider, +{ + let pubkeys = registry + .getSigningKeys(node_operator_id, U256::from(offset), U256::from(limit)) + .call() + .await?; + + Ok(pubkeys) +} + +pub async fn fetch_lido_module_keys_batch

( + registry: &LidoRegistry::LidoRegistryInstance

, + node_operator_id: U256, + offset: u64, + limit: u64, +) -> eyre::Result +where + P: Clone + Send + Sync + 'static + alloy::providers::Provider, +{ + let pubkeys = registry + .getSigningKeys(node_operator_id, U256::from(offset), U256::from(limit)) + .call() + .await? + .pubkeys; + + Ok(pubkeys) +} + +#[cfg(test)] +mod tests { + use alloy::{ + primitives::{U256, address}, + providers::ProviderBuilder, + rpc::types::beacon::constants::BLS_PUBLIC_KEY_BYTES_LEN, + }; + use url::Url; + + use super::*; + use crate::{interop::lido::types::LidoRegistry, types::BlsPublicKey}; + + #[tokio::test] + async fn test_lido_registry_address() -> eyre::Result<()> { + let url = Url::parse("https://ethereum-rpc.publicnode.com")?; + let provider = ProviderBuilder::new().connect_http(url); + + let registry = + LidoRegistry::new(address!("55032650b14df07b85bF18A3a3eC8E0Af2e028d5"), provider); + + const LIMIT: usize = 3; + let node_operator_id = U256::from(1); + + let total_keys: u64 = + registry.getTotalSigningKeyCount(node_operator_id).call().await?.try_into()?; + + assert!(total_keys > LIMIT as u64); + + let pubkeys = registry + .getSigningKeys(node_operator_id, U256::ZERO, U256::from(LIMIT)) + .call() + .await? 
+ .pubkeys; + + let mut vec = vec![]; + for chunk in pubkeys.chunks(BLS_PUBLIC_KEY_BYTES_LEN) { + vec.push( + BlsPublicKey::deserialize(chunk) + .map_err(|_| eyre::eyre!("invalid BLS public key"))?, + ); + } + + assert_eq!(vec.len(), LIMIT); + + Ok(()) + } + + #[tokio::test] + async fn test_lido_csm_registry_address() -> eyre::Result<()> { + let url = Url::parse("https://ethereum-rpc.publicnode.com")?; + let provider = ProviderBuilder::new().connect_http(url); + + let registry = + LidoCSMRegistry::new(address!("dA7dE2ECdDfccC6c3AF10108Db212ACBBf9EA83F"), provider); + + const LIMIT: usize = 3; + let node_operator_id = U256::from(1); + + let summary = registry.getNodeOperatorSummary(node_operator_id).call().await?; + + let total_keys_u256 = summary.totalDepositedValidators + summary.depositableValidatorsCount; + let total_keys: u64 = total_keys_u256.try_into()?; + + assert!(total_keys > LIMIT as u64, "expected more than {LIMIT} keys, got {total_keys}"); + + let pubkeys = + registry.getSigningKeys(node_operator_id, U256::ZERO, U256::from(LIMIT)).call().await?; + + let mut vec = Vec::new(); + for chunk in pubkeys.chunks(BLS_PUBLIC_KEY_BYTES_LEN) { + vec.push( + BlsPublicKey::deserialize(chunk) + .map_err(|_| eyre::eyre!("invalid BLS public key"))?, + ); + } + + assert_eq!(vec.len(), LIMIT, "expected {LIMIT} keys, got {}", vec.len()); + + Ok(()) + } +} diff --git a/crates/common/src/interop/mod.rs b/crates/common/src/interop/mod.rs index 42502f6f..4d0230a9 100644 --- a/crates/common/src/interop/mod.rs +++ b/crates/common/src/interop/mod.rs @@ -1 +1,2 @@ +pub mod lido; pub mod ssv; diff --git a/crates/common/src/types.rs b/crates/common/src/types.rs index 6d6d55f1..89934471 100644 --- a/crates/common/src/types.rs +++ b/crates/common/src/types.rs @@ -29,7 +29,7 @@ pub struct JwtClaims { pub module: String, } -#[derive(Clone, Copy, PartialEq, Eq)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub enum Chain { Mainnet, Holesky, @@ -44,6 +44,26 @@ pub enum Chain { }, } +pub 
enum MainnetLidoModule { + Curated = 1, + SimpleDVT = 2, + CommunityStaking = 3, +} + +pub enum HoleskyLidoModule { + Curated = 1, + SimpleDVT = 2, + Sandbox = 3, + CommunityStaking = 4, +} + +pub enum HoodiLidoModule { + Curated = 1, + SimpleDVT = 2, + Sandbox = 3, + CommunityStaking = 4, +} + pub type ForkVersion = [u8; 4]; impl std::fmt::Display for Chain { diff --git a/examples/configs/pbs_mux.toml b/examples/configs/pbs_mux.toml index 3ea9f355..fcf4ea8c 100644 --- a/examples/configs/pbs_mux.toml +++ b/examples/configs/pbs_mux.toml @@ -33,7 +33,7 @@ target_first_request_ms = 200 [[mux]] id = "lido-mux" -loader = { registry = "lido", node_operator_id = 8 } +loader = { registry = "lido", node_operator_id = 8, lido_module_id = 1 } [[mux.relays]] id = "relay-3" diff --git a/tests/tests/pbs_mux_refresh.rs b/tests/tests/pbs_mux_refresh.rs index 44979fbe..da582ec7 100644 --- a/tests/tests/pbs_mux_refresh.rs +++ b/tests/tests/pbs_mux_refresh.rs @@ -72,6 +72,7 @@ async fn test_auto_refresh() -> Result<()> { let loader = MuxKeysLoader::Registry { enable_refreshing: true, node_operator_id: 1, + lido_module_id: None, registry: cb_common::config::NORegistry::SSV, }; let muxes = PbsMuxes { From aaa0967d72434120ad09ad4beec7c50223530a16 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 11 Nov 2025 16:27:37 -0500 Subject: [PATCH 18/26] Updated the submit_block unit tests --- tests/src/mock_validator.rs | 27 ++++++++--- tests/tests/pbs_get_header.rs | 2 +- tests/tests/pbs_mux.rs | 14 +++++- tests/tests/pbs_post_blinded_blocks.rs | 67 ++++++++++++++++++++------ 4 files changed, 84 insertions(+), 26 deletions(-) diff --git a/tests/src/mock_validator.rs b/tests/src/mock_validator.rs index 1bea491d..092b97a5 100644 --- a/tests/src/mock_validator.rs +++ b/tests/src/mock_validator.rs @@ -79,7 +79,7 @@ impl MockValidator { pub async fn do_submit_block_v1( &self, signed_blinded_block_opt: Option, - accept: EncodingType, + accept: HashSet, content_type: EncodingType, fork_name: 
ForkName, ) -> eyre::Result { @@ -96,7 +96,7 @@ impl MockValidator { pub async fn do_submit_block_v2( &self, signed_blinded_block_opt: Option, - accept: EncodingType, + accept: HashSet, content_type: EncodingType, fork_name: ForkName, ) -> eyre::Result { @@ -113,7 +113,7 @@ impl MockValidator { async fn do_submit_block_impl( &self, signed_blinded_block_opt: Option, - accept: EncodingType, + accept: HashSet, content_type: EncodingType, fork_name: ForkName, api_version: BuilderApiVersion, @@ -127,16 +127,27 @@ impl MockValidator { EncodingType::Ssz => signed_blinded_block.as_ssz_bytes(), }; - Ok(self + let accept = match accept.len() { + 0 => None, + 1 => Some(accept.into_iter().next().unwrap().to_string()), + _ => { + let accept_strings: Vec = + accept.into_iter().map(|e| e.to_string()).collect(); + Some(accept_strings.join(", ")) + } + }; + let mut res = self .comm_boost .client .post(url) .body(body) .header(CONSENSUS_VERSION_HEADER, &fork_name.to_string()) - .header(CONTENT_TYPE, &content_type.to_string()) - .header(ACCEPT, &accept.to_string()) - .send() - .await?) 
+ .header(CONTENT_TYPE, &content_type.to_string()); + if let Some(accept_header) = accept { + res = res.header(ACCEPT, accept_header); + } + let res = res.send().await?; + Ok(res) } } diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 435d82de..8a87a3e4 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -105,7 +105,7 @@ async fn test_get_header_impl( tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); // Run the PBS service - let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay.clone()]); + let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay]); let state = PbsState::new(config); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); diff --git a/tests/tests/pbs_mux.rs b/tests/tests/pbs_mux.rs index ad1d4c05..6b5afe44 100644 --- a/tests/tests/pbs_mux.rs +++ b/tests/tests/pbs_mux.rs @@ -230,7 +230,12 @@ async fn test_mux() -> Result<()> { info!("Sending submit block v1"); assert_eq!( mock_validator - .do_submit_block_v1(None, EncodingType::Json, EncodingType::Json, ForkName::Electra) + .do_submit_block_v1( + None, + HashSet::from([EncodingType::Json]), + EncodingType::Json, + ForkName::Electra + ) .await? .status(), StatusCode::OK @@ -241,7 +246,12 @@ async fn test_mux() -> Result<()> { info!("Sending submit block v2"); assert_eq!( mock_validator - .do_submit_block_v2(None, EncodingType::Json, EncodingType::Json, ForkName::Electra) + .do_submit_block_v2( + None, + HashSet::from([EncodingType::Json]), + EncodingType::Json, + ForkName::Electra + ) .await? 
.status(), StatusCode::ACCEPTED diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index 79725b3e..a4e533be 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -1,4 +1,4 @@ -use std::{sync::Arc, time::Duration}; +use std::{collections::HashSet, sync::Arc, time::Duration}; use cb_common::{ pbs::{BuilderApiVersion, GetPayloadInfo, PayloadAndBlobs, SubmitBlindedBlockResponse}, @@ -19,7 +19,14 @@ use tracing::info; #[tokio::test] async fn test_submit_block_v1() -> Result<()> { - let res = submit_block_impl(3800, BuilderApiVersion::V1, EncodingType::Json).await?; + let res = submit_block_impl( + 3800, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + ) + .await?; assert_eq!(res.status(), StatusCode::OK); let signed_blinded_block = load_test_signed_blinded_block(); @@ -34,7 +41,14 @@ async fn test_submit_block_v1() -> Result<()> { #[tokio::test] async fn test_submit_block_v2() -> Result<()> { - let res = submit_block_impl(3850, BuilderApiVersion::V2, EncodingType::Json).await?; + let res = submit_block_impl( + 3850, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Json]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Json, + ) + .await?; assert_eq!(res.status(), StatusCode::ACCEPTED); assert_eq!(res.bytes().await?.len(), 0); Ok(()) @@ -42,7 +56,14 @@ async fn test_submit_block_v2() -> Result<()> { #[tokio::test] async fn test_submit_block_v1_ssz() -> Result<()> { - let res = submit_block_impl(3810, BuilderApiVersion::V1, EncodingType::Ssz).await?; + let res = submit_block_impl( + 3810, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Ssz, + ) + .await?; assert_eq!(res.status(), StatusCode::OK); let signed_blinded_block = load_test_signed_blinded_block(); @@ -58,7 
+79,14 @@ async fn test_submit_block_v1_ssz() -> Result<()> { #[tokio::test] async fn test_submit_block_v2_ssz() -> Result<()> { - let res = submit_block_impl(3860, BuilderApiVersion::V2, EncodingType::Ssz).await?; + let res = submit_block_impl( + 3860, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + EncodingType::Ssz, + ) + .await?; assert_eq!(res.status(), StatusCode::ACCEPTED); assert_eq!(res.bytes().await?.len(), 0); Ok(()) @@ -87,7 +115,12 @@ async fn test_submit_block_too_large() -> Result<()> { let mock_validator = MockValidator::new(pbs_port)?; info!("Sending submit block"); let res = mock_validator - .do_submit_block_v1(None, EncodingType::Json, EncodingType::Json, ForkName::Electra) + .do_submit_block_v1( + None, + HashSet::from([EncodingType::Json]), + EncodingType::Json, + ForkName::Electra, + ) .await; // response size exceeds max size: max: 20971520 @@ -99,29 +132,33 @@ async fn test_submit_block_too_large() -> Result<()> { async fn submit_block_impl( pbs_port: u16, api_version: BuilderApiVersion, + accept_types: HashSet, + relay_types: HashSet, serialization_mode: EncodingType, ) -> Result { - let accept = serialization_mode; - + // Setup test environment setup_test_env(); let signer = random_secret(); let pubkey = signer.public_key(); - let chain = Chain::Holesky; + let relay_port = pbs_port + 1; // Run a mock relay - let relays = vec![generate_mock_relay(pbs_port + 1, pubkey)?]; - let mock_state = Arc::new(MockRelayState::new(chain, signer)); - tokio::spawn(start_mock_relay_service(mock_state.clone(), pbs_port + 1)); + let mut mock_state = MockRelayState::new(chain, signer); + mock_state.supported_content_types = Arc::new(relay_types); + let mock_state = Arc::new(mock_state); + let mock_relay = generate_mock_relay(relay_port, pubkey)?; + tokio::spawn(start_mock_relay_service(mock_state.clone(), relay_port)); // Run the PBS service - let config = to_pbs_config(chain, 
get_pbs_static_config(pbs_port), relays); + let config = to_pbs_config(chain, get_pbs_static_config(pbs_port), vec![mock_relay]); let state = PbsState::new(config); tokio::spawn(PbsService::run::<(), DefaultBuilderApi>(state)); // leave some time to start servers tokio::time::sleep(Duration::from_millis(100)).await; + // Send the submit block request let signed_blinded_block = load_test_signed_blinded_block(); let mock_validator = MockValidator::new(pbs_port)?; info!("Sending submit block"); @@ -130,7 +167,7 @@ async fn submit_block_impl( mock_validator .do_submit_block_v1( Some(signed_blinded_block), - accept, + accept_types, serialization_mode, ForkName::Electra, ) @@ -140,7 +177,7 @@ async fn submit_block_impl( mock_validator .do_submit_block_v2( Some(signed_blinded_block), - accept, + accept_types, serialization_mode, ForkName::Electra, ) From 83ca8f8a21ede19002d9ce124ed86821c7b7d5f9 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 11 Nov 2025 20:37:28 -0500 Subject: [PATCH 19/26] Added more multitype tests to submit_block, not done yet though --- crates/common/src/utils.rs | 4 +- crates/pbs/src/mev_boost/submit_block.rs | 49 ++++++---- tests/src/mock_relay.rs | 13 ++- tests/tests/pbs_post_blinded_blocks.rs | 108 ++++++++++++++++++++++- 4 files changed, 148 insertions(+), 26 deletions(-) diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index a680fa76..ddc93e1b 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -458,8 +458,8 @@ pub fn get_accept_types(req_headers: &HeaderMap) -> eyre::Result( send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); send_headers.insert(HEADER_CONSENSUS_VERSION, consensus_version); + // Get the accept types from the request and forward them + for value in req_headers.get_all(ACCEPT).iter() { + send_headers.append(ACCEPT, value.clone()); + } + + // Copy the content type header + send_headers.insert( + CONTENT_TYPE, + 
HeaderValue::from_str(get_content_type(&req_headers).content_type()).unwrap(), + ); + let mut handles = Vec::with_capacity(state.all_relays().len()); for relay in state.all_relays().iter().cloned() { handles.push( @@ -180,15 +191,17 @@ async fn send_submit_block( // If the request only supports SSZ, but the relay only supports JSON, resubmit // to the relay with JSON - we'll convert it ourselves if code == StatusCode::NOT_ACCEPTABLE && accepts_ssz && !accepts_json { + // TODO: needs to handle the case where the content-type is wrong too debug!( relay_id = relay.id.as_ref(), - "relay does not support SSZ, resubmitting request with JSON accept header" + "relay does not support SSZ, resubmitting request with JSON accept and content-type" ); - // Resubmit the request with JSON accept header + // Resubmit the request with JSON accept and content-type headers let elapsed = start_request.elapsed().as_millis() as u64; - original_headers - .insert(ACCEPT, HeaderValue::from_str(EncodingType::Json.content_type()).unwrap()); + let json_header_value = HeaderValue::from_str(EncodingType::Json.content_type()).unwrap(); + original_headers.insert(ACCEPT, json_header_value.clone()); + original_headers.insert(CONTENT_TYPE, json_header_value); start_request = Instant::now(); (res, content_type) = send_submit_block_impl( url, @@ -321,16 +334,16 @@ async fn send_submit_block_impl( headers: HeaderMap, timeout_ms: u64, ) -> Result<(Response, Option), PbsError> { + // Get the content type of the request + let content_type = get_content_type(&headers); + // Send the request - let res = match relay - .client - .post(url) - .timeout(Duration::from_millis(timeout_ms)) - .headers(headers) - .json(&signed_blinded_block) - .send() - .await - { + let res = relay.client.post(url).timeout(Duration::from_millis(timeout_ms)).headers(headers); + let body = match content_type { + EncodingType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), + EncodingType::Ssz => 
signed_blinded_block.as_ssz_bytes(), + }; + let res = match res.body(body).header(CONTENT_TYPE, &content_type.to_string()).send().await { Ok(res) => res, Err(err) => { RELAY_STATUS_CODE diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 1da110f7..7a55ca26 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -27,7 +27,8 @@ use cb_common::{ types::{BlsSecretKey, Chain}, utils::{ CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, TestRandomSeed, deserialize_body, - get_accept_types, get_consensus_version_header, timestamp_of_slot_start_sec, + get_accept_types, get_consensus_version_header, get_content_type, + timestamp_of_slot_start_sec, }, }; use cb_pbs::MAX_SIZE_SUBMIT_BLOCK_RESPONSE; @@ -285,7 +286,15 @@ async fn handle_submit_block_v1( response } -async fn handle_submit_block_v2(State(state): State>) -> Response { +async fn handle_submit_block_v2( + headers: HeaderMap, + State(state): State>, +) -> Response { state.received_submit_block.fetch_add(1, Ordering::Relaxed); + let content_type = get_content_type(&headers); + if !state.supported_content_types.contains(&content_type) { + return (StatusCode::NOT_ACCEPTABLE, "No acceptable content type found".to_string()) + .into_response(); + }; (StatusCode::ACCEPTED, "").into_response() } diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index a4e533be..8adcd282 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -25,6 +25,7 @@ async fn test_submit_block_v1() -> Result<()> { HashSet::from([EncodingType::Json]), HashSet::from([EncodingType::Ssz, EncodingType::Json]), EncodingType::Json, + 1, ) .await?; assert_eq!(res.status(), StatusCode::OK); @@ -42,11 +43,12 @@ async fn test_submit_block_v1() -> Result<()> { #[tokio::test] async fn test_submit_block_v2() -> Result<()> { let res = submit_block_impl( - 3850, + 3810, BuilderApiVersion::V2, HashSet::from([EncodingType::Json]), 
HashSet::from([EncodingType::Ssz, EncodingType::Json]), EncodingType::Json, + 1, ) .await?; assert_eq!(res.status(), StatusCode::ACCEPTED); @@ -57,11 +59,12 @@ async fn test_submit_block_v2() -> Result<()> { #[tokio::test] async fn test_submit_block_v1_ssz() -> Result<()> { let res = submit_block_impl( - 3810, + 3820, BuilderApiVersion::V1, HashSet::from([EncodingType::Ssz]), HashSet::from([EncodingType::Ssz, EncodingType::Json]), EncodingType::Ssz, + 1, ) .await?; assert_eq!(res.status(), StatusCode::OK); @@ -80,11 +83,12 @@ async fn test_submit_block_v1_ssz() -> Result<()> { #[tokio::test] async fn test_submit_block_v2_ssz() -> Result<()> { let res = submit_block_impl( - 3860, + 3830, BuilderApiVersion::V2, HashSet::from([EncodingType::Ssz]), HashSet::from([EncodingType::Ssz, EncodingType::Json]), EncodingType::Ssz, + 1, ) .await?; assert_eq!(res.status(), StatusCode::ACCEPTED); @@ -92,6 +96,101 @@ async fn test_submit_block_v2_ssz() -> Result<()> { Ok(()) } +/// Test that a v1 submit block request in SSZ is converted to JSON if the relay +/// only supports JSON +#[tokio::test] +async fn test_submit_block_v1_ssz_into_json() -> Result<()> { + let res = submit_block_impl( + 3840, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz]), + HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + ) + .await?; + assert_eq!(res.status(), StatusCode::OK); + + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +/// Test that a v2 submit block request in SSZ is converted to JSON if the relay +/// only supports JSON +#[tokio::test] +async fn test_submit_block_v2_ssz_into_json() -> Result<()> { + let res = submit_block_impl( + 3850, + BuilderApiVersion::V2, + HashSet::from([EncodingType::Ssz]), + 
HashSet::from([EncodingType::Json]), + EncodingType::Ssz, + 2, + ) + .await?; + assert_eq!(res.status(), StatusCode::ACCEPTED); + assert_eq!(res.bytes().await?.len(), 0); + Ok(()) +} + +/// Test v1 requesting multiple types when the relay supports SSZ, which should +/// return SSZ +#[tokio::test] +async fn test_submit_block_v1_multitype_ssz() -> Result<()> { + let res = submit_block_impl( + 3860, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Ssz]), + EncodingType::Ssz, + 1, + ) + .await?; + assert_eq!(res.status(), StatusCode::OK); + + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); + assert_eq!( + response_body.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + +/// Test v1 requesting multiple types when the relay supports SSZ, which should +/// return JSON +#[tokio::test] +async fn test_submit_block_v1_multitype_json() -> Result<()> { + let res = submit_block_impl( + 3870, + BuilderApiVersion::V1, + HashSet::from([EncodingType::Ssz, EncodingType::Json]), + HashSet::from([EncodingType::Json]), + EncodingType::Json, + 1, + ) + .await?; + assert_eq!(res.status(), StatusCode::OK); + + let signed_blinded_block = load_test_signed_blinded_block(); + + let response_body = serde_json::from_slice::(&res.bytes().await?)?; + assert_eq!( + response_body.data.execution_payload.block_hash(), + signed_blinded_block.block_hash().into() + ); + Ok(()) +} + #[tokio::test] async fn test_submit_block_too_large() -> Result<()> { setup_test_env(); @@ -135,6 +234,7 @@ async fn submit_block_impl( accept_types: HashSet, relay_types: HashSet, serialization_mode: EncodingType, + expected_try_count: u64, ) -> Result { // Setup test environment setup_test_env(); @@ -184,6 +284,6 @@ async fn submit_block_impl( .await? 
} }; - assert_eq!(mock_state.received_submit_block(), 1); + assert_eq!(mock_state.received_submit_block(), expected_try_count); Ok(res) } From d36ef3b23355be6d4cbb3334eee4fbb740491fe2 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 17 Nov 2025 10:07:18 -0500 Subject: [PATCH 20/26] Switched relay response handling to switch to JSON on any 4xx --- crates/pbs/src/mev_boost/get_header.rs | 2 +- crates/pbs/src/mev_boost/submit_block.rs | 3 +-- tests/tests/pbs_get_header.rs | 2 +- tests/tests/pbs_post_blinded_blocks.rs | 11 ++++++----- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index a722654f..e7992c31 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -339,7 +339,7 @@ async fn send_one_get_header( // If the request only supports SSZ, but the relay only supports JSON, resubmit // to the relay with JSON - we'll convert it ourselves - if code == StatusCode::NOT_ACCEPTABLE && accepts_ssz && !accepts_json { + if code.is_client_error() && accepts_ssz && !accepts_json { debug!( relay_id = relay.id.as_ref(), "relay does not support SSZ, resubmitting request with JSON accept header" diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs index 571ff0a7..a4666949 100644 --- a/crates/pbs/src/mev_boost/submit_block.rs +++ b/crates/pbs/src/mev_boost/submit_block.rs @@ -190,8 +190,7 @@ async fn send_submit_block( // If the request only supports SSZ, but the relay only supports JSON, resubmit // to the relay with JSON - we'll convert it ourselves - if code == StatusCode::NOT_ACCEPTABLE && accepts_ssz && !accepts_json { - // TODO: needs to handle the case where the content-type is wrong too + if code.is_client_error() && accepts_ssz && !accepts_json { debug!( relay_id = relay.id.as_ref(), "relay does not support SSZ, resubmitting request with JSON accept and content-type" diff --git 
a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 8a87a3e4..67fa5f30 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -71,7 +71,7 @@ async fn test_get_header_multitype_ssz() -> Result<()> { } /// Test requesting multiple types when the relay supports JSON, which should -/// return JSON +/// still work #[tokio::test] async fn test_get_header_multitype_json() -> Result<()> { test_get_header_impl( diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index 8adcd282..c6201e52 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -166,8 +166,8 @@ async fn test_submit_block_v1_multitype_ssz() -> Result<()> { Ok(()) } -/// Test v1 requesting multiple types when the relay supports SSZ, which should -/// return JSON +/// Test v1 requesting multiple types when the relay supports JSON, which should +/// still return SSZ #[tokio::test] async fn test_submit_block_v1_multitype_json() -> Result<()> { let res = submit_block_impl( @@ -175,7 +175,7 @@ async fn test_submit_block_v1_multitype_json() -> Result<()> { BuilderApiVersion::V1, HashSet::from([EncodingType::Ssz, EncodingType::Json]), HashSet::from([EncodingType::Json]), - EncodingType::Json, + EncodingType::Ssz, 1, ) .await?; @@ -183,9 +183,10 @@ async fn test_submit_block_v1_multitype_json() -> Result<()> { let signed_blinded_block = load_test_signed_blinded_block(); - let response_body = serde_json::from_slice::(&res.bytes().await?)?; + let response_body = + PayloadAndBlobs::from_ssz_bytes_by_fork(&res.bytes().await?, ForkName::Electra).unwrap(); assert_eq!( - response_body.data.execution_payload.block_hash(), + response_body.execution_payload.block_hash(), signed_blinded_block.block_hash().into() ); Ok(()) From 1c24fb1fb9032664bbf70ea6f99818fd071b8fe4 Mon Sep 17 00:00:00 2001 From: ltitanb <163874448+ltitanb@users.noreply.github.com> Date: Wed, 26 Nov 2025 14:53:06 +0000 Subject: [PATCH 
21/26] clarify timing games config (#411) --- config.example.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/config.example.toml b/config.example.toml index 6ea6a1b6..f15b2262 100644 --- a/config.example.toml +++ b/config.example.toml @@ -89,6 +89,8 @@ headers = { X-MyCustomHeader = "MyCustomValue" } # OPTIONAL get_params = { param1 = "value1", param2 = "value2" } # Whether to enable timing games, as tuned by `target_first_request_ms` and `frequency_get_header_ms`. +# NOTE: if neither `target_first_request_ms` nor `frequency_get_header_ms` is set, this flag has no effect. +# # These values should be carefully chosen for each relay, as each relay has different latency and timing games setups. # They should only be used by advanced users, and if mis-configured can result in unforeseen effects, e.g. fetching a lower header value, # or getting a temporary IP ban. From e089e7dd835513f6d93498b85b798ee433a6f433 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 1 Dec 2025 09:49:02 -0500 Subject: [PATCH 22/26] Allow builds from other branches (#414) --- .github/workflows/release.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index b934fc6e..be779b85 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,7 +37,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - ref: "stable" fetch-depth: 0 submodules: true @@ -108,7 +107,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - ref: "stable" fetch-depth: 0 submodules: true @@ -168,7 +166,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - ref: "stable" fetch-depth: 0 submodules: true @@ -221,7 +218,6 @@ jobs: - name: Checkout code uses: actions/checkout@v4 with: - ref: "stable" fetch-depth: 0 submodules: true From ee25871811e8bf541915ea00cfc1767a6e5476ab Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Mon, 8 Dec 2025 07:57:34 -0500 Subject: [PATCH 23/26] Add --version 
support (#408) --- Cargo.toml | 2 +- bin/cli.rs | 12 ++++++++++++ bin/pbs.rs | 12 ++++++++++++ bin/signer.rs | 12 ++++++++++++ 4 files changed, 37 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 23679360..53b21d12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -33,7 +33,7 @@ cb-metrics = { path = "crates/metrics" } cb-pbs = { path = "crates/pbs" } cb-signer = { path = "crates/signer" } cipher = "0.4" -clap = { version = "4.5.4", features = ["derive", "env"] } +clap = { version = "4.5.48", features = ["derive", "env"] } color-eyre = "0.6.3" ctr = "0.9.2" derive_more = { version = "2.0.1", features = ["deref", "display", "from", "into"] } diff --git a/bin/cli.rs b/bin/cli.rs index d3fa736c..234dc9bd 100644 --- a/bin/cli.rs +++ b/bin/cli.rs @@ -1,8 +1,20 @@ use clap::Parser; +/// Version string with a leading 'v' +const VERSION: &str = concat!("v", env!("CARGO_PKG_VERSION")); + +/// Subcommands and global arguments for the module +#[derive(Parser, Debug)] +#[command(name = "Commit-Boost CLI", version = VERSION, about, long_about = None)] +struct Cli {} + /// Main entry point of the Commit-Boost CLI #[tokio::main] async fn main() -> eyre::Result<()> { + // Parse the CLI arguments (currently only used for version info, more can be + // added later) + let _cli = Cli::parse(); + color_eyre::install()?; // set default backtrace unless provided diff --git a/bin/pbs.rs b/bin/pbs.rs index 69945fe8..0b7c3f72 100644 --- a/bin/pbs.rs +++ b/bin/pbs.rs @@ -7,8 +7,20 @@ use clap::Parser; use eyre::Result; use tracing::{error, info}; +/// Version string with a leading 'v' +const VERSION: &str = concat!("v", env!("CARGO_PKG_VERSION")); + +/// Subcommands and global arguments for the module +#[derive(Parser, Debug)] +#[command(name = "Commit-Boost PBS Service", version = VERSION, about, long_about = None)] +struct Cli {} + #[tokio::main] async fn main() -> Result<()> { + // Parse the CLI arguments (currently only used for version info, more can be + // added 
later) + let _cli = Cli::parse(); + color_eyre::install()?; let _guard = initialize_tracing_log(PBS_MODULE_NAME, LogsSettings::from_env_config()?); diff --git a/bin/signer.rs b/bin/signer.rs index 2d9a60ad..01f3c970 100644 --- a/bin/signer.rs +++ b/bin/signer.rs @@ -7,8 +7,20 @@ use clap::Parser; use eyre::Result; use tracing::{error, info}; +/// Version string with a leading 'v' +const VERSION: &str = concat!("v", env!("CARGO_PKG_VERSION")); + +/// Subcommands and global arguments for the module +#[derive(Parser, Debug)] +#[command(name = "Commit-Boost Signer Service", version = VERSION, about, long_about = None)] +struct Cli {} + #[tokio::main] async fn main() -> Result<()> { + // Parse the CLI arguments (currently only used for version info, more can be + // added later) + let _cli = Cli::parse(); + color_eyre::install()?; let _guard = initialize_tracing_log(SIGNER_MODULE_NAME, LogsSettings::from_env_config()?); From d2b8226eb6e7d608eb6b06f24b452142c6932e43 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Wed, 10 Dec 2025 10:11:12 -0500 Subject: [PATCH 24/26] Optimized get_header --- crates/common/src/pbs/types/mod.rs | 11 ++ crates/pbs/src/mev_boost/get_header.rs | 211 +++++++++++-------------- crates/pbs/src/routes/get_header.rs | 5 +- tests/tests/pbs_get_header.rs | 7 +- 4 files changed, 112 insertions(+), 122 deletions(-) diff --git a/crates/common/src/pbs/types/mod.rs b/crates/common/src/pbs/types/mod.rs index 8ad87c08..a10cfe2a 100644 --- a/crates/common/src/pbs/types/mod.rs +++ b/crates/common/src/pbs/types/mod.rs @@ -54,6 +54,17 @@ pub struct GetHeaderParams { pub pubkey: BlsPublicKey, } +/// Which encoding types the original requester accepts in the response. +/// As the builder spec adds more encoding types, this struct can be expanded. 
+#[derive(Clone)] +pub struct AcceptTypes { + /// Whether SSZ encoding is accepted + pub ssz: bool, + + /// Whether JSON encoding is accepted + pub json: bool, +} + pub trait GetHeaderInfo { fn block_hash(&self) -> B256; fn value(&self) -> &U256; diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index e7992c31..7d51619c 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -20,8 +20,8 @@ use cb_common::{ signature::verify_signed_message, types::{BlsPublicKey, BlsPublicKeyBytes, BlsSignature, Chain}, utils::{ - EncodingType, get_accept_types, get_consensus_version_header, get_user_agent_with_version, - ms_into_slot, read_chunked_body_with_max, timestamp_of_slot_start_sec, utcnow_ms, + EncodingType, get_consensus_version_header, get_user_agent_with_version, ms_into_slot, + read_chunked_body_with_max, timestamp_of_slot_start_sec, utcnow_ms, }, }; use futures::future::join_all; @@ -45,6 +45,40 @@ use crate::{ utils::check_gas_limit, }; +/// Info about an incoming get_header request. +/// Sent from get_header to each send_timed_get_header call. 
+#[derive(Clone)] +struct RequestInfo { + /// The blockchain parameters of the get_header request (what slot it's for, + /// which pubkey is requesting it, etc) + params: GetHeaderParams, + + /// Common baseline of headers to send with each request + headers: Arc, + + /// The chain the request is for + chain: Chain, + + /// Context for validating the header returned by the relay + validation: ValidationContext, +} + +// Context for validating the header +#[derive(Clone)] +struct ValidationContext { + // Whether to skip signature verification + skip_sigverify: bool, + + // Minimum acceptable bid, in wei + min_bid_wei: U256, + + // Whether extra validation of the parent block is enabled + extra_validation_enabled: bool, + + // The parent block, if fetched + parent_block: Arc>>, +} + /// Implements https://ethereum.github.io/builder-specs/#/Builder/getHeader /// Returns 200 if at least one relay returns 200, else 204 pub async fn get_header( @@ -101,27 +135,34 @@ pub async fn get_header( let mut send_headers = HeaderMap::new(); send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); - // Get the accept types from the request and forward them - for value in req_headers.get_all(ACCEPT).iter() { - send_headers.append(ACCEPT, value.clone()); - } + // Create the Accept headers for requests since the module handles both SSZ and + // JSON + let accept_types = + [EncodingType::Ssz.content_type(), EncodingType::Json.content_type()].join(","); + send_headers.insert(ACCEPT, HeaderValue::from_str(&accept_types).unwrap()); + + // Send requests to all relays concurrently + let slot = params.slot as i64; + let request_info = Arc::new(RequestInfo { + params, + headers: Arc::new(send_headers), + chain: state.config.chain, + validation: ValidationContext { + skip_sigverify: state.pbs_config().skip_sigverify, + min_bid_wei: state.pbs_config().min_bid_wei, + extra_validation_enabled: state.extra_validation_enabled(), + parent_block, + }, + }); let mut handles = 
Vec::with_capacity(relays.len()); for relay in relays.iter() { handles.push( send_timed_get_header( - params.clone(), + request_info.clone(), relay.clone(), - state.config.chain, - send_headers.clone(), ms_into_slot, max_timeout_ms, - ValidationContext { - skip_sigverify: state.pbs_config().skip_sigverify, - min_bid_wei: state.pbs_config().min_bid_wei, - extra_validation_enabled: state.extra_validation_enabled(), - parent_block: parent_block.clone(), - }, ) .in_current_span(), ); @@ -134,7 +175,7 @@ pub async fn get_header( match res { Ok(Some(res)) => { - RELAY_LAST_SLOT.with_label_values(&[relay_id]).set(params.slot as i64); + RELAY_LAST_SLOT.with_label_values(&[relay_id]).set(slot); let value_gwei = (res.data.message.value() / U256::from(1_000_000_000)) .try_into() .unwrap_or_default(); @@ -179,15 +220,13 @@ async fn fetch_parent_block( } async fn send_timed_get_header( - params: GetHeaderParams, + request_info: Arc, relay: RelayClient, - chain: Chain, - headers: HeaderMap, ms_into_slot: u64, mut timeout_left_ms: u64, - validation: ValidationContext, ) -> Result, PbsError> { - let url = relay.get_header_url(params.slot, ¶ms.parent_hash, ¶ms.pubkey)?; + let params = &request_info.params; + let url = Arc::new(relay.get_header_url(params.slot, ¶ms.parent_hash, ¶ms.pubkey)?); if relay.config.enable_timing_games { if let Some(target_ms) = relay.config.target_first_request_ms { @@ -218,18 +257,12 @@ async fn send_timed_get_header( ); loop { - let params = params.clone(); handles.push(tokio::spawn( send_one_get_header( - params, + request_info.clone(), relay.clone(), - chain, - RequestContext { - timeout_ms: timeout_left_ms, - url: url.clone(), - headers: headers.clone(), - }, - validation.clone(), + url.clone(), + timeout_left_ms, ) .in_current_span(), )); @@ -285,92 +318,30 @@ async fn send_timed_get_header( } // if no timing games or no repeated send, just send one request - send_one_get_header( - params, - relay, - chain, - RequestContext { timeout_ms: 
timeout_left_ms, url, headers }, - validation, - ) - .await - .map(|(_, maybe_header)| maybe_header) -} - -struct RequestContext { - url: Url, - timeout_ms: u64, - headers: HeaderMap, -} - -#[derive(Clone)] -struct ValidationContext { - skip_sigverify: bool, - min_bid_wei: U256, - extra_validation_enabled: bool, - parent_block: Arc>>, + send_one_get_header(request_info, relay, url, timeout_left_ms) + .await + .map(|(_, maybe_header)| maybe_header) } +/// Handles requesting a header from a relay, decoding, and validation. +/// Used by send_timed_get_header to handle each individual request. async fn send_one_get_header( - params: GetHeaderParams, + request_info: Arc, relay: RelayClient, - chain: Chain, - req_config: RequestContext, - validation: ValidationContext, + url: Arc, + timeout_left_ms: u64, ) -> Result<(u64, Option), PbsError> { - let mut original_headers = req_config.headers.clone(); - - // Check which types this request is for - let accept_types = get_accept_types(&req_config.headers).map_err(|e| { - PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string()) - })?; - let accepts_ssz = accept_types.contains(&EncodingType::Ssz); - let accepts_json = accept_types.contains(&EncodingType::Json); - // Send the header request - let mut start_request = Instant::now(); - let config = RequestContext { - url: req_config.url.clone(), - timeout_ms: req_config.timeout_ms, - headers: req_config.headers, - }; - let (mut res, mut start_request_time, mut content_type) = - send_get_header_impl(&relay, config).await?; - let mut code = res.status(); - - // If the request only supports SSZ, but the relay only supports JSON, resubmit - // to the relay with JSON - we'll convert it ourselves - if code.is_client_error() && accepts_ssz && !accepts_json { - debug!( - relay_id = relay.id.as_ref(), - "relay does not support SSZ, resubmitting request with JSON accept header" - ); - - // Make sure there's enough time left to resubmit - let elapsed = 
start_request.elapsed().as_millis() as u64; - if elapsed >= req_config.timeout_ms { - RELAY_STATUS_CODE - .with_label_values(&[TIMEOUT_ERROR_CODE_STR, GET_HEADER_ENDPOINT_TAG, &relay.id]) - .inc(); - return Err(PbsError::RelayResponse { - error_msg: "not enough time left to resubmit request with JSON accept header" - .to_string(), - code: TIMEOUT_ERROR_CODE, - }); - } - - // Resubmit the request with JSON accept header - // Also resets the start request timer - original_headers - .insert(ACCEPT, HeaderValue::from_str(EncodingType::Json.content_type()).unwrap()); - let config = RequestContext { - url: req_config.url.clone(), - timeout_ms: req_config.timeout_ms - elapsed, - headers: original_headers, - }; - start_request = Instant::now(); - (res, start_request_time, content_type) = send_get_header_impl(&relay, config).await?; - code = res.status(); - } + let start_request = Instant::now(); + let (res, start_request_time, content_type) = send_get_header_impl( + &relay, + url, + timeout_left_ms, + (*request_info.headers).clone(), /* Create a copy of the HeaderMap because the impl will + * modify it */ + ) + .await?; + let code = res.status(); // Get the consensus fork version if provided (to avoid cloning later) let fork = get_consensus_version_header(res.headers()); @@ -450,6 +421,9 @@ async fn send_one_get_header( "received new header" ); + let chain = request_info.chain; + let params = &request_info.params; + let validation = &request_info.validation; match &get_header_response.data.message.header() { ExecutionPayloadHeaderRef::Bellatrix(_) | ExecutionPayloadHeaderRef::Capella(_) | @@ -528,25 +502,30 @@ async fn send_one_get_header( Ok((start_request_time, Some(get_header_response))) } +/// Sends a get_header request to a relay, returning the response, the time the +/// request was started, and the encoding type of the response (if any). +/// Used by send_one_get_header to perform the actual request submission. 
async fn send_get_header_impl( relay: &RelayClient, - mut req_config: RequestContext, + url: Arc, + timeout_left_ms: u64, + mut headers: HeaderMap, ) -> Result<(Response, u64, Option), PbsError> { // the timestamp in the header is the consensus block time which is fixed, // use the beginning of the request as proxy to make sure we use only the // last one received let start_request_time = utcnow_ms(); - req_config.headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); + headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); // The timeout header indicating how long a relay has to respond, so they can // minimize timing games without losing the bid - req_config.headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(req_config.timeout_ms)); + headers.insert(HEADER_TIMEOUT_MS, HeaderValue::from(timeout_left_ms)); let res = match relay .client - .get(req_config.url) - .timeout(Duration::from_millis(req_config.timeout_ms)) - .headers(req_config.headers) + .get(url.as_ref().clone()) + .timeout(Duration::from_millis(timeout_left_ms)) + .headers(headers) .send() .await { diff --git a/crates/pbs/src/routes/get_header.rs b/crates/pbs/src/routes/get_header.rs index 896ab781..a11d77e2 100644 --- a/crates/pbs/src/routes/get_header.rs +++ b/crates/pbs/src/routes/get_header.rs @@ -39,6 +39,8 @@ pub async fn handle_get_header>( error!(%e, "error parsing accept header"); PbsClientError::DecodeError(format!("error parsing accept header: {e}")) })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); info!(ua, ms_into_slot, "new request"); @@ -49,9 +51,6 @@ pub async fn handle_get_header>( BEACON_NODE_STATUS.with_label_values(&["200", GET_HEADER_ENDPOINT_TAG]).inc(); - let accepts_ssz = accept_types.contains(&EncodingType::Ssz); - let accepts_json = accept_types.contains(&EncodingType::Json); - // Handle SSZ if accepts_ssz { let mut res = 
max_bid.data.as_ssz_bytes().into_response(); diff --git a/tests/tests/pbs_get_header.rs b/tests/tests/pbs_get_header.rs index 67fa5f30..45a2dd4a 100644 --- a/tests/tests/pbs_get_header.rs +++ b/tests/tests/pbs_get_header.rs @@ -44,15 +44,16 @@ async fn test_get_header_ssz() -> Result<()> { .await } -/// Test requesting SSZ when the relay only supports JSON, which should cause -/// PBS to retry internally with JSON +/// Test requesting SSZ when the relay only supports JSON, which should be +/// handled because PBS supports both types internally and re-maps them on the +/// fly #[tokio::test] async fn test_get_header_ssz_into_json() -> Result<()> { test_get_header_impl( 3220, HashSet::from([EncodingType::Ssz]), HashSet::from([EncodingType::Json]), - 2, + 1, ) .await } From fcd7425109e8eb042d6eb8f613a4f3158afc06e6 Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Wed, 10 Dec 2025 14:14:29 -0500 Subject: [PATCH 25/26] Some refactoring of send_get_header_impl to process the payload --- crates/pbs/src/mev_boost/get_header.rs | 313 +++++++++++++------------ 1 file changed, 157 insertions(+), 156 deletions(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index 7d51619c..256cfbf0 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -12,7 +12,7 @@ use axum::http::{HeaderMap, HeaderValue}; use cb_common::{ constants::APPLICATION_BUILDER_DOMAIN, pbs::{ - EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, ForkVersionDecode, GetHeaderInfo, + EMPTY_TX_ROOT_HASH, ExecutionPayloadHeaderRef, ForkName, ForkVersionDecode, GetHeaderInfo, GetHeaderParams, GetHeaderResponse, HEADER_START_TIME_UNIX_MS, HEADER_TIMEOUT_MS, RelayClient, SignedBuilderBid, error::{PbsError, ValidationError}, @@ -27,7 +27,7 @@ use cb_common::{ use futures::future::join_all; use parking_lot::RwLock; use reqwest::{ - Response, StatusCode, + StatusCode, header::{ACCEPT, CONTENT_TYPE, USER_AGENT}, }; use tokio::time::sleep; @@ 
-332,8 +332,7 @@ async fn send_one_get_header( timeout_left_ms: u64, ) -> Result<(u64, Option), PbsError> { // Send the header request - let start_request = Instant::now(); - let (res, start_request_time, content_type) = send_get_header_impl( + let (start_request_time, get_header_response) = send_get_header_impl( &relay, url, timeout_left_ms, @@ -341,152 +340,62 @@ async fn send_one_get_header( * modify it */ ) .await?; - let code = res.status(); - - // Get the consensus fork version if provided (to avoid cloning later) - let fork = get_consensus_version_header(res.headers()); - let content_type_header = res.headers().get(CONTENT_TYPE).cloned(); - - let request_latency = start_request.elapsed(); - RELAY_LATENCY - .with_label_values(&[GET_HEADER_ENDPOINT_TAG, &relay.id]) - .observe(request_latency.as_secs_f64()); - - RELAY_STATUS_CODE.with_label_values(&[code.as_str(), GET_HEADER_ENDPOINT_TAG, &relay.id]).inc(); - - let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; - let header_size_bytes = response_bytes.len(); - if !code.is_success() { - return Err(PbsError::RelayResponse { - error_msg: String::from_utf8_lossy(&response_bytes).into_owned(), - code: code.as_u16(), - }); + let get_header_response = match get_header_response { + None => { + // Break if there's no header + return Ok((start_request_time, None)); + } + Some(res) => res, }; - if code == StatusCode::NO_CONTENT { - debug!( - relay_id = relay.id.as_ref(), - ?code, - latency = ?request_latency, - response = ?response_bytes, - "no header from relay" - ); - return Ok((start_request_time, None)); - } - - // Regenerate the header from the response - let get_header_response = - match content_type { - Some(EncodingType::Ssz) => { - // Get the consensus fork version - this is required according to the spec - let fork = fork.ok_or(PbsError::RelayResponse { - error_msg: "relay did not provide consensus version header for ssz payload" - .to_string(), - code: code.as_u16(), - 
})?; - let data = SignedBuilderBid::from_ssz_bytes_by_fork(&response_bytes, fork) - .map_err(|e| PbsError::RelayResponse { - error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), - code: (code.as_u16()), - })?; - GetHeaderResponse { version: fork, data, metadata: Default::default() } - } - Some(EncodingType::Json) => { - match serde_json::from_slice::(&response_bytes) { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); - } - } - } - None => { - let error_msg = match content_type_header { - None => "relay response missing content type header".to_string(), - Some(ct) => format!("relay response has unsupported content type {ct:?}"), - }; - return Err(PbsError::RelayResponse { error_msg, code: code.as_u16() }); - } - }; - - debug!( - relay_id = relay.id.as_ref(), - header_size_bytes, - latency = ?request_latency, - version =? get_header_response.version, - value_eth = format_ether(*get_header_response.value()), - block_hash = %get_header_response.block_hash(), - content_type = ?content_type, - "received new header" - ); - let chain = request_info.chain; - let params = &request_info.params; - let validation = &request_info.validation; - match &get_header_response.data.message.header() { + // Extract the basic header data needed for validation + let header_data = match &get_header_response.data.message.header() { ExecutionPayloadHeaderRef::Bellatrix(_) | ExecutionPayloadHeaderRef::Capella(_) | ExecutionPayloadHeaderRef::Deneb(_) | ExecutionPayloadHeaderRef::Gloas(_) => { - return Err(PbsError::Validation(ValidationError::UnsupportedFork)) - } - ExecutionPayloadHeaderRef::Electra(res) => { - let header_data = HeaderData { - block_hash: res.block_hash.0, - parent_hash: res.parent_hash.0, - tx_root: res.transactions_root, - value: *get_header_response.value(), - timestamp: res.timestamp, - }; - - validate_header_data( - &header_data, - chain, - 
params.parent_hash, - validation.min_bid_wei, - params.slot, - )?; - - if !validation.skip_sigverify { - validate_signature( - chain, - relay.pubkey(), - get_header_response.data.message.pubkey(), - &get_header_response.data.message, - &get_header_response.data.signature, - )?; - } - } - ExecutionPayloadHeaderRef::Fulu(res) => { - let header_data = HeaderData { - block_hash: res.block_hash.0, - parent_hash: res.parent_hash.0, - tx_root: res.transactions_root, - value: *get_header_response.value(), - timestamp: res.timestamp, - }; - - validate_header_data( - &header_data, - chain, - params.parent_hash, - validation.min_bid_wei, - params.slot, - )?; - - if !validation.skip_sigverify { - validate_signature( - chain, - relay.pubkey(), - get_header_response.data.message.pubkey(), - &get_header_response.data.message, - &get_header_response.data.signature, - )?; - } + Err(PbsError::Validation(ValidationError::UnsupportedFork)) } + ExecutionPayloadHeaderRef::Electra(res) => Ok(HeaderData { + block_hash: res.block_hash.0, + parent_hash: res.parent_hash.0, + tx_root: res.transactions_root, + value: *get_header_response.value(), + timestamp: res.timestamp, + }), + ExecutionPayloadHeaderRef::Fulu(res) => Ok(HeaderData { + block_hash: res.block_hash.0, + parent_hash: res.parent_hash.0, + tx_root: res.transactions_root, + value: *get_header_response.value(), + timestamp: res.timestamp, + }), + }?; + + // Validate the header + let chain = request_info.chain; + let params = &request_info.params; + let validation = &request_info.validation; + validate_header_data( + &header_data, + chain, + params.parent_hash, + validation.min_bid_wei, + params.slot, + )?; + + // Validate the relay signature + if !validation.skip_sigverify { + validate_signature( + chain, + relay.pubkey(), + get_header_response.data.message.pubkey(), + &get_header_response.data.message, + &get_header_response.data.signature, + )?; } + // Validate the parent block if enabled if validation.extra_validation_enabled { 
let parent_block = validation.parent_block.read(); if let Some(parent_block) = parent_block.as_ref() { @@ -510,10 +419,11 @@ async fn send_get_header_impl( url: Arc, timeout_left_ms: u64, mut headers: HeaderMap, -) -> Result<(Response, u64, Option), PbsError> { +) -> Result<(u64, Option), PbsError> { // the timestamp in the header is the consensus block time which is fixed, // use the beginning of the request as proxy to make sure we use only the // last one received + let start_request = Instant::now(); let start_request_time = utcnow_ms(); headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(start_request_time)); @@ -538,23 +448,114 @@ async fn send_get_header_impl( } }; - // Get the content type; this is only really useful for OK responses, and - // doesn't handle encoding types besides SSZ and JSON - let mut content_type: Option = None; - if res.status() == StatusCode::OK && - let Some(header) = res.headers().get(CONTENT_TYPE) - { - let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { + // Log the response code and latency + let code = res.status(); + let request_latency = start_request.elapsed(); + RELAY_LATENCY + .with_label_values(&[GET_HEADER_ENDPOINT_TAG, &relay.id]) + .observe(request_latency.as_secs_f64()); + RELAY_STATUS_CODE.with_label_values(&[code.as_str(), GET_HEADER_ENDPOINT_TAG, &relay.id]).inc(); + + // According to the spec, OK is the only allowed success code so this can break + // early + if code != StatusCode::OK { + if code == StatusCode::NO_CONTENT { + let response_bytes = + read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; + debug!( + relay_id = relay.id.as_ref(), + ?code, + latency = ?request_latency, + response = ?response_bytes, + "no header from relay" + ); + return Ok((start_request_time, None)); + } else { + return Err(PbsError::RelayResponse { + error_msg: format!("unexpected status code from relay: {code}"), + code: code.as_u16(), + }); + } + } + + // Get the content type + let 
content_type = match res.headers().get(CONTENT_TYPE) { + None => { + // Assume a missing content type means JSON; shouldn't happen in practice with + // any respectable HTTP server but just in case + EncodingType::Json + } + Some(header_value) => match header_value.to_str().map_err(|e| PbsError::RelayResponse { error_msg: format!("cannot decode content-type header: {e}").to_string(), - code: (res.status().as_u16()), - })?; - if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) { - content_type = Some(EncodingType::Ssz) - } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) { - content_type = Some(EncodingType::Json) + code: (code.as_u16()), + })? { + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) => { + EncodingType::Ssz + } + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) => { + EncodingType::Json + } + header_str => { + return Err(PbsError::RelayResponse { + error_msg: format!("unsupported content type: {header_str}"), + code: code.as_u16(), + }) + } + }, + }; + + // Decode the body + let fork = get_consensus_version_header(res.headers()); + let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_GET_HEADER_RESPONSE).await?; + let get_header_response = match content_type { + EncodingType::Json => decode_json_payload(&response_bytes)?, + EncodingType::Ssz => { + let fork = fork.ok_or(PbsError::RelayResponse { + error_msg: "relay did not provide consensus version header for ssz payload" + .to_string(), + code: code.as_u16(), + })?; + decode_ssz_payload(&response_bytes, fork)? } + }; + + // Log and return + debug!( + relay_id = relay.id.as_ref(), + header_size_bytes = response_bytes.len(), + latency = ?request_latency, + version =? 
get_header_response.version, + value_eth = format_ether(*get_header_response.value()), + block_hash = %get_header_response.block_hash(), + content_type = ?content_type, + "received new header" + ); + Ok((start_request_time, Some(get_header_response))) +} + +/// Decode a JSON-encoded get_header response +fn decode_json_payload(response_bytes: &[u8]) -> Result { + match serde_json::from_slice::(response_bytes) { + Ok(parsed) => Ok(parsed), + Err(err) => Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(response_bytes).into_owned(), + }), } - Ok((res, start_request_time, content_type)) +} + +/// Decode an SSZ-encoded get_header response +fn decode_ssz_payload( + response_bytes: &[u8], + fork: ForkName, +) -> Result { + let data = SignedBuilderBid::from_ssz_bytes_by_fork(response_bytes, fork).map_err(|e| { + PbsError::RelayResponse { + error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), + code: 200, + } + })?; + Ok(GetHeaderResponse { version: fork, data, metadata: Default::default() }) } struct HeaderData { From 1a50d7d48eba1d36f63505ec5e9e853c8d9ae37b Mon Sep 17 00:00:00 2001 From: Joe Clapis Date: Tue, 16 Dec 2025 19:55:19 -0500 Subject: [PATCH 26/26] Refactored get_header and submit_block based on feedback --- crates/pbs/src/mev_boost/get_header.rs | 1 - crates/pbs/src/mev_boost/submit_block.rs | 435 +++++++++++++---------- crates/pbs/src/routes/submit_block.rs | 21 +- tests/src/mock_relay.rs | 26 +- tests/tests/pbs_post_blinded_blocks.rs | 2 +- 5 files changed, 274 insertions(+), 211 deletions(-) diff --git a/crates/pbs/src/mev_boost/get_header.rs b/crates/pbs/src/mev_boost/get_header.rs index 256cfbf0..d495166a 100644 --- a/crates/pbs/src/mev_boost/get_header.rs +++ b/crates/pbs/src/mev_boost/get_header.rs @@ -139,7 +139,6 @@ pub async fn get_header( // JSON let accept_types = [EncodingType::Ssz.content_type(), EncodingType::Json.content_type()].join(","); - send_headers.insert(ACCEPT, 
HeaderValue::from_str(&accept_types).unwrap()); // Send requests to all relays concurrently diff --git a/crates/pbs/src/mev_boost/submit_block.rs b/crates/pbs/src/mev_boost/submit_block.rs index a4666949..89d18fca 100644 --- a/crates/pbs/src/mev_boost/submit_block.rs +++ b/crates/pbs/src/mev_boost/submit_block.rs @@ -1,5 +1,4 @@ use std::{ - str::FromStr, sync::Arc, time::{Duration, Instant}, }; @@ -9,18 +8,18 @@ use axum::http::{HeaderMap, HeaderValue}; use cb_common::{ pbs::{ BlindedBeaconBlock, BlobsBundle, BuilderApiVersion, ForkName, ForkVersionDecode, - HEADER_CONSENSUS_VERSION, HEADER_START_TIME_UNIX_MS, KzgCommitments, PayloadAndBlobs, - RelayClient, SignedBlindedBeaconBlock, SubmitBlindedBlockResponse, + HEADER_START_TIME_UNIX_MS, KzgCommitments, PayloadAndBlobs, RelayClient, + SignedBlindedBeaconBlock, SubmitBlindedBlockResponse, error::{PbsError, ValidationError}, }, utils::{ - EncodingType, get_accept_types, get_content_type, get_user_agent_with_version, - read_chunked_body_with_max, utcnow_ms, + CONSENSUS_VERSION_HEADER, EncodingType, get_consensus_version_header, + get_user_agent_with_version, read_chunked_body_with_max, utcnow_ms, }, }; use futures::{FutureExt, future::select_ok}; use reqwest::{ - Response, StatusCode, + StatusCode, header::{ACCEPT, CONTENT_TYPE, USER_AGENT}, }; use ssz::Encode; @@ -34,6 +33,20 @@ use crate::{ state::{BuilderApiState, PbsState}, }; +/// Info about a proposal submission request. +/// Sent from submit_block to the submit_block_with_timeout function. +#[derive(Clone)] +struct ProposalInfo { + /// The signed blinded block to submit + signed_blinded_block: Arc, + + /// Common baseline of headers to send with each request + headers: Arc, + + /// The version of the submit_block route being used + api_version: BuilderApiVersion, +} + /// Implements https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlock and /// https://ethereum.github.io/builder-specs/#/Builder/submitBlindedBlockV2. 
Use `api_version` to /// distinguish between the two. @@ -45,47 +58,30 @@ pub async fn submit_block( ) -> eyre::Result> { debug!(?req_headers, "received headers"); - let fork_name = req_headers - .get(HEADER_CONSENSUS_VERSION) - .and_then(|h| { - let str = h.to_str().ok()?; - ForkName::from_str(str).ok() - }) - .unwrap_or_else(|| { - let slot = signed_blinded_block.slot().as_u64(); - state.config.chain.fork_by_slot(slot) - }); - - // safe because ForkName is visible ASCII chars - let consensus_version = HeaderValue::from_str(&fork_name.to_string()).unwrap(); - // prepare headers let mut send_headers = HeaderMap::new(); send_headers.insert(HEADER_START_TIME_UNIX_MS, HeaderValue::from(utcnow_ms())); send_headers.insert(USER_AGENT, get_user_agent_with_version(&req_headers)?); - send_headers.insert(HEADER_CONSENSUS_VERSION, consensus_version); - - // Get the accept types from the request and forward them - for value in req_headers.get_all(ACCEPT).iter() { - send_headers.append(ACCEPT, value.clone()); - } - - // Copy the content type header - send_headers.insert( - CONTENT_TYPE, - HeaderValue::from_str(get_content_type(&req_headers).content_type()).unwrap(), - ); + // Create the Accept headers for requests since the module handles both SSZ and + // JSON + let accept_types = + [EncodingType::Ssz.content_type(), EncodingType::Json.content_type()].join(","); + send_headers.insert(ACCEPT, HeaderValue::from_str(&accept_types).unwrap()); + + // Send requests to all relays concurrently + let proposal_info = Arc::new(ProposalInfo { + signed_blinded_block, + headers: Arc::new(send_headers), + api_version, + }); let mut handles = Vec::with_capacity(state.all_relays().len()); - for relay in state.all_relays().iter().cloned() { + for relay in state.all_relays().iter() { handles.push( tokio::spawn(submit_block_with_timeout( - signed_blinded_block.clone(), - relay, - send_headers.clone(), + proposal_info.clone(), + relay.clone(), state.pbs_config().timeout_get_payload_ms, - 
api_version, - fork_name, )) .map(|join_result| match join_result { Ok(res) => res, @@ -104,14 +100,11 @@ pub async fn submit_block( /// Submit blinded block to relay, retry connection errors until the /// given timeout has passed async fn submit_block_with_timeout( - signed_blinded_block: Arc, + proposal_info: Arc, relay: RelayClient, - headers: HeaderMap, timeout_ms: u64, - api_version: BuilderApiVersion, - fork_name: ForkName, ) -> Result, PbsError> { - let mut url = relay.submit_block_url(api_version)?; + let mut url = Arc::new(relay.submit_block_url(proposal_info.api_version)?); let mut remaining_timeout_ms = timeout_ms; let mut retry = 0; let mut backoff = Duration::from_millis(250); @@ -119,14 +112,11 @@ async fn submit_block_with_timeout( loop { let start_request = Instant::now(); match send_submit_block( + proposal_info.clone(), url.clone(), - &signed_blinded_block, &relay, - headers.clone(), remaining_timeout_ms, retry, - &api_version, - fork_name, ) .await { @@ -144,12 +134,14 @@ async fn submit_block_with_timeout( } } - Err(err) if err.is_not_found() && matches!(api_version, BuilderApiVersion::V2) => { + Err(err) + if err.is_not_found() && proposal_info.api_version == BuilderApiVersion::V2 => + { warn!( relay_id = relay.id.as_ref(), "relay does not support v2 endpoint, retrying with v1" ); - url = relay.submit_block_url(BuilderApiVersion::V1)?; + url = Arc::new(relay.submit_block_url(BuilderApiVersion::V1)?); } Err(err) => return Err(err), @@ -163,135 +155,37 @@ async fn submit_block_with_timeout( // back #[allow(clippy::too_many_arguments)] async fn send_submit_block( - url: Url, - signed_blinded_block: &SignedBlindedBeaconBlock, + proposal_info: Arc, + url: Arc, relay: &RelayClient, - headers: HeaderMap, timeout_ms: u64, retry: u32, - api_version: &BuilderApiVersion, - fork_name: ForkName, ) -> Result, PbsError> { - let mut original_headers = headers.clone(); - - // Check which types this request is for - let accept_types = 
get_accept_types(&headers).map_err(|e| { - PbsError::GeneralRequest(format!("error reading accept types: {e}").to_string()) - })?; - let accepts_ssz = accept_types.contains(&EncodingType::Ssz); - let accepts_json = accept_types.contains(&EncodingType::Json); - // Send the request - let mut start_request = Instant::now(); - let (mut res, mut content_type) = - send_submit_block_impl(url.clone(), signed_blinded_block, relay, headers, timeout_ms) - .await?; - let mut code = res.status(); - - // If the request only supports SSZ, but the relay only supports JSON, resubmit - // to the relay with JSON - we'll convert it ourselves - if code.is_client_error() && accepts_ssz && !accepts_json { - debug!( - relay_id = relay.id.as_ref(), - "relay does not support SSZ, resubmitting request with JSON accept and content-type" - ); - - // Resubmit the request with JSON accept and content-type headers - let elapsed = start_request.elapsed().as_millis() as u64; - let json_header_value = HeaderValue::from_str(EncodingType::Json.content_type()).unwrap(); - original_headers.insert(ACCEPT, json_header_value.clone()); - original_headers.insert(CONTENT_TYPE, json_header_value); - start_request = Instant::now(); - (res, content_type) = send_submit_block_impl( - url, - signed_blinded_block, - relay, - original_headers, - timeout_ms - elapsed, - ) - .await?; - code = res.status(); - } - - // Get the consensus fork version if provided (to avoid cloning later) - let content_type_header = res.headers().get(CONTENT_TYPE).cloned(); - - let request_latency = start_request.elapsed(); - RELAY_LATENCY - .with_label_values(&[SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) - .observe(request_latency.as_secs_f64()); - - RELAY_STATUS_CODE - .with_label_values(&[code.as_str(), SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) - .inc(); - - let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_SUBMIT_BLOCK_RESPONSE).await?; - if !code.is_success() { - let err = PbsError::RelayResponse { - error_msg: 
String::from_utf8_lossy(&response_bytes).into_owned(), - code: code.as_u16(), - }; - - // we requested the payload from all relays, but some may have not received it - warn!(relay_id = relay.id.as_ref(), retry, %err, "failed to get payload (this might be ok if other relays have it)"); - return Err(err); - }; - - if api_version != &BuilderApiVersion::V1 { - // v2 response is going to be empty, so just break here - debug!( - relay_id = relay.id.as_ref(), - retry, - latency = ?request_latency, - "successful request" - ); - - return Ok(None); - } - - // Regenerate the block from the response - let block_response = match content_type { - Some(EncodingType::Ssz) => { - let data = PayloadAndBlobs::from_ssz_bytes_by_fork(&response_bytes, fork_name) - .map_err(|e| PbsError::RelayResponse { - error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), - code: (code.as_u16()), - })?; - SubmitBlindedBlockResponse { version: fork_name, data, metadata: Default::default() } - } - Some(EncodingType::Json) => { - match serde_json::from_slice::(&response_bytes) { - Ok(parsed) => parsed, - Err(err) => { - return Err(PbsError::JsonDecode { - err, - raw: String::from_utf8_lossy(&response_bytes).into_owned(), - }); - } - } - } + let block_response = send_submit_block_impl( + relay, + url, + timeout_ms, + (*proposal_info.headers).clone(), + &proposal_info.signed_blinded_block, + retry, + proposal_info.api_version, + ) + .await?; + let block_response = match block_response { None => { - let error_msg = match content_type_header { - None => "relay response missing content type header".to_string(), - Some(ct) => format!("relay response has unsupported content type {ct:?}"), - }; - return Err(PbsError::RelayResponse { error_msg, code: code.as_u16() }); + // Break if there's no response (v2 accepted) + return Ok(None); } + Some(res) => res, }; - debug!( - relay_id = relay.id.as_ref(), - retry, - latency = ?request_latency, - version =% block_response.version, - "received 
unblinded block" - ); - + // Extract the info needed for validation let got_block_hash = block_response.data.execution_payload.block_hash().0; // request has different type so cant be deserialized in the wrong version, // response has a "version" field - match &signed_blinded_block.message() { + match &proposal_info.signed_blinded_block.message() { BlindedBeaconBlock::Electra(blinded_block) => { let expected_block_hash = blinded_block.body.execution_payload.execution_payload_header.block_hash.0; @@ -302,7 +196,7 @@ async fn send_submit_block( got_block_hash, expected_commitments, &block_response.data.blobs_bundle, - fork_name, + block_response.version, ) } @@ -316,7 +210,7 @@ async fn send_submit_block( got_block_hash, expected_commitments, &block_response.data.blobs_bundle, - fork_name, + block_response.version, ) } @@ -327,22 +221,28 @@ async fn send_submit_block( } async fn send_submit_block_impl( - url: Url, - signed_blinded_block: &SignedBlindedBeaconBlock, relay: &RelayClient, - headers: HeaderMap, + url: Arc, timeout_ms: u64, -) -> Result<(Response, Option), PbsError> { - // Get the content type of the request - let content_type = get_content_type(&headers); - - // Send the request - let res = relay.client.post(url).timeout(Duration::from_millis(timeout_ms)).headers(headers); - let body = match content_type { - EncodingType::Json => serde_json::to_vec(&signed_blinded_block).unwrap(), - EncodingType::Ssz => signed_blinded_block.as_ssz_bytes(), - }; - let res = match res.body(body).header(CONTENT_TYPE, &content_type.to_string()).send().await { + headers: HeaderMap, + signed_blinded_block: &SignedBlindedBeaconBlock, + retry: u32, + api_version: BuilderApiVersion, +) -> Result, PbsError> { + let start_request = Instant::now(); + + // Try SSZ first + let mut res = match relay + .client + .post(url.as_ref().clone()) + .timeout(Duration::from_millis(timeout_ms)) + .headers(headers.clone()) + .body(signed_blinded_block.as_ssz_bytes()) + .header(CONTENT_TYPE, 
EncodingType::Ssz.to_string()) + .header(CONSENSUS_VERSION_HEADER, signed_blinded_block.fork_name_unchecked().to_string()) + .send() + .await + { Ok(res) => res, Err(err) => { RELAY_STATUS_CODE @@ -356,23 +256,168 @@ async fn send_submit_block_impl( } }; - // Get the content type; this is only really useful for OK responses, and - // doesn't handle encoding types besides SSZ and JSON - let mut content_type: Option = None; - if res.status() == StatusCode::OK && - let Some(header) = res.headers().get(CONTENT_TYPE) - { - let header_str = header.to_str().map_err(|e| PbsError::RelayResponse { + // If we got a client error, retry with JSON - the spec says that this should be + // a 406 or 415, but we're a little more permissive here + if res.status().is_client_error() { + warn!( + relay_id = relay.id.as_ref(), + "relay does not support SSZ, resubmitting block with JSON content-type" + ); + res = match relay + .client + .post(url.as_ref().clone()) + .timeout(Duration::from_millis(timeout_ms)) + .headers(headers) + .body(serde_json::to_vec(&signed_blinded_block).unwrap()) + .header(CONTENT_TYPE, EncodingType::Json.to_string()) + .send() + .await + { + Ok(res) => res, + Err(err) => { + RELAY_STATUS_CODE + .with_label_values(&[ + TIMEOUT_ERROR_CODE_STR, + SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, + &relay.id, + ]) + .inc(); + return Err(err.into()); + } + }; + } + + // Log the response code and latency + let code = res.status(); + let request_latency = start_request.elapsed(); + RELAY_LATENCY + .with_label_values(&[SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) + .observe(request_latency.as_secs_f64()); + RELAY_STATUS_CODE + .with_label_values(&[code.as_str(), SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG, &relay.id]) + .inc(); + + // If this was API v2 and succeeded then we can just return here + if api_version != BuilderApiVersion::V1 { + debug!( + relay_id = relay.id.as_ref(), + retry, + latency = ?request_latency, + "received 202 Accepted for v2 submit_block" + ); + + match code { + 
StatusCode::ACCEPTED => { + return Ok(None); + } + StatusCode::OK => { + warn!( + relay_id = relay.id.as_ref(), + "relay sent OK response for v2 submit_block, expected 202 Accepted" + ); + return Ok(None); + } + _ => { + return Err(PbsError::RelayResponse { + error_msg: format!( + "relay sent unexpected code for builder route v2 {}: {code}", + relay.id.as_ref() + ), + code: code.as_u16(), + }); + } + } + } + + // If the code is not OK, return early + if code != StatusCode::OK { + let response_bytes = + read_chunked_body_with_max(res, MAX_SIZE_SUBMIT_BLOCK_RESPONSE).await?; + let err = PbsError::RelayResponse { + error_msg: String::from_utf8_lossy(&response_bytes).into_owned(), + code: code.as_u16(), + }; + + // we requested the payload from all relays, but some may have not received it + warn!(relay_id = relay.id.as_ref(), %err, "failed to get payload (this might be ok if other relays have it)"); + return Err(err); + } + + // We're on v1 so decode the payload normally - get the content type + let content_type = match res.headers().get(CONTENT_TYPE) { + None => { + // Assume a missing content type means JSON; shouldn't happen in practice with + // any respectable HTTP server but just in case + EncodingType::Json + } + Some(header_value) => match header_value.to_str().map_err(|e| PbsError::RelayResponse { error_msg: format!("cannot decode content-type header: {e}").to_string(), - code: (res.status().as_u16()), - })?; - if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) { - content_type = Some(EncodingType::Ssz) - } else if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) { - content_type = Some(EncodingType::Json) + code: (code.as_u16()), + })? 
{ + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Ssz.to_string()) => { + EncodingType::Ssz + } + header_str if header_str.eq_ignore_ascii_case(&EncodingType::Json.to_string()) => { + EncodingType::Json + } + header_str => { + return Err(PbsError::RelayResponse { + error_msg: format!("unsupported content type: {header_str}"), + code: code.as_u16(), + }) + } + }, + }; + + // Decode the body + let fork = get_consensus_version_header(res.headers()); + let response_bytes = read_chunked_body_with_max(res, MAX_SIZE_SUBMIT_BLOCK_RESPONSE).await?; + let block_response = match content_type { + EncodingType::Json => decode_json_payload(&response_bytes)?, + EncodingType::Ssz => { + let fork = fork.ok_or(PbsError::RelayResponse { + error_msg: "relay did not provide consensus version header for ssz payload" + .to_string(), + code: code.as_u16(), + })?; + decode_ssz_payload(&response_bytes, fork)? } + }; + + // Log and return + debug!( + relay_id = relay.id.as_ref(), + retry, + latency = ?request_latency, + version =% block_response.version, + "received unblinded block" + ); + Ok(Some(block_response)) +} + +/// Decode a JSON-encoded submit_block response +fn decode_json_payload(response_bytes: &[u8]) -> Result { + match serde_json::from_slice::(response_bytes) { + Ok(parsed) => Ok(parsed), + Err(err) => Err(PbsError::JsonDecode { + err, + raw: String::from_utf8_lossy(response_bytes).into_owned(), + }), } - Ok((res, content_type)) +} + +/// Decode an SSZ-encoded submit_block response +fn decode_ssz_payload( + response_bytes: &[u8], + fork: ForkName, +) -> Result { + let data = PayloadAndBlobs::from_ssz_bytes_by_fork(response_bytes, fork).map_err(|e| { + PbsError::RelayResponse { + error_msg: (format!("error decoding relay payload: {e:?}")).to_string(), + code: 200, + } + })?; + Ok(SubmitBlindedBlockResponse { version: fork, data, metadata: Default::default() }) } fn validate_unblinded_block( diff --git a/crates/pbs/src/routes/submit_block.rs 
b/crates/pbs/src/routes/submit_block.rs index 539ce631..78829641 100644 --- a/crates/pbs/src/routes/submit_block.rs +++ b/crates/pbs/src/routes/submit_block.rs @@ -8,8 +8,8 @@ use axum::{ use cb_common::{ pbs::{BuilderApiVersion, GetPayloadInfo}, utils::{ - EncodingType, RawRequest, deserialize_body, get_accept_types, get_user_agent, - timestamp_of_slot_start_millis, utcnow_ms, + CONSENSUS_VERSION_HEADER, EncodingType, RawRequest, deserialize_body, get_accept_types, + get_user_agent, timestamp_of_slot_start_millis, utcnow_ms, }, }; use reqwest::{StatusCode, header::CONTENT_TYPE}; @@ -62,14 +62,12 @@ async fn handle_submit_block_impl>( let block_hash = signed_blinded_block.block_hash(); let slot_start_ms = timestamp_of_slot_start_millis(slot.into(), state.config.chain); let ua = get_user_agent(&req_headers); - let response_types = get_accept_types(&req_headers).map_err(|e| { + let accept_types = get_accept_types(&req_headers).map_err(|e| { error!(%e, "error parsing accept header"); PbsClientError::DecodeError(format!("error parsing accept header: {e}")) - }); - if let Err(e) = response_types { - return Ok((StatusCode::BAD_REQUEST, e).into_response()); - } - let response_types = response_types.unwrap(); + })?; + let accepts_ssz = accept_types.contains(&EncodingType::Ssz); + let accepts_json = accept_types.contains(&EncodingType::Json); info!(ua, ms_into_slot = now.saturating_sub(slot_start_ms), "new request"); @@ -83,9 +81,6 @@ async fn handle_submit_block_impl>( .with_label_values(&["200", SUBMIT_BLINDED_BLOCK_ENDPOINT_TAG]) .inc(); - let accepts_ssz = response_types.contains(&EncodingType::Ssz); - let accepts_json = response_types.contains(&EncodingType::Json); - // Try SSZ if accepts_ssz { let mut response = payload_and_blobs.data.as_ssz_bytes().into_response(); @@ -94,6 +89,10 @@ async fn handle_submit_block_impl>( let content_type_header = HeaderValue::from_str(EncodingType::Ssz.content_type()).unwrap(); response.headers_mut().insert(CONTENT_TYPE, 
content_type_header); + response.headers_mut().insert( + CONSENSUS_VERSION_HEADER, + HeaderValue::from_str(&payload_and_blobs.version.to_string()).unwrap(), + ); info!("sending response as SSZ"); return Ok(response); } diff --git a/tests/src/mock_relay.rs b/tests/src/mock_relay.rs index 7a55ca26..15c6cbbc 100644 --- a/tests/src/mock_relay.rs +++ b/tests/src/mock_relay.rs @@ -230,7 +230,8 @@ async fn handle_submit_block_v1( return e.into_response(); } let accept_types = accept_types.unwrap(); - let content_type = if state.supported_content_types.contains(&EncodingType::Ssz) && + let consensus_version_header = get_consensus_version_header(&headers); + let response_content_type = if state.supported_content_types.contains(&EncodingType::Ssz) && accept_types.contains(&EncodingType::Ssz) { EncodingType::Ssz @@ -243,6 +244,13 @@ async fn handle_submit_block_v1( .into_response(); }; + // Error out if the request content type is not supported + let content_type = get_content_type(&headers); + if !state.supported_content_types.contains(&content_type) { + return (StatusCode::UNSUPPORTED_MEDIA_TYPE, "Unsupported content type".to_string()) + .into_response(); + }; + let data = if state.large_body() { vec![1u8; 1 + MAX_SIZE_SUBMIT_BLOCK_RESPONSE] } else { @@ -267,7 +275,7 @@ async fn handle_submit_block_v1( let response = PayloadAndBlobs { execution_payload: execution_payload.into(), blobs_bundle }; - if content_type == EncodingType::Ssz { + if response_content_type == EncodingType::Ssz { response.as_ssz_bytes() } else { // Return JSON for everything else; this is fine for the mock @@ -281,7 +289,19 @@ async fn handle_submit_block_v1( }; let mut response = (StatusCode::OK, data).into_response(); - let content_type_header = HeaderValue::from_str(&content_type.to_string()).unwrap(); + if response_content_type == EncodingType::Ssz { + let consensus_version_header = match consensus_version_header { + Some(header) => header, + None => { + return (StatusCode::BAD_REQUEST, "Missing 
consensus version header".to_string()) + .into_response() + } + }; + let consensus_version_header = + HeaderValue::from_str(&consensus_version_header.to_string()).unwrap(); + response.headers_mut().insert(CONSENSUS_VERSION_HEADER, consensus_version_header); + } + let content_type_header = HeaderValue::from_str(&response_content_type.to_string()).unwrap(); response.headers_mut().insert(CONTENT_TYPE, content_type_header); response } diff --git a/tests/tests/pbs_post_blinded_blocks.rs b/tests/tests/pbs_post_blinded_blocks.rs index c6201e52..b5a16e89 100644 --- a/tests/tests/pbs_post_blinded_blocks.rs +++ b/tests/tests/pbs_post_blinded_blocks.rs @@ -176,7 +176,7 @@ async fn test_submit_block_v1_multitype_json() -> Result<()> { HashSet::from([EncodingType::Ssz, EncodingType::Json]), HashSet::from([EncodingType::Json]), EncodingType::Ssz, - 1, + 2, ) .await?; assert_eq!(res.status(), StatusCode::OK);