Skip to content
Merged
13 changes: 13 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,19 @@ members = [
"client",
]
exclude = [
"examples/crates",
"examples/github",
"examples/submission_api",
"examples/blob_api",
"examples/block_api",
"examples/storage_api",
"examples/chain_api",
"examples/parallel_submission",
"examples/multisig",
"examples/batch",
"examples/estimating_fees",
"examples/custom_ext_event_storage",
"examples/subscriptions",
"ffi",
"scripts",
]
Expand Down
2 changes: 1 addition & 1 deletion client/examples/blob_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ async fn main() -> Result<(), Error> {
client
.tx()
.data_availability()
.submit_blob_metadata(2, blob_hash, blob.len() as u64, commitments);
.submit_blob_metadata(2, blob_hash, blob.len() as u64, commitments, None, None);

let tx = unsigned_tx.sign(&signer, Options::default()).await.unwrap().encode();

Expand Down
4 changes: 3 additions & 1 deletion client/src/transaction_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1192,8 +1192,10 @@ impl DataAvailability {
blob_hash: H256,
size: u64,
commitments: Vec<u8>,
eval_point_seed: Option<[u8; 32]>,
eval_claim: Option<[u8; 16]>,
) -> SubmittableTransaction {
let value = avail::data_availability::tx::SubmitBlobMetadata { app_id, blob_hash, size, commitments };
let value = avail::data_availability::tx::SubmitBlobMetadata { app_id, blob_hash, size, commitments, eval_point_seed, eval_claim };
SubmittableTransaction::from_encodable(self.0.clone(), value)
}
}
Expand Down
67 changes: 54 additions & 13 deletions core/src/header.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use codec::{Compact, Decode, Encode};
use primitive_types::H256;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use subxt_core::config::{Hasher, Header, substrate::BlakeTwo256};
use subxt_core::config::{substrate::BlakeTwo256, Hasher, Header as SubxtHeader};

pub use subxt_core::config::substrate::{Digest, DigestItem};

Expand All @@ -19,10 +19,11 @@ pub struct AvailHeader {
}

impl AvailHeader {
/// Data root of all DA data in this block, regardless of PCS (KZG/Fri).
///
/// NOTE(review): this span comes from a diff view with the +/- markers lost.
/// The `V3`/`V4` arms appear to be the removed side of the hunk and the
/// `Kzg`/`Fri` arms the added side — confirm against the merged file before
/// relying on which arms are live.
pub fn data_root(&self) -> H256 {
    match &self.extension {
        HeaderExtension::V3(ext) => ext.commitment.data_root,
        HeaderExtension::V4(ext) => ext.commitment.data_root,
        // Both PCS flavours expose the same data root; only its location
        // inside the extension differs (nested commitment vs. top level).
        HeaderExtension::Kzg(KzgHeader::V4(ext)) => ext.commitment.data_root,
        HeaderExtension::Fri(FriHeader::V1(ext)) => ext.data_root,
    }
}

Expand All @@ -31,7 +32,7 @@ impl AvailHeader {
}
}

impl Header for AvailHeader {
impl SubxtHeader for AvailHeader {
type Hasher = BlakeTwo256;
type Number = u32;

Expand Down Expand Up @@ -65,11 +66,32 @@ where
}
}

/// Top-level DA header extension: *which PCS + which version inside*.
///
/// NOTE(review): diff view with +/- markers lost. The `V3`/`V4` variants
/// (with explicit SCALE discriminants 2 and 3) look like the removed side
/// of the hunk, while `Kzg`/`Fri` look like the added side. If `Kzg`/`Fri`
/// replace `V3`/`V4`, verify that their derived SCALE indices still match
/// what the runtime encodes on-chain — TODO confirm against the node.
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode)]
#[repr(u8)]
// #[serde(rename_all = "camelCase")]
pub enum HeaderExtension {
    V3(V3HeaderExtension) = 2,
    V4(V4HeaderExtension) = 3,
    /// KZG-based DA header (current mainnet scheme, v4).
    Kzg(KzgHeader),
    /// Fri/Binius-based DA header (new scheme).
    Fri(FriHeader),
}

impl Default for HeaderExtension {
    /// Defaults to an empty Fri v1 extension (no blobs, zero data root).
    fn default() -> Self {
        Self::Fri(FriHeader::V1(Default::default()))
    }
}

/// KZG header variants (only v4 is used on-chain now).
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode)]
pub enum KzgHeader {
    /// Version 4 of the KZG header extension (the only supported version).
    V4(V4HeaderExtension),
}

/// Fri header variants (v1 for now).
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode)]
pub enum FriHeader {
    /// Version 1 of the Fri header extension (the only version so far).
    V1(FriV1HeaderExtension),
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
Expand All @@ -92,12 +114,6 @@ impl Decode for V3HeaderExtension {
}
}

// NOTE(review): this is a second `Default for HeaderExtension` impl in this
// view — presumably the *removed* side of the diff (the old V3 default),
// superseded by the Fri-based default defined earlier. Confirm only one of
// the two impls survives the merge; two impls of the same trait for the
// same type would not compile.
impl Default for HeaderExtension {
    fn default() -> Self {
        Self::V3(Default::default())
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct CompactDataLookup {
Expand Down Expand Up @@ -184,3 +200,28 @@ pub struct V4CompactDataLookup {
pub index: Vec<DataLookupItem>,
pub rows_per_tx: Vec<u16>,
}

/// Fri blob commitment: one entry per blob in the block.
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, Default)]
#[serde(rename_all = "camelCase")]
pub struct FriBlobCommitment {
    /// Blob size in bytes (original data, before encoding).
    pub size_bytes: u64,
    /// Commitment to the encoded blob (Merkle root, 32 bytes).
    pub commitment: H256,
}

/// Version tag for Fri parameters.
/// This mirrors `FriParamsVersion(pub u8)` on-chain.
// NOTE(review): `rename_all` on a tuple struct has no named fields to
// rename, so this serde attribute is a no-op; harmless but removable.
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, Default)]
#[serde(rename_all = "camelCase")]
pub struct FriParamsVersion(pub u8);

/// Fri v1 header extension: aggregate of all blob commitments for the block.
#[derive(Debug, Clone, Serialize, Deserialize, Encode, Decode, Default)]
#[serde(rename_all = "camelCase")]
pub struct FriV1HeaderExtension {
    /// One commitment entry per blob included in the block.
    pub blobs: Vec<FriBlobCommitment>,
    /// Data root of all DA data in the block (surfaced by `AvailHeader::data_root`).
    pub data_root: H256,
    /// Version of the Fri parameter set used to produce the commitments.
    pub params_version: FriParamsVersion,
}
8 changes: 7 additions & 1 deletion core/src/types/pallets.rs
Original file line number Diff line number Diff line change
Expand Up @@ -624,13 +624,17 @@ pub mod data_availability {
pub blob_hash: H256,
pub size: u64,
pub commitments: Vec<u8>,
pub eval_point_seed: Option<[u8; 32]>,
pub eval_claim: Option<[u8; 16]>,
}
impl Encode for SubmitBlobMetadata {
    /// SCALE-encodes the call arguments in field-declaration order.
    ///
    /// `app_id` is compact-encoded (matching the on-chain compact
    /// parameter); every other field uses its plain SCALE encoding.
    fn encode_to<T: codec::Output + ?Sized>(&self, dest: &mut T) {
        Compact(self.app_id).encode_to(dest);
        // Encode each field straight into `dest` instead of materialising an
        // intermediate `Vec<u8>` per field via `dest.write(&x.encode())`:
        // the produced bytes are identical, without the per-field allocation.
        self.blob_hash.encode_to(dest);
        self.size.encode_to(dest);
        self.commitments.encode_to(dest);
        self.eval_point_seed.encode_to(dest);
        self.eval_claim.encode_to(dest);
    }
}
impl Decode for SubmitBlobMetadata {
Expand All @@ -639,7 +643,9 @@ pub mod data_availability {
let blob_hash = Decode::decode(input)?;
let size = Decode::decode(input)?;
let commitments = Decode::decode(input)?;
Ok(Self { app_id, blob_hash, size, commitments })
let eval_point_seed = Decode::decode(input)?;
let eval_claim = Decode::decode(input)?;
Ok(Self { app_id, blob_hash, size, commitments, eval_point_seed, eval_claim })
}
}
impl HasHeader for SubmitBlobMetadata {
Expand Down
2 changes: 2 additions & 0 deletions examples/blob_api/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
target
Cargo.lock
8 changes: 8 additions & 0 deletions examples/blob_api/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Standalone example crate; it is excluded from the workspace by the root
# Cargo.toml, so it builds with its own lockfile.
[package]
name = "blob-api-example"
edition = "2024"

[dependencies]
# The client library, pulled from the sibling workspace by path.
avail-rust = { package = "avail-rust-client", path = "./../../client", default-features = false, features = ["native", "reqwest"] }
hex = "0.4.3"
tokio = { version = "1.45.0", features = ["rt-multi-thread", "macros"] }
77 changes: 77 additions & 0 deletions examples/blob_api/src/main.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
use avail_rust::{avail_rust_core::rpc::blob::submit_blob, prelude::*};

// Example: submit a blob and its on-chain metadata to a local Avail node.
//
// The raw blob bytes travel over the `submit_blob` RPC, while the signed
// `submit_blob_metadata` extrinsic records the blob hash, size, commitments
// and the (optional) Fri evaluation point seed / claim.
#[tokio::main]
async fn main() -> Result<(), Error> {
    let client = Client::new(LOCAL_ENDPOINT).await?;

    // For testing blob submission tx.
    // Hard-coded test vectors: a 32-byte blob of `0x41` plus precomputed
    // hash / commitment / evaluation values for that payload — TODO confirm
    // these stay in sync if the blob bytes ever change.
    let blob = hex::decode("4141414141414141414141414141414141414141414141414141414141414141").unwrap();
    let blob_hash = H256::from_slice(&hex::decode("59cad5948673622c1d64e2322488bf01619f7ff45789741b15a9f782ce9290a8").unwrap());
    let eval_point_seed: [u8; 32] = hex::decode("8ed022d17e8ba7e14b1a62c62b55cd528f0bb9ac742e82e94584471b6ad6c48e").unwrap().try_into().expect("32 bytes");
    let eval_claim: [u8; 16] = hex::decode("679d078d1c77e2787c9908b731ddc03d").unwrap().try_into().expect("16 bytes");
    let commitments = hex::decode("329fef926f1a3b8c7d2b9ee76a599474f73b12156c5642fd835147751415b6c2").unwrap();

    let signer = alice();
    // Build the unsigned metadata extrinsic (app id 2) carrying the Fri
    // evaluation point seed and claim alongside the commitments.
    let unsigned_tx = client.tx().data_availability().submit_blob_metadata(
        2,
        blob_hash,
        blob.len() as u64,
        commitments,
        Some(eval_point_seed),
        Some(eval_claim),
    );

    let tx = unsigned_tx
        .sign(&signer, Options::default())
        .await
        .expect("signing with the dev account should succeed")
        .encode();

    // Ship the signed metadata tx together with the raw blob bytes.
    if let Err(e) = submit_blob(&client.rpc_client, &tx, &blob).await {
        println!("An error has occurred: {e}");
    } else {
        println!("Blob submitted");
    }

    // For testing blob RPCs
    // let blob_hash = H256::from_slice(&hex::decode("59cad5948673622c1d64e2322488bf01619f7ff45789741b15a9f782ce9290a8").unwrap());
    // let block_hash = H256::from_slice(&hex::decode("9139401fe68807814ea852d97e646a022ad07885b03a3a99ecfbb99735435824").unwrap());

    //
    // getBlob
    //
    // Mode 1: using a specific block
    // let blob_from_block = client
    //     .chain()
    //     .blob_get_blob(blob_hash, Some(block_hash))
    //     .await?;
    // println!("getBlob (block): {:?}", blob_from_block);

    // // Mode 2: using indexed storage info
    // let blob_canonical = client
    //     .chain()
    //     .blob_get_blob(blob_hash, None)
    //     .await?;
    // println!("getBlob (indexed_info): {:?}", blob_canonical);

    // //
    // // getBlobInfo
    // //
    // let info = client.chain().blob_get_blob_info(blob_hash).await?;
    // println!("blobInfo: {:?}", info);

    // //
    // // inclusionProof
    // //
    // // Using Indexed info
    // let proof = client
    //     .chain()
    //     .blob_inclusion_proof(blob_hash, None)
    //     .await?;
    // println!("inclusion proof (indexed_info): {:?}", proof);

    // // Using a specific block
    // let proof_block = client
    //     .chain()
    //     .blob_inclusion_proof(blob_hash, Some(block_hash))
    //     .await?;
    // println!("inclusion proof(block): {:?}", proof_block);

    Ok(())
}