Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
169 changes: 83 additions & 86 deletions Cargo.lock

Large diffs are not rendered by default.

34 changes: 34 additions & 0 deletions jolt-core/src/poly/commitment/commitment_scheme.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,15 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static {
type Field: JoltField + Sized;
type ProverSetup: Clone + Sync + Send + Debug + CanonicalSerialize + CanonicalDeserialize;
type VerifierSetup: Clone + Sync + Send + Debug + CanonicalSerialize + CanonicalDeserialize;
type CompressedCommitment: Default
+ Debug
+ Sync
+ Send
+ PartialEq
+ CanonicalSerialize
+ CanonicalDeserialize
+ AppendToTranscript
+ Clone;
type Commitment: Default
+ Debug
+ Sync
Expand Down Expand Up @@ -50,6 +59,22 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static {
setup: &Self::ProverSetup,
) -> (Self::Commitment, Self::OpeningProofHint);

/// Commits to a multilinear polynomial using the provided setup, where the commitment is compressed.
///
/// # Arguments
/// * `poly` - The multilinear polynomial to commit to
/// * `setup` - The prover setup for the commitment scheme
///
/// # Returns
/// A tuple containing the compressed commitment to the polynomial and a hint that can be used
/// to optimize opening proof generation
///
/// # Panics
/// The default implementation always panics. Commitment schemes that support
/// compressed commitments must override this method.
fn commit_compressed(
    _poly: &MultilinearPolynomial<Self::Field>,
    _setup: &Self::ProverSetup,
) -> (Self::CompressedCommitment, Self::OpeningProofHint) {
    panic!("`commit_compressed` is not implemented for this commitment scheme. CompressedCommitment of type `{}` not supported.", std::any::type_name::<Self::CompressedCommitment>());
}

/// Commits to multiple multilinear polynomials in batch.
///
/// # Arguments
Expand Down Expand Up @@ -147,4 +172,13 @@ pub trait StreamingCommitmentScheme: CommitmentScheme {
onehot_k: Option<usize>,
tier1_commitments: &[Self::ChunkState],
) -> (Self::Commitment, Self::OpeningProofHint);

/// Compute tier 2 commitment from accumulated tier 1 commitments, where the output commitment is compressed.
///
/// # Arguments
/// * `_setup` - The prover setup for the commitment scheme
/// * `_onehot_k` - Optional one-hot parameter `K`; semantics match `aggregate_chunks`
///   (presumably the number of one-hot slices — confirm with the implementing scheme)
/// * `_tier1_commitments` - Accumulated per-chunk tier 1 commitment state
///
/// # Returns
/// A tuple containing the compressed tier 2 commitment and an opening-proof hint
///
/// # Panics
/// The default implementation always panics. Streaming commitment schemes that
/// support compressed commitments must override this method.
fn aggregate_chunks_compressed(
    _setup: &Self::ProverSetup,
    _onehot_k: Option<usize>,
    _tier1_commitments: &[Self::ChunkState],
) -> (Self::CompressedCommitment, Self::OpeningProofHint) {
    panic!("`aggregate_chunks_compressed` is not implemented for this commitment scheme. CompressedCommitment of type `{}` not supported.", std::any::type_name::<Self::CompressedCommitment>());
}
}
161 changes: 148 additions & 13 deletions jolt-core/src/poly/commitment/dory/commitment_scheme.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,19 +3,21 @@
use super::dory_globals::DoryGlobals;
use super::jolt_dory_routines::{JoltG1Routines, JoltG2Routines};
use super::wrappers::{
jolt_to_ark, ArkDoryProof, ArkFr, ArkG1, ArkGT, ArkworksProverSetup, ArkworksVerifierSetup,
JoltToDoryTranscript, BN254,
jolt_to_ark, ArkDoryProof, ArkFr, ArkG1, ArkG2, ArkGT, ArkGTCompressed, JoltBn254,
JoltToDoryTranscript,
};
use crate::poly::commitment::dory::setup::{DoryProverSetup, DoryVerifierSetup};
use crate::{
field::JoltField,
poly::commitment::commitment_scheme::{CommitmentScheme, StreamingCommitmentScheme},
poly::multilinear_polynomial::MultilinearPolynomial,
transcripts::Transcript,
utils::{errors::ProofVerifyError, math::Math, small_scalar::SmallScalar},
};
use ark_bn254::{G1Affine, G1Projective};
use ark_bn254::{Bn254 as ArkBn254, G1Affine, G1Projective};
use ark_ec::pairing::{CompressedPairing, MillerLoopOutput, Pairing};
use ark_ec::CurveGroup;
use ark_ff::Zero;
use ark_ff::{One, Zero};
use dory::primitives::{
arithmetic::{Group, PairingCurve},
poly::Polynomial,
Expand All @@ -26,20 +28,21 @@
use tracing::trace_span;

#[derive(Clone)]
pub struct DoryCommitmentScheme;
pub struct DoryCommitmentScheme {}

impl CommitmentScheme for DoryCommitmentScheme {
type Field = ark_bn254::Fr;
type ProverSetup = ArkworksProverSetup;
type VerifierSetup = ArkworksVerifierSetup;
type ProverSetup = DoryProverSetup;
type VerifierSetup = DoryVerifierSetup;
type Commitment = ArkGT;
type Proof = ArkDoryProof;
type BatchedProof = Vec<ArkDoryProof>;
type OpeningProofHint = Vec<ArkG1>;
type CompressedCommitment = ArkGTCompressed;

fn setup_prover(max_num_vars: usize) -> Self::ProverSetup {
let _span = trace_span!("DoryCommitmentScheme::setup_prover").entered();
let setup = ArkworksProverSetup::new_from_urs(&mut OsRng, max_num_vars);
let setup = DoryProverSetup::new_from_urs(&mut OsRng, max_num_vars);

DoryGlobals::init_prepared_cache(&setup.g1_vec, &setup.g2_vec);

Expand All @@ -64,7 +67,28 @@

let (tier_2, row_commitments) = <MultilinearPolynomial<ark_bn254::Fr> as Polynomial<
ArkFr,
>>::commit::<BN254, JoltG1Routines>(
>>::commit::<JoltBn254, JoltG1Routines>(
poly, nu, sigma, setup
)
.expect("commitment should succeed");

(tier_2, row_commitments)
}

fn commit_compressed(
poly: &MultilinearPolynomial<ark_bn254::Fr>,
setup: &Self::ProverSetup,
) -> (Self::CompressedCommitment, Self::OpeningProofHint) {
let _span = trace_span!("DoryCommitmentScheme::commit").entered();

let num_cols = DoryGlobals::get_num_columns();
let num_rows = DoryGlobals::get_max_num_rows();
let sigma = num_cols.log_2();
let nu = num_rows.log_2();

let (tier_2, row_commitments) = <MultilinearPolynomial<ark_bn254::Fr> as Polynomial<
ArkFr,
>>::commit_compressed::<JoltBn254, JoltG1Routines>(

Check failure on line 91 in jolt-core/src/poly/commitment/dory/commitment_scheme.rs

View workflow job for this annotation

GitHub Actions / clippy

cannot find method or associated constant `commit_compressed` in trait `Polynomial`

Check failure on line 91 in jolt-core/src/poly/commitment/dory/commitment_scheme.rs

View workflow job for this annotation

GitHub Actions / jolt binary check

cannot find method or associated constant `commit_compressed` in trait `Polynomial`

Check failure on line 91 in jolt-core/src/poly/commitment/dory/commitment_scheme.rs

View workflow job for this annotation

GitHub Actions / Test tracer

cannot find method or associated constant `commit_compressed` in trait `Polynomial`

Check failure on line 91 in jolt-core/src/poly/commitment/dory/commitment_scheme.rs

View workflow job for this annotation

GitHub Actions / Jolt SDK Verifier Tests

cannot find method or associated constant `commit_compressed` in trait `Polynomial`

Check failure on line 91 in jolt-core/src/poly/commitment/dory/commitment_scheme.rs

View workflow job for this annotation

GitHub Actions / Test jolt-core

cannot find method or associated constant `commit_compressed` in trait `Polynomial`

Check failure on line 91 in jolt-core/src/poly/commitment/dory/commitment_scheme.rs

View workflow job for this annotation

GitHub Actions / Build Wasm

cannot find method or associated constant `commit_compressed` in trait `Polynomial`
poly, nu, sigma, setup
)
.expect("commitment should succeed");
Expand Down Expand Up @@ -118,7 +142,7 @@

let mut dory_transcript = JoltToDoryTranscript::<ProofTranscript>::new(transcript);

dory::prove::<ArkFr, BN254, JoltG1Routines, JoltG2Routines, _, _>(
dory::prove::<ArkFr, JoltBn254, JoltG1Routines, JoltG2Routines, _, _>(
poly,
&ark_point,
row_commitments,
Expand Down Expand Up @@ -153,7 +177,7 @@

let mut dory_transcript = JoltToDoryTranscript::<ProofTranscript>::new(transcript);

dory::verify::<ArkFr, BN254, JoltG1Routines, JoltG2Routines, _>(
dory::verify::<ArkFr, JoltBn254, JoltG1Routines, JoltG2Routines, _>(
*commitment,
ark_eval,
&ark_point,
Expand Down Expand Up @@ -315,17 +339,128 @@
}

let g2_bases = &setup.g2_vec[..row_commitments.len()];
let tier_2 = <BN254 as PairingCurve>::multi_pair_g2_setup(&row_commitments, g2_bases);
let tier_2 = JoltBn254::multi_pair_g2_setup(&row_commitments, g2_bases);

(tier_2, row_commitments)
} else {
let row_commitments: Vec<ArkG1> =
chunks.iter().flat_map(|chunk| chunk.clone()).collect();

let g2_bases = &setup.g2_vec[..row_commitments.len()];
let tier_2 = JoltBn254::multi_pair_g2_setup(&row_commitments, g2_bases);

(tier_2, row_commitments)
}
}

/// Compute the tier 2 commitment from accumulated tier 1 (row) commitments,
/// producing a compressed GT element.
///
/// Same aggregation as the uncompressed `aggregate_chunks`, except the final
/// multi-pairing goes through `multi_pair_g2_setup_optimized_compressed`,
/// which applies a compressed final exponentiation.
// NOTE(review): the span name below says "compute_tier2_commitment_compressed"
// while the method is `aggregate_chunks_compressed` — confirm this is intentional.
#[tracing::instrument(
    skip_all,
    name = "DoryCommitmentScheme::compute_tier2_commitment_compressed"
)]
fn aggregate_chunks_compressed(
    setup: &Self::ProverSetup,
    onehot_k: Option<usize>,
    chunks: &[Self::ChunkState],
) -> (Self::CompressedCommitment, Self::OpeningProofHint) {
    if let Some(K) = onehot_k {
        // One-hot case: the row matrix has K * T / row_len rows, and rows
        // belonging to chunk `chunk_index` live at the interleaved positions
        // chunk_index, chunk_index + rows_per_k, chunk_index + 2*rows_per_k, ...
        // NOTE(review): this assumes chunks.len() <= rows_per_k and that each
        // chunk holds one commitment per slice — confirm against the streaming
        // producer that fills `ChunkState`.
        let row_len = DoryGlobals::get_num_columns();
        let T = DoryGlobals::get_T();
        let rows_per_k = T / row_len;
        let num_rows = K * T / row_len;

        // Scatter each chunk's commitments into the interleaved layout;
        // positions not written by any chunk remain the identity (zero) point.
        let mut row_commitments = vec![ArkG1(G1Projective::zero()); num_rows];
        for (chunk_index, commitments) in chunks.iter().enumerate() {
            row_commitments
                .par_iter_mut()
                .skip(chunk_index)
                .step_by(rows_per_k)
                .zip(commitments.par_iter())
                .for_each(|(dest, src)| *dest = *src);
        }

        // Pair each row commitment with its G2 setup base and compress the
        // resulting GT element.
        let g2_bases = &setup.g2_vec[..row_commitments.len()];
        let tier_2 = multi_pair_g2_setup_optimized_compressed(&row_commitments, g2_bases);

        // The row commitments double as the opening-proof hint.
        (tier_2, row_commitments)
    } else {
        // Dense case: chunks are concatenated in order to form the row
        // commitments.
        let row_commitments: Vec<ArkG1> =
            chunks.iter().flat_map(|chunk| chunk.clone()).collect();

        let g2_bases = &setup.g2_vec[..row_commitments.len()];
        let tier_2 = multi_pair_g2_setup_optimized_compressed(&row_commitments, g2_bases);

        (tier_2, row_commitments)
    }
}
}

/// Pick a chunk size for splitting `total` items across rayon workers.
///
/// Inputs smaller than `MIN_CHUNK` are handled as a single chunk; otherwise
/// the work is divided evenly over the available threads and the per-thread
/// share is clamped into `[MIN_CHUNK, MAX_CHUNK]`.
fn determine_chunk_size(total: usize) -> usize {
    const MIN_CHUNK: usize = 32;
    const MAX_CHUNK: usize = 128;

    if total < MIN_CHUNK {
        total
    } else {
        let per_thread = total.div_ceil(rayon::current_num_threads());
        per_thread.clamp(MIN_CHUNK, MAX_CHUNK)
    }
}

/// Optimized multi-pairing dispatch for G2 from setup.
///
/// Runs the parallel Miller loop over all (G1, G2) pairs, then applies the
/// compressed final exponentiation to obtain a compressed GT element.
fn multi_pair_g2_setup_optimized_compressed(ps: &[ArkG1], qs: &[ArkG2]) -> ArkGTCompressed {
    let miller_output = multi_pair_g1_setup_parallel(ps, qs);
    ArkGTCompressed(
        ArkBn254::compressed_final_exponentiation(miller_output)
            .expect("Final exponentiation should not fail"),
    )
}

/// Parallel multi-pairing with G1 from setup (uses cache if available).
///
/// Splits the pairs into chunks, runs a multi-Miller loop per chunk in
/// parallel, and multiplies the partial Miller-loop outputs together.
#[tracing::instrument(skip_all, name = "multi_pair_g1_setup_parallel", fields(len = ps.len(), chunk_size = determine_chunk_size(ps.len())))]
fn multi_pair_g1_setup_parallel(
    ps: &[ArkG1],
    qs: &[ArkG2],
) -> MillerLoopOutput<ark_ec::bn::Bn<ark_bn254::Config>> {
    use ark_bn254::{G1Affine, G2Affine};
    use rayon::prelude::*;

    let chunk_size = determine_chunk_size(ps.len());

    // NOTE: no cache as in the dory arkworks implementation.

    // Identity element of the Miller-loop output group, used as the
    // reduction's neutral value.
    let identity = || {
        ark_ec::pairing::MillerLoopOutput(
            <<ArkBn254 as ark_ec::pairing::Pairing>::TargetField>::one(),
        )
    };

    qs.par_chunks(chunk_size)
        .enumerate()
        .map(|(chunk_idx, q_chunk)| {
            // Slice of `ps` aligned with this chunk of `qs`.
            let lo = chunk_idx * chunk_size;
            let hi = lo + q_chunk.len();

            // Prepare the G2 points for the pairing engine.
            let prepared_qs: Vec<<ArkBn254 as ark_ec::pairing::Pairing>::G2Prepared> = q_chunk
                .iter()
                .map(|q| G2Affine::from(q.0).into())
                .collect();

            // Prepare the matching G1 points.
            let prepared_ps: Vec<<ArkBn254 as ark_ec::pairing::Pairing>::G1Prepared> = ps[lo..hi]
                .iter()
                .map(|p| G1Affine::from(p.0).into())
                .collect();

            ArkBn254::multi_miller_loop(prepared_ps, prepared_qs)
        })
        .reduce(identity, |a, b| {
            ark_ec::pairing::MillerLoopOutput(a.0 * b.0)
        })
}
5 changes: 4 additions & 1 deletion jolt-core/src/poly/commitment/dory/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
mod commitment_scheme;
mod dory_globals;
mod jolt_dory_routines;
mod serde;
mod setup;
mod wrappers;

#[cfg(test)]
Expand All @@ -14,7 +16,8 @@ mod tests;
pub use commitment_scheme::DoryCommitmentScheme;
pub use dory_globals::{DoryContext, DoryGlobals};
pub use jolt_dory_routines::{JoltG1Routines, JoltG2Routines};
pub use setup::{DoryProverSetup, DoryVerifierSetup};
pub use wrappers::{
ArkDoryProof, ArkFr, ArkG1, ArkG2, ArkGT, ArkworksProverSetup, ArkworksVerifierSetup,
JoltFieldWrapper, BN254,
JoltBn254, JoltFieldWrapper,
};
Loading
Loading