Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
beaa0b1
why u no work
dknopik Jan 10, 2025
6530f20
working lagrange interpolation
dknopik Jan 13, 2025
0970790
Merge branch 'unstable' into bls-interpolation
dknopik Jan 13, 2025
d1d4ce0
bench
dknopik Jan 13, 2025
eef1f8c
integrate signature recovery
dknopik Jan 13, 2025
1aad41f
Ensure the "workspace.dependencies" table is sorted
dknopik Jan 14, 2025
8cc75ea
Merge branch 'check-workspace-deps' into bls-interpolation
dknopik Jan 14, 2025
0553283
Clean up API, allow split with arbitrary IDs
dknopik Jan 14, 2025
612d53e
cargo fmt
dknopik Jan 14, 2025
b0c2e4c
flipped condition
dknopik Jan 14, 2025
ec28268
add `blsful`-ish implementation
dknopik Jan 14, 2025
6e10f3f
switch to uncompressed serialization
dknopik Jan 15, 2025
bf02384
zeroize
dknopik Jan 15, 2025
8b6ee95
improve single thread performance
dknopik Jan 16, 2025
848e2e9
do not bother with `MaybeUninit`
dknopik Jan 16, 2025
bb8654f
Merge branch 'unstable' into bls-interpolation
dknopik Jan 16, 2025
31e82f1
cargo fmt and sort
dknopik Jan 16, 2025
718cb04
Merge branch 'unstable' into bls-interpolation
diegomrsantos Jan 16, 2025
52f3e40
Merge branch 'unstable' into bls-interpolation
dknopik Jan 20, 2025
b31277f
clean up, `blsful` as default, big scary warning for `blst`
dknopik Jan 20, 2025
d48b45f
rename to `bls_lagrange`
dknopik Jan 20, 2025
4455d2e
Merge remote-tracking branch 'origin/bls-interpolation' into bls-inte…
dknopik Jan 20, 2025
db0e310
bump MSRV due to updated dependency
dknopik Jan 21, 2025
227602f
Merge branch 'unstable' into bls-interpolation
dknopik Jan 23, 2025
d7ee5fd
simplify Cargo.toml and add TODO
dknopik Jan 23, 2025
3ba37d8
Merge branch 'unstable' into bls-interpolation
dknopik Jan 27, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
199 changes: 184 additions & 15 deletions Cargo.lock

Large diffs are not rendered by default.

11 changes: 11 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
members = [
"anchor",
"anchor/client",
"anchor/common/bls_lagrange",
"anchor/common/qbft",
"anchor/common/ssv_types",
"anchor/common/version",
Expand All @@ -24,6 +25,7 @@ edition = "2021"
# This table has three subsections: first the internal dependencies, then the lighthouse dependencies, then all other.
[workspace.dependencies]
anchor_validator_store = { path = "anchor/validator_store" }
bls_lagrange = { path = "anchor/common/bls_lagrange" }
client = { path = "anchor/client" }
database = { path = "anchor/database" }
eth = { path = "anchor/eth" }
Expand Down Expand Up @@ -71,6 +73,9 @@ alloy = { version = "0.6.4", features = [
async-channel = "1.9"
axum = "0.7.7"
base64 = "0.22.1"
blst = { git = "https://github.com/dknopik/blst", branch = "sk-conversion" }
# the custom repo is needed because they fix to a specific version of blst, which conflicts with the line above
blstrs_plus = { git = "https://github.com/dknopik/blstrs", branch = "pls" }
clap = { version = "4.5.15", features = ["derive", "wrap_help"] }
dashmap = "6.1.0"
derive_more = { version = "1.0.0", features = ["full"] }
Expand All @@ -83,6 +88,7 @@ indexmap = "2.7.0"
num_cpus = "1"
openssl = "0.10.68"
parking_lot = "0.12"
rand = "0.8.5"
reqwest = "0.12.12"
rusqlite = "0.28.0"
serde = { version = "1.0.208", features = ["derive"] }
Expand All @@ -99,8 +105,13 @@ tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["fmt", "env-filter"] }
tree_hash = "0.8"
tree_hash_derive = "0.8"
vsss-rs = "5.1.0"
zeroize = "1.8.1"

[patch.crates-io]
# todo: remove when https://github.com/supranational/blst/pull/248 is merged
blst = { git = "https://github.com/dknopik/blst", branch = "sk-conversion" }

[profile.maxperf]
inherits = "release"

Expand Down
21 changes: 21 additions & 0 deletions anchor/common/bls_lagrange/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
[package]
name = "bls_lagrange"
version = "0.1.0"
edition = "2021"

[dependencies]
bls = { workspace = true }
# Backend crates are optional; exactly which ones are pulled in is decided
# by the feature flags below.
blst = { workspace = true, optional = true }
blstrs_plus = { workspace = true, optional = true }
rand = { workspace = true }
vsss-rs = { workspace = true, optional = true }
zeroize = { workspace = true }

[dev-dependencies]
# Tests always need blst, even when the blsful backend is selected.
blst = { workspace = true }

[features]
# The `blsful`-style backend (blstrs_plus + vsss-rs) is the default.
default = ["blsful"]
blsful = ["dep:blstrs_plus", "dep:vsss-rs"]
blst = ["dep:blst"]
# Variant of the blst backend tuned for single-threaded use.
blst_single_thread = ["blst"]
126 changes: 126 additions & 0 deletions anchor/common/bls_lagrange/src/blsful.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
use crate::Error;
use blstrs_plus::{G2Projective, Scalar};
use rand::{CryptoRng, Rng};
use std::num::NonZeroU64;
use vsss_rs::{
shamir, IdentifierPrimeField, ParticipantIdGeneratorType, ReadableShareSet, ValueGroup,
};
use zeroize::Zeroizing;

/// A share identifier, kept both as the original `u64` and as the field
/// element used by `vsss-rs` for Shamir splitting / Lagrange interpolation.
///
/// Zero is never a valid id: the `TryFrom<u64>` impl rejects it and the
/// `From<NonZeroU64>` impl rules it out by construction.
#[derive(Debug, Clone)]
pub struct KeyId {
    // The raw numeric id this `KeyId` was created from.
    num: u64,
    // The same id lifted into the scalar field (the share's x-coordinate).
    identifier: IdentifierPrimeField<Scalar>,
}

impl TryFrom<u64> for KeyId {
type Error = Error;

fn try_from(value: u64) -> Result<Self, Error> {
if value != 0 {
Ok(KeyId {
num: value,
identifier: IdentifierPrimeField(Scalar::from(value)),
})
} else {
Err(Error::ZeroId)
}
}
}
impl From<NonZeroU64> for KeyId {
    /// Infallible conversion: a `NonZeroU64` can never be the forbidden
    /// zero identifier.
    fn from(value: NonZeroU64) -> Self {
        let num = value.get();
        let identifier = IdentifierPrimeField(Scalar::from(num));
        KeyId { num, identifier }
    }
}

impl From<KeyId> for u64 {
fn from(value: KeyId) -> Self {
value.num
}
}

pub fn split_with_rng(
key: bls::SecretKey,
threshold: u64,
ids: impl IntoIterator<Item = KeyId>,
rng: &mut (impl CryptoRng + Rng),
) -> Result<Vec<(KeyId, bls::SecretKey)>, Error> {
let result = Scalar::from_be_bytes(
key.serialize()
.as_bytes()
.try_into()
.map_err(|_| Error::InternalError)?,
);
let key = if result.is_some().into() {
IdentifierPrimeField(result.unwrap())
} else {
return Err(Error::InternalError);
};

let ids = ids.into_iter().map(|k| k.identifier).collect::<Vec<_>>();

let result = Zeroizing::new(
shamir::split_secret_with_participant_generator(
threshold as usize,
ids.len(),
&key,
rng,
&[ParticipantIdGeneratorType::List { list: &ids }],
)
.map_err(|_| Error::InternalError)?,
);

result
.iter()
.map(|(identifier, share)| {
bls::SecretKey::deserialize(&share.0.to_be_bytes())
.map_err(|_| Error::InternalError)
.map(move |sk| {
let bytes = identifier.0.to_be_bytes();
debug_assert_eq!(bytes[..24], [0; 24]);
(
KeyId {
num: u64::from_be_bytes((&bytes[24..]).try_into().unwrap()),
identifier: *identifier,
},
sk,
)
})
})
.collect()
}

pub fn combine_signatures(
signatures: &[bls::Signature],
ids: &[KeyId],
) -> Result<bls::Signature, Error> {
if signatures.len() < 2 {
return Err(Error::LessThanTwoSignatures);
}
if signatures.len() != ids.len() {
return Err(Error::NotOneIdPerSignature);
}

let share_set = signatures
.iter()
.zip(ids)
.map(|(sig, id)| {
let Some(bytes) = sig.serialize_uncompressed() else {
return Err(Error::InternalError);
};
let g2 = G2Projective::from_uncompressed(&bytes);
if g2.is_some().into() {
Ok((id.identifier, ValueGroup(g2.unwrap())))
} else {
Err(Error::InternalError)
}
})
.collect::<Result<Vec<_>, _>>()?;

let result = share_set.combine().map_err(|_| Error::InternalError)?;
bls::Signature::deserialize_uncompressed(&result.0.to_uncompressed())
.map_err(|_| Error::InternalError)
}
Loading
Loading