Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ byteorder = "1.4.3"
thiserror = "1.0"
halo2curves = { version = "0.4.0", features = ["derive_serde"] }
group = "0.13.0"
once_cell = "1.18.0"

[target.'cfg(any(target_arch = "x86_64", target_arch = "aarch64"))'.dependencies]
pasta-msm = { version = "0.1.4" }
Expand Down
166 changes: 166 additions & 0 deletions src/digest.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,166 @@
use bincode::Options;
use ff::PrimeField;
use serde::Serialize;
use sha3::{Digest, Sha3_256};
use std::io;
use std::marker::PhantomData;

use crate::constants::NUM_HASH_BITS;

/// Trait for components with potentially discrete digests to be included in their container's digest.
pub trait Digestible {
/// Write the byte representation of Self in a byte buffer.
///
/// Implementations decide the serialization format; the blanket impl for
/// `SimpleDigestible` types uses bincode. Errors surface as `io::Error`.
fn write_bytes<W: Sized + io::Write>(&self, byte_sink: &mut W) -> Result<(), io::Error>;
}

/// Marker trait to be implemented for types that implement `Digestible` and `Serialize`.
/// Their instances will be serialized to bytes then digested.
///
/// Opting in grants the blanket `Digestible` impl below, which serializes
/// with bincode (little-endian, fixed-int encoding).
pub trait SimpleDigestible: Serialize {}

impl<T: SimpleDigestible> Digestible for T {
    /// Serialize `self` with bincode straight into `byte_sink`.
    ///
    /// Little-endian byte order and fixed-size integer encoding are chosen
    /// explicitly so the byte stream — and hence the digest — is stable
    /// across platforms.
    fn write_bytes<W: Sized + io::Write>(&self, byte_sink: &mut W) -> Result<(), io::Error> {
        // Note: bincode recursively length-prefixes every field!
        bincode::DefaultOptions::new()
            .with_little_endian()
            .with_fixint_encoding()
            .serialize_into(byte_sink, self)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))
    }
}

/// Computes a field-element digest of a `Digestible` value.
///
/// Borrows the value for lifetime `'a`; `PhantomData<F>` records the target
/// field type without storing an `F`.
pub struct DigestComputer<'a, F: PrimeField, T> {
// the value whose byte representation will be hashed
inner: &'a T,
_phantom: PhantomData<F>,
}

impl<'a, F: PrimeField, T: Digestible> DigestComputer<'a, F, T> {
    /// Fresh SHA3-256 hasher instance.
    fn hasher() -> Sha3_256 {
        Sha3_256::new()
    }

    /// Interpret the low `NUM_HASH_BITS` bits of `digest` as a field element.
    ///
    /// Bits are read little-endian: byte `i / 8`, bit `i % 8`, LSB first.
    /// Truncating to `NUM_HASH_BITS` keeps the result canonically inside the
    /// field.
    // Takes `&[u8]` — the previous `&mut [u8]` was never mutated.
    fn map_to_field(digest: &[u8]) -> F {
        let bv = (0..NUM_HASH_BITS).map(|i| {
            let (byte_pos, bit_pos) = (i / 8, i % 8);
            let bit = (digest[byte_pos] >> bit_pos) & 1;
            bit == 1
        });

        // turn the bit vector into a scalar: add 2^i for every set bit i
        let mut acc = F::ZERO;
        let mut coeff = F::ONE;
        for bit in bv {
            if bit {
                acc += coeff;
            }
            coeff += coeff; // coeff doubles each step: 1, 2, 4, ...
        }
        acc
    }

    /// Create a new DigestComputer
    pub fn new(inner: &'a T) -> Self {
        DigestComputer {
            inner,
            _phantom: PhantomData,
        }
    }

    /// Compute the digest of a `Digestible` instance.
    ///
    /// # Errors
    /// Returns any serialization error produced by `write_bytes`. (The
    /// previous implementation called `.expect(...)` here, panicking on the
    /// very error this `Result` return type exists to report.)
    pub fn digest(&self) -> Result<F, io::Error> {
        let mut hasher = Self::hasher();
        // Sha3_256 implements io::Write, so serialization streams directly
        // into the hasher; propagate failures with `?` instead of panicking.
        self.inner.write_bytes(&mut hasher)?;
        let bytes: [u8; 32] = hasher.finalize().into();
        Ok(Self::map_to_field(&bytes))
    }
}

#[cfg(test)]
mod tests {
    use ff::Field;
    use once_cell::sync::OnceCell;
    use pasta_curves::pallas;
    use serde::{Deserialize, Serialize};

    use crate::traits::Group;

    use super::{DigestComputer, SimpleDigestible};

    /// Minimal serializable struct carrying a lazily-computed, serde-skipped
    /// digest cache.
    #[derive(Serialize, Deserialize)]
    struct S<G: Group> {
        i: usize,
        #[serde(skip, default = "OnceCell::new")]
        digest: OnceCell<G::Scalar>,
    }

    impl<G: Group> SimpleDigestible for S<G> {}

    impl<G: Group> S<G> {
        fn new(i: usize) -> Self {
            Self {
                i,
                digest: OnceCell::new(),
            }
        }

        /// Return the cached digest, computing and caching it on first use.
        fn digest(&self) -> G::Scalar {
            self.digest
                .get_or_try_init(|| DigestComputer::new(self).digest())
                .cloned()
                .unwrap()
        }
    }

    type G = pallas::Point;

    #[test]
    fn test_digest_field_not_ingested_in_computation() {
        let untouched = S::<G>::new(42);

        // Build a struct whose digest cell is pre-filled with a bogus value,
        // to make sure the digest computation does not depend on it.
        let cell = OnceCell::new();
        cell.set(<G as Group>::Scalar::ONE).unwrap();
        let prefilled: S<G> = S { i: 42, digest: cell };

        let d1 = DigestComputer::<<G as Group>::Scalar, _>::new(&untouched)
            .digest()
            .unwrap();
        let d2 = DigestComputer::<<G as Group>::Scalar, _>::new(&prefilled)
            .digest()
            .unwrap();
        assert_eq!(d1, d2);

        // note: because of the semantics of `OnceCell::get_or_try_init`, the
        // equality above does not imply `untouched.digest() == prefilled.digest()`:
        // the cached method returns the pre-set bogus value.
        let recomputed = DigestComputer::<<G as Group>::Scalar, _>::new(&prefilled)
            .digest()
            .unwrap();
        assert_ne!(prefilled.digest(), recomputed);
    }

    #[test]
    fn test_digest_impervious_to_serialization() {
        let good_s = S::<G>::new(42);

        // Pre-set the digest cell to a weird value to confuse deserializers.
        let cell = OnceCell::new();
        cell.set(<G as Group>::Scalar::ONE).unwrap();
        let bad_s: S<G> = S { i: 42, digest: cell };

        // this justifies the adjective "bad"
        assert_ne!(good_s.digest(), bad_s.digest());

        // The serde(skip) attribute must drop the cached digest on the wire,
        // so a round-trip recovers the good digest.
        let naughty_bytes = bincode::serialize(&bad_s).unwrap();
        let retrieved_s: S<G> = bincode::deserialize(&naughty_bytes).unwrap();
        assert_eq!(good_s.digest(), retrieved_s.digest())
    }
}
3 changes: 3 additions & 0 deletions src/errors.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,4 +56,7 @@ pub enum NovaError {
/// return when error during synthesis
#[error("SynthesisError")]
SynthesisError,
/// returned when there is an error creating a digest
#[error("DigestError")]
DigestError,
}
Loading