Store commitments in blob #2967

Merged · 2 commits · Jan 27, 2025

Changes from all commits
4 changes: 3 additions & 1 deletion Cargo.lock

Generated file; diff not rendered.

6 changes: 4 additions & 2 deletions saffron/Cargo.toml
@@ -31,7 +31,9 @@ o1-utils.workspace = true
poly-commitment.workspace = true
rayon.workspace = true
rmp-serde.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
sha3.workspace = true
time = { version = "0.3", features = ["macros"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] }
@@ -40,4 +42,4 @@ tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] }
[dev-dependencies]
ark-std.workspace = true
proptest.workspace = true
once_cell.workspace = true
once_cell.workspace = true
127 changes: 59 additions & 68 deletions saffron/src/blob.rs
@@ -1,80 +1,55 @@
use crate::utils::{decode_into, encode_for_domain};
use ark_ff::{Field, PrimeField};
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations};
use ark_serialize::{
CanonicalDeserialize, CanonicalSerialize, Compress, Read, SerializationError, Valid, Validate,
Write,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use o1_utils::FieldHelpers;
use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use tracing::{debug, instrument};

// A FieldBlob<F> represents the encoding of a Vec<u8> as a list of polynomials over F,
// where F is a prime field. The polynomials are represented in the monomial basis.
#[derive(Clone, Debug, PartialEq)]
pub struct FieldBlob<F: Field> {
#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[serde(bound = "G::ScalarField : CanonicalDeserialize + CanonicalSerialize")]
pub struct FieldBlob<G: CommitmentCurve> {
pub n_bytes: usize,
pub domain_size: usize,
pub data: Vec<DensePolynomial<F>>,
pub commitments: Vec<PolyComm<G>>,
#[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
pub data: Vec<DensePolynomial<G::ScalarField>>,
}
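
This is the substantive change: the blob now carries one `PolyComm<G>` per chunk polynomial alongside the data, and the `serde_as` / `serde(bound = ...)` annotations route the arkworks field elements through `o1_utils::serialization::SerdeAs`. Deriving `Serialize`/`Deserialize` this way is what lets the hand-written `CanonicalSerialize`, `CanonicalDeserialize`, and `Valid` impls below be deleted.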

impl<F: CanonicalSerialize + Field> CanonicalSerialize for FieldBlob<F> {
fn serialize_with_mode<W: Write>(
&self,
mut writer: W,
mode: Compress,
) -> Result<(), SerializationError> {
self.n_bytes.serialize_with_mode(&mut writer, mode)?;
self.domain_size.serialize_with_mode(&mut writer, mode)?;
self.data.serialize_with_mode(&mut writer, mode)?;
Ok(())
}

fn serialized_size(&self, mode: Compress) -> usize {
self.n_bytes.serialized_size(mode)
+ self.domain_size.serialized_size(mode)
+ self.data.serialized_size(mode)
}
}

impl<F: Valid + Field> Valid for FieldBlob<F> {
fn check(&self) -> Result<(), SerializationError> {
self.n_bytes.check()?;
self.domain_size.check()?;
self.data.check()?;
Ok(())
}
#[instrument(skip_all)]
fn commit_to_blob_data<G: CommitmentCurve>(
srs: &SRS<G>,
data: &[DensePolynomial<G::ScalarField>],
) -> Vec<PolyComm<G>> {
let num_chunks = 1;
data.par_iter()
.map(|p| srs.commit_non_hiding(p, num_chunks))
.collect()
}
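
`commit_to_blob_data` commits to the chunk polynomials in parallel. With `num_chunks = 1`, each polynomial is expected to fit inside the SRS (interpolation over the domain keeps its degree below the domain size), so each resulting `PolyComm` should hold a single curve point.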

impl<F: CanonicalDeserialize + Field> CanonicalDeserialize for FieldBlob<F> {
fn deserialize_with_mode<R: Read>(
mut reader: R,
compress: Compress,
validate: Validate,
) -> Result<Self, SerializationError> {
let n_bytes = usize::deserialize_with_mode(&mut reader, compress, validate)?;
let domain_size = usize::deserialize_with_mode(&mut reader, compress, validate)?;
let data =
Vec::<DensePolynomial<F>>::deserialize_with_mode(&mut reader, compress, validate)?;
Ok(Self {
n_bytes,
domain_size,
data,
})
}
}

impl<F: PrimeField> FieldBlob<F> {
impl<G: CommitmentCurve> FieldBlob<G> {
#[instrument(skip_all)]
pub fn encode<D: EvaluationDomain<F>>(domain: D, bytes: &[u8]) -> FieldBlob<F> {
pub fn encode<D: EvaluationDomain<G::ScalarField>>(
srs: &SRS<G>,
domain: D,
bytes: &[u8],
) -> FieldBlob<G> {
let field_elements = encode_for_domain(&domain, bytes);
let domain_size = domain.size();

let data: Vec<DensePolynomial<F>> = field_elements
let data: Vec<DensePolynomial<G::ScalarField>> = field_elements
.par_iter()
.map(|chunk| Evaluations::from_vec_and_domain(chunk.to_vec(), domain).interpolate())
.collect();

let commitments = commit_to_blob_data(srs, &data);

debug!(
"Encoded {} bytes into {} polynomials",
bytes.len(),
@@ -84,12 +59,13 @@ impl<F: PrimeField> FieldBlob<F> {
FieldBlob {
n_bytes: bytes.len(),
domain_size,
commitments,
data,
}
}

#[instrument(skip_all)]
pub fn decode<D: EvaluationDomain<F>>(domain: D, blob: FieldBlob<F>) -> Vec<u8> {
pub fn decode<D: EvaluationDomain<G::ScalarField>>(domain: D, blob: FieldBlob<G>) -> Vec<u8> {
// TODO: find an Error type and use Result
if domain.size() != blob.domain_size {
panic!(
@@ -98,8 +74,8 @@ impl<F: PrimeField> FieldBlob<F> {
domain.size()
);
}
let n = (F::MODULUS_BIT_SIZE / 8) as usize;
let m = F::size_in_bytes();
let n = (G::ScalarField::MODULUS_BIT_SIZE / 8) as usize;
let m = G::ScalarField::size_in_bytes();
let mut bytes = Vec::with_capacity(blob.n_bytes);
let mut buffer = vec![0u8; m];

@@ -118,31 +94,46 @@

#[cfg(test)]
mod tests {
use crate::commitment::commit_to_field_elems;

use super::*;
use ark_poly::Radix2EvaluationDomain;
use mina_curves::pasta::Fp;
use mina_curves::pasta::{Fp, Vesta};
use once_cell::sync::Lazy;
use proptest::prelude::*;

static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> = Lazy::new(|| {
const SRS_SIZE: usize = 1 << 16;
Radix2EvaluationDomain::new(SRS_SIZE).unwrap()
});
const SRS_SIZE: usize = 1 << 16;

static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| SRS::create(SRS_SIZE));

static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
Lazy::new(|| Radix2EvaluationDomain::new(SRS_SIZE).unwrap());

// check that Vec<u8> -> FieldBlob<Fp> -> Vec<u8> is the identity function
proptest! {
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_round_trip_blob_encoding( xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size()))
{ let blob = FieldBlob::<Fp>::encode(*DOMAIN, &xs);
let mut buf = Vec::new();
blob.serialize_compressed(&mut buf).unwrap();
let a = FieldBlob::<Fp>::deserialize_compressed(&buf[..]).unwrap();
{ let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
let bytes = rmp_serde::to_vec(&blob).unwrap();
let a = rmp_serde::from_slice(&bytes).unwrap();
// check that rmp-serde serialization is behaving as expected
prop_assert_eq!(blob.clone(), a);
let ys = FieldBlob::<Fp>::decode(*DOMAIN, blob);
let ys = FieldBlob::<Vesta>::decode(*DOMAIN, blob);
// check that we get the byte blob back again
prop_assert_eq!(xs,ys);
}
}

proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_user_and_storage_provider_commitments_equal(xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
)
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
prop_assert_eq!(user_commitments, blob.commitments);
}
}
}
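
End to end, the new API mirrors `test_round_trip_blob_encoding` above. A minimal sketch (not part of the diff), assuming the same `SRS_SIZE` and Pasta curves as the tests:

```rust
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use mina_curves::pasta::{Fp, Vesta};
use poly_commitment::{ipa::SRS, SRS as _};
use saffron::blob::FieldBlob;

fn round_trip(bytes: &[u8]) -> Vec<u8> {
    // Same sizes as the tests: the SRS and the evaluation domain agree.
    let srs: SRS<Vesta> = SRS::create(1 << 16);
    let domain = Radix2EvaluationDomain::<Fp>::new(1 << 16).unwrap();

    // Encoding now commits as a side effect and stores the result in the blob.
    let blob = FieldBlob::<Vesta>::encode(&srs, domain, bytes);

    // The serde derives make persistence a one-liner with rmp-serde.
    let stored = rmp_serde::to_vec(&blob).unwrap();
    let restored: FieldBlob<Vesta> = rmp_serde::from_slice(&stored).unwrap();

    FieldBlob::<Vesta>::decode(domain, restored)
}
```

Since `encode` takes the SRS, committing happens at encode time; callers that previously called `commit_to_blob` separately now read the commitments off the blob.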
65 changes: 2 additions & 63 deletions saffron/src/commitment.rs
@@ -1,4 +1,3 @@
use crate::blob::FieldBlob;
use ark_ec::AffineRepr;
use ark_ff::One;
use ark_poly::{Evaluations, Radix2EvaluationDomain as D};
@@ -26,18 +25,6 @@ pub fn commit_to_field_elems<G: CommitmentCurve>(
.collect()
}

#[instrument(skip_all)]
pub fn commit_to_blob<G: CommitmentCurve>(
srs: &SRS<G>,
Author comment: Not deleted -- this and the tests below just got moved to the blob module

blob: &FieldBlob<G::ScalarField>,
) -> Vec<PolyComm<G>> {
let num_chunks = 1;
blob.data
.par_iter()
.map(|p| srs.commit_non_hiding(p, num_chunks))
.collect()
}

#[instrument(skip_all)]
pub fn fold_commitments<
G: AffineRepr,
@@ -49,62 +36,14 @@ pub fn fold_commitments<
for commitment in commitments {
absorb_commitment(sponge, commitment)
}
let challenge = sponge.challenge();
let alpha = sponge.challenge();
let powers: Vec<G::ScalarField> = commitments
.iter()
.scan(G::ScalarField::one(), |acc, _| {
let res = *acc;
*acc *= challenge;
*acc *= alpha;
Some(res)
})
.collect::<Vec<_>>();
PolyComm::multi_scalar_mul(&commitments.iter().collect::<Vec<_>>(), &powers)
}

#[cfg(test)]
mod tests {
use crate::utils::encode_for_domain;

use super::*;
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
use o1_utils::FieldHelpers;
use once_cell::sync::Lazy;
use proptest::prelude::*;

const SRS_SIZE: usize = 1 << 16;

static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| SRS::create(SRS_SIZE));

static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
Lazy::new(|| Radix2EvaluationDomain::new(SRS_SIZE).unwrap());

proptest! {
#![proptest_config(ProptestConfig::with_cases(10))]
#[test]
fn test_user_and_storage_provider_commitments_equal(xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
)
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Fp>::encode(*DOMAIN, &xs);
let storeage_provider_commitments = commit_to_blob(&*SRS, &blob);
prop_assert_eq!(&user_commitments, &storeage_provider_commitments);
let user_commitment =
{ let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
fold_commitments(&mut fq_sponge, &user_commitments)

};
let storage_provider_commitment =
{ let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
fold_commitments(&mut fq_sponge, &storeage_provider_commitments)

};
prop_assert_eq!(&user_commitment, &storage_provider_commitment);
}
}
}
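
`fold_commitments` stays in this module: it absorbs every commitment into the sponge, squeezes a single challenge `alpha`, and returns the random linear combination `sum_i alpha^i * C_i` via `multi_scalar_mul`. A sketch of a caller, following the pattern from the test that moved (the exact generic bounds are elided in the diff, so treat the signature as assumed):

```rust
use mina_curves::pasta::{Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
use poly_commitment::PolyComm;
use saffron::commitment::fold_commitments;

fn fold(commitments: &[PolyComm<Vesta>]) -> PolyComm<Vesta> {
    // A fresh sponge binds the challenge to exactly this list of commitments.
    let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
        mina_poseidon::pasta::fq_kimchi::static_params(),
    );
    fold_commitments(&mut fq_sponge, commitments)
}
```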
44 changes: 15 additions & 29 deletions saffron/src/main.rs
@@ -1,12 +1,11 @@
use anyhow::Result;
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use clap::Parser;
use kimchi::precomputed_srs::TestSRS;
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
use poly_commitment::{ipa::SRS, PolyComm, SRS as _};
use mina_curves::pasta::{Fp, Vesta};
use poly_commitment::{ipa::SRS, SRS as _};
use saffron::{blob::FieldBlob, cli, commitment, utils};
use sha3::{Digest, Sha3_256};
use std::{
fs::File,
io::{Read, Write},
@@ -69,11 +68,9 @@ fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
input_file = args.input,
"Decoding file"
);
let mut file = File::open(args.input)?;
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let blob: FieldBlob<Fp> = FieldBlob::<Fp>::deserialize_compressed(&buf[..])?;
let data = FieldBlob::<Fp>::decode(domain, blob);
let file = File::open(args.input)?;
let blob: FieldBlob<Vesta> = rmp_serde::decode::from_read(file)?;
let data = FieldBlob::<Vesta>::decode(domain, blob);
debug!(output_file = args.output, "Writing decoded blob to file");
let mut writer = File::create(args.output)?;
writer.write_all(&data)?;
@@ -90,30 +87,23 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
let mut file = File::open(args.input)?;
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let blob = FieldBlob::<Fp>::encode(domain, &buf);
let blob = FieldBlob::<Vesta>::encode(&srs, domain, &buf);
args.assert_commitment
.into_iter()
.for_each(|asserted_commitment| {
let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
let commitments = commitment::commit_to_blob(&srs, &blob);
let c: PolyComm<ark_ec::short_weierstrass::Affine<VestaParameters>> =
commitment::fold_commitments(&mut fq_sponge, &commitments);
let bytes = serde_json::to_vec(&c).unwrap();
let computed_commitment = hex::encode(bytes);
let bytes = rmp_serde::to_vec(&blob.commitments).unwrap();
let hash = Sha3_256::new().chain_update(bytes).finalize();
let computed_commitment = hex::encode(hash);
if asserted_commitment != computed_commitment {
panic!(
"commitment mismatch: asserted {}, computed {}",
"commitment hash mismatch: asserted {}, computed {}",
asserted_commitment, computed_commitment
);
}
});
let mut bytes_to_write = Vec::with_capacity(buf.len());
blob.serialize_compressed(&mut bytes_to_write)?;
debug!(output_file = args.output, "Writing encoded blob to file",);
let mut writer = File::create(args.output)?;
writer.write_all(&bytes_to_write)?;
rmp_serde::encode::write(&mut writer, &blob)?;
Ok(())
}

@@ -123,14 +113,10 @@ pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<String> {
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let field_elems = utils::encode_for_domain(&domain_fp, &buf);
let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
let commitments = commitment::commit_to_field_elems(&srs, domain_fp, field_elems);
let c: PolyComm<ark_ec::short_weierstrass::Affine<VestaParameters>> =
commitment::fold_commitments(&mut fq_sponge, &commitments);
let bytes = serde_json::to_vec(&c).unwrap();
Ok(hex::encode(bytes))
let bytes = rmp_serde::to_vec(&commitments).unwrap();
let hash = Sha3_256::new().chain_update(bytes).finalize();
Ok(hex::encode(hash))
}

pub fn init_subscriber() {
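
Both the `assert_commitment` check in `encode_file` and `compute_commitment` now derive the same fingerprint from the commitment vector: MessagePack-encode it, hash with SHA3-256, hex-encode the digest. The shared logic amounts to the following hypothetical helper (the PR inlines it at both call sites):

```rust
use mina_curves::pasta::Vesta;
use poly_commitment::PolyComm;
use sha3::{Digest, Sha3_256};

// Hypothetical helper mirroring the two inlined call sites above.
fn commitment_fingerprint(commitments: &[PolyComm<Vesta>]) -> String {
    let bytes = rmp_serde::to_vec(commitments).unwrap();
    let hash = Sha3_256::new().chain_update(bytes).finalize();
    hex::encode(hash)
}
```

The old scheme folded the commitments with a sponge and hex-encoded a JSON serialization of the folded point; the new fingerprint hashes the full commitment list instead.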