Skip to content

Commit

Permalink
Merge pull request #2966 from o1-labs/martin/saffron-commitment
Browse files Browse the repository at this point in the history
Saffron commitments
  • Loading branch information
martyall authored Jan 24, 2025
2 parents 5ae78ca + e0ac06d commit cb4c09c
Show file tree
Hide file tree
Showing 7 changed files with 251 additions and 13 deletions.
5 changes: 5 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

5 changes: 5 additions & 0 deletions saffron/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,15 @@ ark-ff.workspace = true
ark-poly.workspace = true
ark-serialize = { workspace = true, features = ["derive"]}
clap = { workspace = true, features = ["derive"] }
hex.workspace = true
kimchi.workspace = true
mina-curves.workspace = true
mina-poseidon.workspace = true
o1-utils.workspace = true
poly-commitment.workspace = true
rayon.workspace = true
rmp-serde.workspace = true
serde_json.workspace = true
time = { version = "0.3", features = ["macros"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] }
Expand Down
20 changes: 20 additions & 0 deletions saffron/src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@ pub struct EncodeFileArgs {
help = "output file (encoded as field elements)"
)]
pub output: String,

#[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
pub srs_cache: Option<String>,

#[arg(long = "assert-commitment", value_name = "COMMITMENT")]
pub assert_commitment: Option<String>,
}

#[derive(Parser)]
Expand All @@ -26,6 +32,18 @@ pub struct DecodeFileArgs {

#[arg(long, short = 'o', value_name = "FILE", help = "output file")]
pub output: String,

#[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
pub srs_cache: Option<String>,
}

// Arguments for the `compute-commitment` subcommand: read a file and print
// the hex-encoded folded commitment to its contents.
// NOTE: `//` comments (not `///`) are used deliberately — clap derives help
// text from doc comments, and the CLI output must stay unchanged.
#[derive(Parser)]
pub struct ComputeCommitmentArgs {
    // Path of the file whose contents are committed to.
    #[arg(long, short = 'i', value_name = "FILE", help = "input file")]
    pub input: String,

    // Optional path to a cached SRS; when absent, an SRS is created from
    // scratch (see `get_srs` in main.rs).
    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
    pub srs_cache: Option<String>,
}

#[derive(Parser)]
Expand All @@ -39,4 +57,6 @@ pub enum Commands {
Encode(EncodeFileArgs),
#[command(name = "decode")]
Decode(DecodeFileArgs),
#[command(name = "compute-commitment")]
ComputeCommitment(ComputeCommitmentArgs),
}
110 changes: 110 additions & 0 deletions saffron/src/commitment.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
use crate::blob::FieldBlob;
use ark_ec::AffineRepr;
use ark_ff::One;
use ark_poly::{Evaluations, Radix2EvaluationDomain as D};
use mina_poseidon::FqSponge;
use poly_commitment::{
commitment::{absorb_commitment, CommitmentCurve},
ipa::SRS,
PolyComm, SRS as _,
};
use rayon::prelude::*;
use tracing::instrument;

/// Commits to each chunk of field elements (interpreted as evaluations over
/// `domain`) in parallel, returning one non-hiding commitment per chunk.
#[instrument(skip_all)]
pub fn commit_to_field_elems<G: CommitmentCurve>(
    srs: &SRS<G>,
    domain: D<G::ScalarField>,
    field_elems: Vec<Vec<G::ScalarField>>,
) -> Vec<PolyComm<G>> {
    // The function already owns `field_elems`, so consume it with
    // `into_par_iter`: each chunk is moved into its `Evaluations` wrapper.
    // (The previous `par_iter` + `chunk.to_vec()` cloned every chunk.)
    field_elems
        .into_par_iter()
        .map(|chunk| {
            let evals = Evaluations::from_vec_and_domain(chunk, domain);
            srs.commit_evaluations_non_hiding(domain, &evals)
        })
        .collect()
}

/// Commits to every polynomial of a `FieldBlob` in parallel, producing one
/// non-hiding commitment (a single chunk) per polynomial.
#[instrument(skip_all)]
pub fn commit_to_blob<G: CommitmentCurve>(
    srs: &SRS<G>,
    blob: &FieldBlob<G::ScalarField>,
) -> Vec<PolyComm<G>> {
    // One chunk per commitment: each blob polynomial fits the SRS size.
    const NUM_CHUNKS: usize = 1;
    blob.data
        .par_iter()
        .map(|poly| srs.commit_non_hiding(poly, NUM_CHUNKS))
        .collect()
}

/// Folds a list of commitments into a single commitment using a Fiat-Shamir
/// challenge: absorbs every commitment into `sponge`, squeezes a challenge
/// `c`, and returns `sum_i c^i * C_i`.
#[instrument(skip_all)]
pub fn fold_commitments<
    G: AffineRepr,
    EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
>(
    sponge: &mut EFqSponge,
    commitments: &[PolyComm<G>],
) -> PolyComm<G> {
    // Absorb all commitments first so the challenge binds every one of them.
    for commitment in commitments {
        absorb_commitment(sponge, commitment)
    }
    let challenge = sponge.challenge();
    // powers = [1, c, c^2, ...], one coefficient per commitment.
    // (Dropped the redundant `.collect::<Vec<_>>()` turbofish — the binding
    // is already annotated with `Vec<G::ScalarField>`.)
    let powers: Vec<G::ScalarField> = commitments
        .iter()
        .scan(G::ScalarField::one(), |acc, _| {
            let res = *acc;
            *acc *= challenge;
            Some(res)
        })
        .collect();
    // Random linear combination of the commitments with the challenge powers.
    PolyComm::multi_scalar_mul(&commitments.iter().collect::<Vec<_>>(), &powers)
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::utils::encode_for_domain;
    use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
    use mina_curves::pasta::{Fp, Vesta, VestaParameters};
    use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
    use o1_utils::FieldHelpers;
    use once_cell::sync::Lazy;
    use proptest::prelude::*;

    const SRS_SIZE: usize = 1 << 16;

    // SRS creation is expensive; build it (and the matching domain) once and
    // share across all proptest cases.
    static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| SRS::create(SRS_SIZE));

    static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
        Lazy::new(|| Radix2EvaluationDomain::new(SRS_SIZE).unwrap());

    // Folds commitments with a fresh sponge, mirroring what both the user and
    // the storage provider do independently.
    fn fold_with_fresh_sponge(commitments: &[PolyComm<Vesta>]) -> PolyComm<Vesta> {
        let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
            mina_poseidon::pasta::fq_kimchi::static_params(),
        );
        fold_commitments(&mut fq_sponge, commitments)
    }

    proptest! {
        #![proptest_config(ProptestConfig::with_cases(10))]
        #[test]
        // The user commits to the raw field elements; the storage provider
        // commits to the encoded blob. Both the per-chunk commitments and the
        // folded commitments must agree.
        // (Also fixes the `storeage_provider_commitments` misspelling.)
        fn test_user_and_storage_provider_commitments_equal(
            xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
        ) {
            let elems = encode_for_domain(&*DOMAIN, &xs);
            let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
            let blob = FieldBlob::<Fp>::encode(*DOMAIN, &xs);
            let storage_provider_commitments = commit_to_blob(&*SRS, &blob);
            prop_assert_eq!(&user_commitments, &storage_provider_commitments);
            let user_commitment = fold_with_fresh_sponge(&user_commitments);
            let storage_provider_commitment = fold_with_fresh_sponge(&storage_provider_commitments);
            prop_assert_eq!(&user_commitment, &storage_provider_commitment);
        }
    }
}
1 change: 1 addition & 0 deletions saffron/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
pub mod blob;
pub mod cli;
pub mod commitment;
pub mod utils;
99 changes: 94 additions & 5 deletions saffron/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,15 @@ use anyhow::Result;
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use clap::Parser;
use mina_curves::pasta::Fp;
use saffron::{blob::FieldBlob, cli};
use kimchi::precomputed_srs::TestSRS;
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
use poly_commitment::{ipa::SRS, PolyComm, SRS as _};
use saffron::{blob::FieldBlob, cli, commitment, utils};
use std::{
fs::File,
io::{Read, Write},
path::Path,
};
use time::macros::format_description;
use tracing::debug;
Expand All @@ -15,10 +19,51 @@ use tracing_subscriber::{
EnvFilter,
};

const SRS_SIZE: usize = 1 << 16;
const DEFAULT_SRS_SIZE: usize = 1 << 16;

fn get_srs(cache: Option<String>) -> (SRS<Vesta>, Radix2EvaluationDomain<Fp>) {
match cache {
Some(cache) => {
debug!("Loading SRS from cache {}", cache);
let file_path = Path::new(&cache);
let file = File::open(file_path).expect("Error opening SRS cache file");
let srs: SRS<Vesta> = {
// By convention, proof systems serializes a TestSRS with filename 'test_<CURVE_NAME>.srs'.
// The benefit of using this is you don't waste time verifying the SRS.
if file_path
.file_name()
.unwrap()
.to_str()
.unwrap()
.starts_with("test_")
{
let test_srs: TestSRS<Vesta> = rmp_serde::from_read(&file).unwrap();
From::from(test_srs)
} else {
rmp_serde::from_read(&file).unwrap()
}
};
debug!("SRS loaded successfully from cache");
let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
(srs, domain_fp)
}
None => {
debug!(
"No SRS cache provided. Creating SRS from scratch with domain size {}",
DEFAULT_SRS_SIZE
);
let domain_size = DEFAULT_SRS_SIZE;
let srs = SRS::create(domain_size);
let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
srs.get_lagrange_basis(domain_fp);
debug!("SRS created successfully");
(srs, domain_fp)
}
}
}

fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
let domain = Radix2EvaluationDomain::new(SRS_SIZE).unwrap();
let (_, domain) = get_srs(args.srs_cache);
debug!(
domain_size = domain.size(),
input_file = args.input,
Expand All @@ -36,7 +81,7 @@ fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
}

fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
let domain = Radix2EvaluationDomain::new(SRS_SIZE).unwrap();
let (srs, domain) = get_srs(args.srs_cache);
debug!(
domain_size = domain.size(),
input_file = args.input,
Expand All @@ -46,6 +91,24 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let blob = FieldBlob::<Fp>::encode(domain, &buf);
args.assert_commitment
.into_iter()
.for_each(|asserted_commitment| {
let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
let commitments = commitment::commit_to_blob(&srs, &blob);
let c: PolyComm<ark_ec::short_weierstrass::Affine<VestaParameters>> =
commitment::fold_commitments(&mut fq_sponge, &commitments);
let bytes = serde_json::to_vec(&c).unwrap();
let computed_commitment = hex::encode(bytes);
if asserted_commitment != computed_commitment {
panic!(
"commitment mismatch: asserted {}, computed {}",
asserted_commitment, computed_commitment
);
}
});
let mut bytes_to_write = Vec::with_capacity(buf.len());
blob.serialize_compressed(&mut bytes_to_write)?;
debug!(output_file = args.output, "Writing encoded blob to file",);
Expand All @@ -54,6 +117,22 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
Ok(())
}

/// Computes the folded commitment to the contents of `args.input` and returns
/// it hex-encoded (the commitment is serialized to JSON bytes first, matching
/// the format checked by `encode`'s `--assert-commitment`).
pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<String> {
    let (srs, domain_fp) = get_srs(args.srs_cache);
    // `fs::read` sizes the buffer from file metadata — simpler and faster
    // than the manual open + `read_to_end` dance.
    let buf = std::fs::read(args.input)?;
    // Encode the raw bytes as chunks of field elements over the SRS domain.
    let field_elems = utils::encode_for_domain(&domain_fp, &buf);
    let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
        mina_poseidon::pasta::fq_kimchi::static_params(),
    );
    // Commit per chunk, then fold into a single commitment via Fiat-Shamir.
    let commitments = commitment::commit_to_field_elems(&srs, domain_fp, field_elems);
    let c: PolyComm<ark_ec::short_weierstrass::Affine<VestaParameters>> =
        commitment::fold_commitments(&mut fq_sponge, &commitments);
    // Propagate serialization errors through the `Result` instead of
    // panicking with `unwrap()` in a fallible function.
    let bytes = serde_json::to_vec(&c)?;
    Ok(hex::encode(bytes))
}

pub fn init_subscriber() {
let timer = UtcTime::new(format_description!(
"[year]-[month]-[day]T[hour repr:24]:[minute]:[second].[subsecond digits:3]Z"
Expand All @@ -78,5 +157,15 @@ pub fn main() -> Result<()> {
match args {
cli::Commands::Encode(args) => encode_file(args),
cli::Commands::Decode(args) => decode_file(args),
cli::Commands::ComputeCommitment(args) => match compute_commitment(args) {
Ok(c) => {
println!("{}", c);
Ok(())
}
Err(e) => {
eprintln!("{}", e);
Err(e)
}
},
}
}
24 changes: 16 additions & 8 deletions saffron/test-encoding.sh
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
#!/bin/bash

# Check if input file is provided
if [ $# -ne 1 ]; then
echo "Usage: $0 <input_file>"
exit 1
if [ $# -lt 1 ]; then
echo "Usage: $0 <input_file> [srs-filepath]"
exit 1
fi

INPUT_FILE="$1"
SRS_ARG=""
if [ $# -eq 2 ]; then
SRS_ARG="--srs-filepath $2"
fi
ENCODED_FILE="${INPUT_FILE%.*}.bin"
DECODED_FILE="${INPUT_FILE%.*}-decoded${INPUT_FILE##*.}"

Expand All @@ -16,16 +20,20 @@ if [ ! -f "$INPUT_FILE" ]; then
exit 1
fi

# Run encode
# Compute commitment and capture last line
COMMITMENT=$(cargo run --release --bin saffron compute-commitment -i "$INPUT_FILE" $SRS_ARG | tee /dev/stderr | tail -n 1)


# Run encode with captured commitment
echo "Encoding $INPUT_FILE to $ENCODED_FILE"
if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE"; then
echo "Encoding failed"
exit 1
if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE" --assert-commitment "$COMMITMENT" $SRS_ARG; then
echo "Encoding failed"
exit 1
fi

# Run decode
echo "Decoding $ENCODED_FILE to $DECODED_FILE"
if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE"; then
if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE" $SRS_ARG; then
echo "Decoding failed"
exit 1
fi
Expand Down

0 comments on commit cb4c09c

Please sign in to comment.