diff --git a/Cargo.lock b/Cargo.lock
index 0beb02121e..3adbea2286 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2651,12 +2651,17 @@ dependencies = [
  "ark-serialize",
  "ark-std",
  "clap 4.4.18",
+ "hex",
+ "kimchi",
  "mina-curves",
+ "mina-poseidon",
  "o1-utils",
  "once_cell",
  "poly-commitment",
  "proptest",
  "rayon",
+ "rmp-serde",
+ "serde_json",
  "time",
  "tracing",
  "tracing-subscriber",
diff --git a/saffron/Cargo.toml b/saffron/Cargo.toml
index 994fbb241e..b36af88267 100644
--- a/saffron/Cargo.toml
+++ b/saffron/Cargo.toml
@@ -23,10 +23,15 @@ ark-ff.workspace = true
 ark-poly.workspace = true
 ark-serialize = { workspace = true, features = ["derive"]}
 clap = { workspace = true, features = ["derive"] }
+hex.workspace = true
+kimchi.workspace = true
 mina-curves.workspace = true
+mina-poseidon.workspace = true
 o1-utils.workspace = true
 poly-commitment.workspace = true
 rayon.workspace = true
+rmp-serde.workspace = true
+serde_json.workspace = true
 time = { version = "0.3", features = ["macros"] }
 tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] }
diff --git a/saffron/src/cli.rs b/saffron/src/cli.rs
index f9bb471290..0e4acb98f3 100644
--- a/saffron/src/cli.rs
+++ b/saffron/src/cli.rs
@@ -12,6 +12,12 @@ pub struct EncodeFileArgs {
         help = "output file (encoded as field elements)"
     )]
     pub output: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+
+    #[arg(long = "assert-commitment", value_name = "COMMITMENT")]
+    pub assert_commitment: Option<String>,
 }
 
 #[derive(Parser)]
@@ -26,6 +32,18 @@ pub struct DecodeFileArgs {
 
     #[arg(long, short = 'o', value_name = "FILE", help = "output file")]
     pub output: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+}
+
+#[derive(Parser)]
+pub struct ComputeCommitmentArgs {
+    #[arg(long, short = 'i', value_name = "FILE", help = "input file")]
+    pub input: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
 }
 
 #[derive(Parser)]
@@ -39,4 +57,6 @@ pub enum Commands {
     Encode(EncodeFileArgs),
     #[command(name = "decode")]
     Decode(DecodeFileArgs),
+    #[command(name = "compute-commitment")]
+    ComputeCommitment(ComputeCommitmentArgs),
 }
diff --git a/saffron/src/commitment.rs b/saffron/src/commitment.rs
new file mode 100644
index 0000000000..6bb45eed94
--- /dev/null
+++ b/saffron/src/commitment.rs
@@ -0,0 +1,110 @@
+use crate::blob::FieldBlob;
+use ark_ec::AffineRepr;
+use ark_ff::One;
+use ark_poly::{Evaluations, Radix2EvaluationDomain as D};
+use mina_poseidon::FqSponge;
+use poly_commitment::{
+    commitment::{absorb_commitment, CommitmentCurve},
+    ipa::SRS,
+    PolyComm, SRS as _,
+};
+use rayon::prelude::*;
+use tracing::instrument;
+
+#[instrument(skip_all)]
+pub fn commit_to_field_elems<G: CommitmentCurve>(
+    srs: &SRS<G>,
+    domain: D<G::ScalarField>,
+    field_elems: Vec<Vec<G::ScalarField>>,
+) -> Vec<PolyComm<G>> {
+    field_elems
+        .par_iter()
+        .map(|chunk| {
+            let evals = Evaluations::from_vec_and_domain(chunk.to_vec(), domain);
+            srs.commit_evaluations_non_hiding(domain, &evals)
+        })
+        .collect()
+}
+
+#[instrument(skip_all)]
+pub fn commit_to_blob<G: CommitmentCurve>(
+    srs: &SRS<G>,
+    blob: &FieldBlob<G::ScalarField>,
+) -> Vec<PolyComm<G>> {
+    let num_chunks = 1;
+    blob.data
+        .par_iter()
+        .map(|p| srs.commit_non_hiding(p, num_chunks))
+        .collect()
+}
+
+#[instrument(skip_all)]
+pub fn fold_commitments<
+    G: AffineRepr,
+    EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
+>(
+    sponge: &mut EFqSponge,
+    commitments: &[PolyComm<G>],
+) -> PolyComm<G> {
+    for commitment in commitments {
+        absorb_commitment(sponge, commitment)
+    }
+    let challenge = sponge.challenge();
+    let powers: Vec<G::ScalarField> = commitments
+        .iter()
+        .scan(G::ScalarField::one(), |acc, _| {
+            let res = *acc;
+            *acc *= challenge;
+            Some(res)
+        })
+        .collect::<Vec<_>>();
+    PolyComm::multi_scalar_mul(&commitments.iter().collect::<Vec<_>>(), &powers)
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::utils::encode_for_domain;
+
+    use super::*;
+    use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
+    use mina_curves::pasta::{Fp, Vesta, VestaParameters};
+    use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
+    use o1_utils::FieldHelpers;
+    use once_cell::sync::Lazy;
+    use proptest::prelude::*;
+
+    const SRS_SIZE: usize = 1 << 16;
+
+    static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| SRS::create(SRS_SIZE));
+
+    static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
+        Lazy::new(|| Radix2EvaluationDomain::new(SRS_SIZE).unwrap());
+
+    proptest! {
+    #![proptest_config(ProptestConfig::with_cases(10))]
+    #[test]
+    fn test_user_and_storage_provider_commitments_equal(xs in prop::collection::vec(any::<u8>(), 0..=2 * Fp::size_in_bytes() * DOMAIN.size())
+    )
+      { let elems = encode_for_domain(&*DOMAIN, &xs);
+        let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
+        let blob = FieldBlob::<Fp>::encode(*DOMAIN, &xs);
+        let storage_provider_commitments = commit_to_blob(&*SRS, &blob);
+        prop_assert_eq!(&user_commitments, &storage_provider_commitments);
+        let user_commitment =
+          { let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
+                mina_poseidon::pasta::fq_kimchi::static_params(),
+            );
+            fold_commitments(&mut fq_sponge, &user_commitments)
+
+          };
+        let storage_provider_commitment =
+          { let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
+                mina_poseidon::pasta::fq_kimchi::static_params(),
+            );
+            fold_commitments(&mut fq_sponge, &storage_provider_commitments)
+
+          };
+          prop_assert_eq!(&user_commitment, &storage_provider_commitment);
+        }
+    }
+}
diff --git a/saffron/src/lib.rs b/saffron/src/lib.rs
index 29aca593a0..90243a1ec5 100644
--- a/saffron/src/lib.rs
+++ b/saffron/src/lib.rs
@@ -1,3 +1,4 @@
 pub mod blob;
 pub mod cli;
+pub mod commitment;
 pub mod utils;
diff --git a/saffron/src/main.rs b/saffron/src/main.rs
index 57fdd58428..f0eb8878c8 100644
--- a/saffron/src/main.rs
+++ b/saffron/src/main.rs
@@ -2,11 +2,15 @@ use anyhow::Result;
 use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
 use clap::Parser;
-use mina_curves::pasta::Fp;
-use saffron::{blob::FieldBlob, cli};
+use kimchi::precomputed_srs::TestSRS;
+use mina_curves::pasta::{Fp, Vesta, VestaParameters};
+use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge, FqSponge};
+use poly_commitment::{ipa::SRS, PolyComm, SRS as _};
+use saffron::{blob::FieldBlob, cli, commitment, utils};
 use std::{
     fs::File,
     io::{Read, Write},
+    path::Path,
 };
 use time::macros::format_description;
 use tracing::debug;
@@ -15,10 +19,51 @@ use tracing_subscriber::{
     EnvFilter,
 };
 
-const SRS_SIZE: usize = 1 << 16;
+const DEFAULT_SRS_SIZE: usize = 1 << 16;
+
+fn get_srs(cache: Option<String>) -> (SRS<Vesta>, Radix2EvaluationDomain<Fp>) {
+    match cache {
+        Some(cache) => {
+            debug!("Loading SRS from cache {}", cache);
+            let file_path = Path::new(&cache);
+            let file = File::open(file_path).expect("Error opening SRS cache file");
+            let srs: SRS<Vesta> = {
+                // By convention, proof-systems serializes a TestSRS with filename 'test_<curve_name>.srs'.
+                // The benefit of using this is that you don't waste time verifying the SRS.
+                if file_path
+                    .file_name()
+                    .unwrap()
+                    .to_str()
+                    .unwrap()
+                    .starts_with("test_")
+                {
+                    let test_srs: TestSRS<Vesta> = rmp_serde::from_read(&file).unwrap();
+                    From::from(test_srs)
+                } else {
+                    rmp_serde::from_read(&file).unwrap()
+                }
+            };
+            debug!("SRS loaded successfully from cache");
+            let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
+            (srs, domain_fp)
+        }
+        None => {
+            debug!(
+                "No SRS cache provided. Creating SRS from scratch with domain size {}",
+                DEFAULT_SRS_SIZE
+            );
+            let domain_size = DEFAULT_SRS_SIZE;
+            let srs = SRS::create(domain_size);
+            let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
+            srs.get_lagrange_basis(domain_fp);
+            debug!("SRS created successfully");
+            (srs, domain_fp)
+        }
+    }
+}
 
 fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
-    let domain = Radix2EvaluationDomain::new(SRS_SIZE).unwrap();
+    let (_, domain) = get_srs(args.srs_cache);
     debug!(
         domain_size = domain.size(),
         input_file = args.input,
@@ -36,7 +81,7 @@ fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
 }
 
 fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
-    let domain = Radix2EvaluationDomain::new(SRS_SIZE).unwrap();
+    let (srs, domain) = get_srs(args.srs_cache);
     debug!(
         domain_size = domain.size(),
         input_file = args.input,
@@ -46,6 +91,24 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
     let mut buf = Vec::new();
     file.read_to_end(&mut buf)?;
     let blob = FieldBlob::<Fp>::encode(domain, &buf);
+    args.assert_commitment
+        .into_iter()
+        .for_each(|asserted_commitment| {
+            let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
+                mina_poseidon::pasta::fq_kimchi::static_params(),
+            );
+            let commitments = commitment::commit_to_blob(&srs, &blob);
+            let c: PolyComm<ark_ec::short_weierstrass::Affine<VestaParameters>> =
+                commitment::fold_commitments(&mut fq_sponge, &commitments);
+            let bytes = serde_json::to_vec(&c).unwrap();
+            let computed_commitment = hex::encode(bytes);
+            if asserted_commitment != computed_commitment {
+                panic!(
+                    "commitment mismatch: asserted {}, computed {}",
+                    asserted_commitment, computed_commitment
+                );
+            }
+        });
     let mut bytes_to_write = Vec::with_capacity(buf.len());
     blob.serialize_compressed(&mut bytes_to_write)?;
     debug!(output_file = args.output, "Writing encoded blob to file",);
@@ -54,6 +117,22 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
     Ok(())
 }
 
+pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<String> {
+    let (srs, domain_fp) = get_srs(args.srs_cache);
+    let mut file = File::open(args.input)?;
+    let mut buf = Vec::new();
+    file.read_to_end(&mut buf)?;
+    let field_elems = utils::encode_for_domain(&domain_fp, &buf);
+    let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
+        mina_poseidon::pasta::fq_kimchi::static_params(),
+    );
+    let commitments = commitment::commit_to_field_elems(&srs, domain_fp, field_elems);
+    let c: PolyComm<ark_ec::short_weierstrass::Affine<VestaParameters>> =
+        commitment::fold_commitments(&mut fq_sponge, &commitments);
+    let bytes = serde_json::to_vec(&c).unwrap();
+    Ok(hex::encode(bytes))
+}
+
 pub fn init_subscriber() {
     let timer = UtcTime::new(format_description!(
         "[year]-[month]-[day]T[hour repr:24]:[minute]:[second].[subsecond digits:3]Z"
@@ -78,5 +157,15 @@ pub fn main() -> Result<()> {
     match args {
         cli::Commands::Encode(args) => encode_file(args),
         cli::Commands::Decode(args) => decode_file(args),
+        cli::Commands::ComputeCommitment(args) => match compute_commitment(args) {
+            Ok(c) => {
+                println!("{}", c);
+                Ok(())
+            }
+            Err(e) => {
+                eprintln!("{}", e);
+                Err(e)
+            }
+        },
     }
 }
diff --git a/saffron/test-encoding.sh b/saffron/test-encoding.sh
index 7c0ba09da0..2df6af252f 100755
--- a/saffron/test-encoding.sh
+++ b/saffron/test-encoding.sh
@@ -1,12 +1,16 @@
 #!/bin/bash
 
 # Check if input file is provided
-if [ $# -ne 1 ]; then
-    echo "Usage: $0 <input_file>"
-    exit 1
+if [ $# -lt 1 ]; then
+    echo "Usage: $0 <input_file> [srs-filepath]"
+    exit 1
 fi
 
 INPUT_FILE="$1"
+SRS_ARG=""
+if [ $# -eq 2 ]; then
+    SRS_ARG="--srs-filepath $2"
+fi
 ENCODED_FILE="${INPUT_FILE%.*}.bin"
 DECODED_FILE="${INPUT_FILE%.*}-decoded${INPUT_FILE##*.}"
 
@@ -16,16 +20,20 @@ if [ ! -f "$INPUT_FILE" ]; then
     exit 1
 fi
 
-# Run encode
+# Compute commitment and capture last line
+COMMITMENT=$(cargo run --release --bin saffron compute-commitment -i "$INPUT_FILE" $SRS_ARG | tee /dev/stderr | tail -n 1)
+
+
+# Run encode with captured commitment
 echo "Encoding $INPUT_FILE to $ENCODED_FILE"
-if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE"; then
-    echo "Encoding failed"
-    exit 1
+if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE" --assert-commitment "$COMMITMENT" $SRS_ARG; then
+    echo "Encoding failed"
+    exit 1
fi
 
 # Run decode
 echo "Decoding $ENCODED_FILE to $DECODED_FILE"
-if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE"; then
+if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE" $SRS_ARG; then
     echo "Decoding failed"
     exit 1
 fi