Add verify-storage-proof command to cli and e2e #2979

Merged · Feb 3, 2025 · 4 commits
1 change: 0 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion saffron/Cargo.toml
@@ -34,7 +34,6 @@ rayon.workspace = true
rmp-serde.workspace = true
serde.workspace = true
serde_with.workspace = true
sha3.workspace = true
thiserror.workspace = true
time = { version = "0.3", features = ["macros"] }
tracing = "0.1"
31 changes: 25 additions & 6 deletions saffron/src/blob.rs
@@ -1,7 +1,12 @@
use crate::utils::{decode_into, encode_for_domain};
use crate::{
commitment::fold_commitments,
utils::{decode_into, encode_for_domain},
};
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use kimchi::curve::KimchiCurve;
use mina_poseidon::FqSponge;
use o1_utils::FieldHelpers;
use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _};
use rayon::prelude::*;
@@ -18,6 +23,9 @@ pub struct FieldBlob<G: CommitmentCurve> {
pub n_bytes: usize,
pub domain_size: usize,
pub commitments: Vec<PolyComm<G>>,
pub folded_commitment: PolyComm<G>,
#[serde_as(as = "o1_utils::serialization::SerdeAs")]
pub alpha: G::ScalarField,
#[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
pub data: Vec<DensePolynomial<G::ScalarField>>,
}
@@ -33,9 +41,12 @@ fn commit_to_blob_data<G: CommitmentCurve>(
.collect()
}

impl<G: CommitmentCurve> FieldBlob<G> {
impl<G: KimchiCurve> FieldBlob<G> {
#[instrument(skip_all, level = "debug")]
pub fn encode<D: EvaluationDomain<G::ScalarField>>(
pub fn encode<
D: EvaluationDomain<G::ScalarField>,
EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
>(
srs: &SRS<G>,
domain: D,
bytes: &[u8],
@@ -52,6 +63,11 @@ impl<G: CommitmentCurve> FieldBlob<G> {

let commitments = commit_to_blob_data(srs, &data);

let (folded_commitment, alpha) = {
let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
fold_commitments(&mut sponge, &commitments)
};

debug!(
"Encoded {:.2} MB into {} polynomials",
bytes.len() as f32 / 1_000_000.0,
@@ -62,6 +78,8 @@
n_bytes: bytes.len(),
domain_size,
commitments,
folded_commitment,
alpha,
data,
}
}
@@ -101,7 +119,8 @@ mod tests {
use super::*;
use crate::utils::test_utils::*;
use ark_poly::Radix2EvaluationDomain;
use mina_curves::pasta::{Fp, Vesta};
use mina_curves::pasta::{Fp, Vesta, VestaParameters};
use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
use once_cell::sync::Lazy;
use proptest::prelude::*;

@@ -121,7 +140,7 @@
#![proptest_config(ProptestConfig::with_cases(20))]
#[test]
fn test_round_trip_blob_encoding(UserData(xs) in UserData::arbitrary())
{ let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
{ let blob = FieldBlob::<Vesta>::encode::<_, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(&*SRS, *DOMAIN, &xs);
let bytes = rmp_serde::to_vec(&blob).unwrap();
let a = rmp_serde::from_slice(&bytes).unwrap();
// check that ark-serialize is behaving as expected
@@ -138,7 +157,7 @@
fn test_user_and_storage_provider_commitments_equal(UserData(xs) in UserData::arbitrary())
{ let elems = encode_for_domain(&*DOMAIN, &xs);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &xs);
let blob = FieldBlob::<Vesta>::encode::<_, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(&*SRS, *DOMAIN, &xs);
prop_assert_eq!(user_commitments, blob.commitments);
}
}
26 changes: 26 additions & 0 deletions saffron/src/cli.rs
@@ -90,6 +90,30 @@ pub struct StorageProofArgs {
pub challenge: HexString,
}

#[derive(Parser)]
pub struct VerifyStorageProofArgs {
#[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
pub srs_cache: Option<String>,

#[arg(
long,
short = 'c',
value_name = "COMMITMENT",
help = "commitment (hex encoded)"
)]
pub commitment: HexString,

#[arg(
long = "challenge",
value_name = "CHALLENGE",
help = "challenge (hex encoded"
)]
pub challenge: HexString,

#[arg(long, short = 'p', value_name = "PROOF", help = "proof (hex encoded)")]
pub proof: HexString,
}

#[derive(Parser)]
#[command(
name = "saffron",
@@ -105,4 +129,6 @@ pub enum Commands {
ComputeCommitment(ComputeCommitmentArgs),
#[command(name = "storage-proof")]
StorageProof(StorageProofArgs),
#[command(name = "verify-storage-proof")]
VerifyStorageProof(VerifyStorageProofArgs),
}
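
For orientation, a minimal invocation sketch using the flags declared on `VerifyStorageProofArgs` above; the SRS path is a placeholder, and the shell variables stand for the hex strings emitted by the `compute-commitment` and `storage-proof` subcommands (compare the e2e script further down this diff).

```bash
# Sketch only: verify a previously produced storage proof against a folded
# commitment. srs.bin is a placeholder path; COMMITMENT, CHALLENGE and PROOF
# hold hex-encoded strings as emitted by compute-commitment and storage-proof.
cargo run --release --bin saffron verify-storage-proof \
  --srs-filepath srs.bin \
  -c "$COMMITMENT" \
  --challenge "$CHALLENGE" \
  -p "$PROOF"
```

If verification fails, the `assert!` in `verify_storage_proof` (see `main.rs` below) panics, so the command exits non-zero, which is what the e2e script keys off.
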
24 changes: 22 additions & 2 deletions saffron/src/commitment.rs
@@ -1,6 +1,7 @@
use ark_ec::AffineRepr;
use ark_ff::One;
use ark_poly::{Evaluations, Radix2EvaluationDomain as D};
use kimchi::curve::KimchiCurve;
use mina_poseidon::FqSponge;
use poly_commitment::{
commitment::{absorb_commitment, CommitmentCurve},
@@ -32,7 +33,7 @@ pub fn fold_commitments<
>(
sponge: &mut EFqSponge,
commitments: &[PolyComm<G>],
) -> PolyComm<G> {
) -> (PolyComm<G>, G::ScalarField) {
for commitment in commitments {
absorb_commitment(sponge, commitment)
}
@@ -45,5 +46,24 @@
Some(res)
})
.collect::<Vec<_>>();
PolyComm::multi_scalar_mul(&commitments.iter().collect::<Vec<_>>(), &powers)
(
PolyComm::multi_scalar_mul(&commitments.iter().collect::<Vec<_>>(), &powers),
alpha,
)
}

pub fn user_commitment<
G: KimchiCurve,
EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
>(
srs: &SRS<G>,
domain: D<G::ScalarField>,
field_elems: Vec<Vec<G::ScalarField>>,
) -> PolyComm<G> {
let commitments = commit_to_field_elems(srs, domain, field_elems);
let (commitment, _) = {
let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
fold_commitments(&mut sponge, &commitments)
};
commitment
}
66 changes: 42 additions & 24 deletions saffron/src/main.rs
@@ -9,9 +9,11 @@ use rand::rngs::OsRng;
use saffron::{
blob::FieldBlob,
cli::{self, HexString},
commitment, env, proof, utils,
commitment::user_commitment,
env,
proof::{self, StorageProof},
utils,
};
use sha3::{Digest, Sha3_256};
use std::{
fs::File,
io::{Read, Write},
@@ -20,6 +22,8 @@ use tracing::{debug, debug_span};

pub const DEFAULT_SRS_SIZE: usize = 1 << 16;

type FqSponge = DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>;

fn get_srs(cache: Option<String>) -> (SRS<Vesta>, Radix2EvaluationDomain<Fp>) {
let res = match cache {
Some(cache) => {
@@ -73,17 +77,16 @@ fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
let mut file = File::open(args.input)?;
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let blob = FieldBlob::<Vesta>::encode(&srs, domain, &buf);
let blob = FieldBlob::<Vesta>::encode::<_, FqSponge>(&srs, domain, &buf);
args.assert_commitment
.into_iter()
.for_each(|asserted_commitment| {
let bytes = rmp_serde::to_vec(&blob.commitments).unwrap();
let hash = Sha3_256::new().chain_update(bytes).finalize().to_vec();
if asserted_commitment.0 != hash {
let c = rmp_serde::from_slice(&asserted_commitment.0).unwrap();
if blob.folded_commitment != c {
panic!(
"commitment hash mismatch: asserted {}, computed {}",
asserted_commitment,
HexString(hash)
HexString(rmp_serde::encode::to_vec(&blob.folded_commitment).unwrap())
);
}
});
@@ -99,28 +102,42 @@ pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<HexString>
let mut buf = Vec::new();
file.read_to_end(&mut buf)?;
let field_elems = utils::encode_for_domain(&domain_fp, &buf);
let commitments = commitment::commit_to_field_elems(&srs, domain_fp, field_elems);
let bytes = rmp_serde::to_vec(&commitments).unwrap();
let hash = Sha3_256::new().chain_update(bytes).finalize().to_vec();
Ok(HexString(hash))
let commitment = user_commitment::<_, FqSponge>(&srs, domain_fp, field_elems);
let res = rmp_serde::to_vec(&commitment)?;
Ok(HexString(res))
}

pub fn storage_proof(args: cli::StorageProofArgs) -> Result<HexString> {
let file = File::open(args.input)?;
let blob: FieldBlob<Vesta> = rmp_serde::decode::from_read(file)?;
let proof =
{
let (srs, _) = get_srs(args.srs_cache);
let group_map = <Vesta as CommitmentCurve>::Map::setup();
let mut rng = OsRng;
let evaluation_point = utils::encode(&args.challenge.0);
proof::storage_proof::<
Vesta,
DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>,
>(&srs, &group_map, blob, evaluation_point, &mut rng)
};
let bytes = rmp_serde::to_vec(&proof).unwrap();
Ok(HexString(bytes))
let proof = {
let (srs, _) = get_srs(args.srs_cache);
let group_map = <Vesta as CommitmentCurve>::Map::setup();
let mut rng = OsRng;
let evaluation_point = utils::encode(&args.challenge.0);
proof::storage_proof::<Vesta, FqSponge>(&srs, &group_map, blob, evaluation_point, &mut rng)
};
let res = rmp_serde::to_vec(&proof)?;
Ok(HexString(res))
}

pub fn verify_storage_proof(args: cli::VerifyStorageProofArgs) -> Result<()> {
let (srs, _) = get_srs(args.srs_cache);
let group_map = <Vesta as CommitmentCurve>::Map::setup();
let commitment = rmp_serde::from_slice(&args.commitment.0)?;
let evaluation_point = utils::encode(&args.challenge.0);
let proof: StorageProof<Vesta> = rmp_serde::from_slice(&args.proof.0)?;
let mut rng = OsRng;
let res = proof::verify_storage_proof::<Vesta, FqSponge>(
&srs,
&group_map,
commitment,
evaluation_point,
&proof,
&mut rng,
);
assert!(res);
Ok(())
}

pub fn main() -> Result<()> {
@@ -139,5 +156,6 @@
println!("{}", proof);
Ok(())
}
cli::Commands::VerifyStorageProof(args) => verify_storage_proof(args),
}
}
18 changes: 7 additions & 11 deletions saffron/src/proof.rs
@@ -7,7 +7,7 @@ use kimchi::curve::KimchiCurve;
use mina_poseidon::FqSponge;
use o1_utils::ExtendedDensePolynomial;
use poly_commitment::{
commitment::{absorb_commitment, BatchEvaluationProof, CommitmentCurve, Evaluation},
commitment::{BatchEvaluationProof, CommitmentCurve, Evaluation},
ipa::{OpeningProof, SRS},
utils::DensePolynomialOrEvaluations,
PolyComm,
@@ -37,19 +37,15 @@ pub fn storage_proof<G: KimchiCurve, EFqSponge: Clone + FqSponge<G::BaseField, G
where
G::BaseField: PrimeField,
{
let alpha = {
let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
for commitment in &blob.commitments {
absorb_commitment(&mut sponge, commitment)
}
sponge.challenge()
};
let p = {
let init = (DensePolynomial::zero(), G::ScalarField::one());
blob.data
.into_iter()
.fold(init, |(acc_poly, curr_power), curr_poly| {
(acc_poly + curr_poly.scale(curr_power), curr_power * alpha)
(
acc_poly + curr_poly.scale(curr_power),
curr_power * blob.alpha,
)
})
.0
};
@@ -155,15 +151,15 @@
#[test]
fn test_storage_prove_verify(UserData(data) in UserData::arbitrary()) {
let mut rng = OsRng;
let commitment = {
let (commitment,_) = {
let field_elems = encode_for_domain(&*DOMAIN, &data);
let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, field_elems);
let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
mina_poseidon::pasta::fq_kimchi::static_params(),
);
fold_commitments(&mut fq_sponge, &user_commitments)
};
let blob = FieldBlob::<Vesta>::encode(&*SRS, *DOMAIN, &data);
let blob = FieldBlob::<Vesta>::encode::<_, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(&*SRS, *DOMAIN, &data);
let evaluation_point = Fp::rand(&mut rng);
let proof = storage_proof::<
Vesta, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>
10 changes: 10 additions & 0 deletions saffron/test-encoding.sh
@@ -24,6 +24,7 @@ fi
COMMITMENT=$(cargo run --release --bin saffron compute-commitment -i "$INPUT_FILE" $SRS_ARG | tee /dev/stderr | tail -n 1)



# Run encode with captured commitment
echo "Encoding $INPUT_FILE to $ENCODED_FILE"
if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE" --assert-commitment "$COMMITMENT" $SRS_ARG; then
@@ -44,6 +45,15 @@ if [ $? -ne 0 ]; then
exit 1
fi

# Verify the storage proof
echo "Verifying proof..."
if ! cargo run --release --bin saffron verify-storage-proof --commitment "$COMMITMENT" --challenge "$CHALLENGE" --proof "$PROOF" $SRS_ARG; then
echo "Proof verification failed"
exit 1
fi
echo "✓ Proof verification successful"


# Run decode
echo "Decoding $ENCODED_FILE to $DECODED_FILE"
if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE" $SRS_ARG; then
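
Piecing the hunks above together, the end-to-end flow exercised by `test-encoding.sh` now looks roughly like the sketch below. It is not the literal script: the file names, the SRS flag, the challenge value, and in particular the `storage-proof` flags are assumptions (only its `challenge` field is visible in `StorageProofArgs` earlier in this diff).

```bash
#!/usr/bin/env bash
set -euo pipefail

# Placeholders: the real script derives these from its arguments/environment.
INPUT_FILE=data.bin
ENCODED_FILE=data.enc
SRS_ARG="--srs-filepath srs.bin"
CHALLENGE="00ff"   # placeholder hex-encoded challenge

# 1. The user computes the folded commitment to the raw data.
COMMITMENT=$(cargo run --release --bin saffron compute-commitment \
  -i "$INPUT_FILE" $SRS_ARG | tail -n 1)

# 2. The storage provider encodes the file, asserting it matches that commitment.
cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE" \
  --assert-commitment "$COMMITMENT" $SRS_ARG

# 3. The storage provider opens the encoded blob at the challenge point.
#    (Assumed flags: -i for the encoded blob, --challenge for the point.)
PROOF=$(cargo run --release --bin saffron storage-proof \
  -i "$ENCODED_FILE" --challenge "$CHALLENGE" $SRS_ARG | tail -n 1)

# 4. Anyone verifies the proof against the commitment; this is the step the PR adds.
cargo run --release --bin saffron verify-storage-proof \
  --commitment "$COMMITMENT" --challenge "$CHALLENGE" --proof "$PROOF" $SRS_ARG
```
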