[kimchi][proof] add zero-knowledge rows in witness
mimoo committed Oct 29, 2021
1 parent 160ab8d commit cb7c49c
Showing 9 changed files with 118 additions and 92 deletions.
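At a high level, the commit reserves the last ZK_ROWS rows of each witness column for fresh randomness: every column is zero-padded up to the size of the evaluation domain and its final ZK_ROWS entries are overwritten with random field elements, so that the evaluations revealed by the proof do not leak witness data. A minimal standalone sketch of that idea, not taken from the commit (the generic field type and the sampling closure are placeholders):

pub const ZK_ROWS: usize = 3;

// Pad one witness column to the full domain size, then overwrite its last
// ZK_ROWS entries with random values produced by `sample`.
fn pad_with_zk_rows<F: Clone + Default>(
    column: &mut Vec<F>,
    domain_size: usize,
    mut sample: impl FnMut() -> F,
) {
    assert!(column.len() + ZK_ROWS <= domain_size, "no room for zk rows");
    column.resize(domain_size, F::default());
    for entry in column.iter_mut().rev().take(ZK_ROWS) {
        *entry = sample();
    }
}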
29 changes: 24 additions & 5 deletions circuits/plonk-15-wires/src/nolookup/constraints.rs
@@ -21,6 +21,16 @@ use oracle::poseidon::ArithmeticSpongeParams;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_with::serde_as;

//
// Constants
//

pub const ZK_ROWS: u64 = 3;

//
// ConstraintSystem
//

#[serde_as]
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct ConstraintSystem<F: FftField> {
@@ -255,7 +265,7 @@ where
/// Returns the end of the circuit, which is used for introducing zero-knowledge in the permutation polynomial
pub fn zk_w3<F: FftField>(domain: D<F>) -> F {
domain.group_gen.pow(&[domain.size - 3])
domain.group_gen.pow(&[domain.size - (ZK_ROWS)])
}

/// Evaluates the polynomial
@@ -332,9 +342,8 @@ impl<F: FftField + SquareRootField> ConstraintSystem<F> {

// +3 on gates.len() here to ensure that we have room for the zero-knowledge entries of the permutation polynomial
// see https://minaprotocol.com/blog/a-more-efficient-approach-to-zero-knowledge-for-plonk
// TODO: hardcode this value somewhere
let domain = EvaluationDomains::<F>::create(gates.len() + 3)?;
assert!(domain.d1.size > 3);
let domain = EvaluationDomains::<F>::create(gates.len() + ZK_ROWS as usize)?;
assert!(domain.d1.size > ZK_ROWS);

// pre-compute all the elements
let mut sid = domain.d1.elements().map(|elm| elm).collect::<Vec<_>>();
@@ -599,6 +608,15 @@ impl<F: FftField + SquareRootField> ConstraintSystem<F> {
pub fn verify(&self, witness: &[Vec<F>; COLUMNS]) -> Result<(), GateError> {
let left_wire = vec![F::one(), F::zero(), F::zero(), F::zero(), F::zero()];

// pad the witness
let pad = vec![F::zero(); self.domain.d1.size as usize - witness[0].len()];
let witness: [Vec<F>; COLUMNS] = array_init(|i| {
let mut w = witness[i].to_vec();
w.extend_from_slice(&pad);
w
});

// check each row's wiring
for (row, gate) in self.gates.iter().enumerate() {
// check if wires are connected
for col in 0..PERMUTS {
@@ -621,7 +639,8 @@ impl<F: FftField + SquareRootField> ConstraintSystem<F> {
}
}

gate.verify(witness, &self)
// check the gate's satisfiability
gate.verify(&witness, &self)
.map_err(|err| GateError::Custom { row, err })?;
}

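For intuition on the domain sizing in the hunk above: the radix-2 evaluation domain rounds the requested size up to the next power of two, so asking for gates.len() + ZK_ROWS leaves at least ZK_ROWS rows free after the circuit's gates. A toy illustration of that arithmetic (domain_size_for is a hypothetical helper, not part of the codebase):

pub const ZK_ROWS: usize = 3;

// Mirror the radix-2 rounding performed when the evaluation domain is created.
fn domain_size_for(gate_count: usize) -> usize {
    (gate_count + ZK_ROWS).next_power_of_two()
}

fn main() {
    // 13 gates need 13 + 3 = 16 rows, already a power of two...
    assert_eq!(domain_size_for(13), 16);
    // ...while 14 gates need 17 rows, so the domain rounds up to 32.
    assert_eq!(domain_size_for(14), 32);
}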
15 changes: 6 additions & 9 deletions circuits/plonk-15-wires/src/polynomials/chacha.rs
@@ -435,16 +435,13 @@ mod tests {
use super::*;
use crate::polynomials::chacha::constraint;
use crate::{
expr::{Column, Constants, Expr, Linearization, PolishToken},
gate::{LookupInfo, LookupsUsed},
gates::poseidon::ROUNDS_PER_ROW,
nolookup::constraints::{zk_w3, ConstraintSystem},
expr::{Column, Constants, PolishToken},
gate::LookupInfo,
nolookup::scalars::{LookupEvaluations, ProofEvaluations},
polynomials::{chacha, lookup},
wires::*,
};
use ark_ff::UniformRand;
use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Radix2EvaluationDomain as D};
use ark_poly::{EvaluationDomain, Radix2EvaluationDomain as D};
use array_init::array_init;
use mina_curves::pasta::fp::Fp as F;
use rand::{rngs::StdRng, SeedableRng};
@@ -491,10 +488,10 @@ mod tests {

let expr = constraint::<F>(10);
let linearized = expr.linearize(evaluated_cols).unwrap();
let expr_polish = expr.to_polish();
let _expr_polish = expr.to_polish();
let linearized_polish = linearized.map(|e| e.to_polish());

let mut rng = &mut StdRng::from_seed([0u8; 32]);
let rng = &mut StdRng::from_seed([0u8; 32]);

let d = D::new(1024).unwrap();

@@ -554,7 +551,7 @@
/*
assert_eq!(
expr.evaluate_(d, pt, &evals, &constants).unwrap(),
PolishToken::evaluate(&expr_polish, d, pt, &evals, &constants).unwrap());
PolishToken::evaluate(&_expr_polish, d, pt, &evals, &constants).unwrap());
*/
}
}
4 changes: 2 additions & 2 deletions circuits/plonk-15-wires/src/polynomials/permutation.rs
@@ -16,7 +16,7 @@ use ark_poly::{
use ark_poly::{Polynomial, UVPolynomial};
use o1_utils::{ExtendedDensePolynomial, ExtendedEvaluations};
use oracle::rndoracle::ProofError;
use rand::rngs::ThreadRng;
use rand::{rngs::ThreadRng, CryptoRng, RngCore};

impl<F: FftField + SquareRootField> ConstraintSystem<F> {
/// permutation quotient poly contribution computation
@@ -122,7 +122,7 @@ impl<F: FftField + SquareRootField> ConstraintSystem<F> {
witness: &[Vec<F>; COLUMNS],
beta: &F,
gamma: &F,
rng: &mut ThreadRng,
rng: &mut (impl RngCore + CryptoRng),
) -> Result<DensePolynomial<F>, ProofError> {
let n = self.domain.d1.size as usize;

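A note on the signature change above: widening the ThreadRng parameter to impl RngCore + CryptoRng accepts any cryptographically secure RNG, which is what lets the prover below pass OsRng instead of thread_rng(). A minimal sketch of the pattern (sample_blinder is made up for illustration and assumes rand 0.8-style APIs):

use rand::{rngs::OsRng, thread_rng, CryptoRng, RngCore};

// Any RNG that is both a core RNG and marked cryptographically secure is accepted.
fn sample_blinder(rng: &mut (impl RngCore + CryptoRng)) -> u64 {
    rng.next_u64()
}

fn main() {
    let _from_thread_rng = sample_blinder(&mut thread_rng());
    let _from_os_rng = sample_blinder(&mut OsRng);
}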
79 changes: 54 additions & 25 deletions dlog/plonk-15-wires/src/prover.rs
@@ -7,6 +7,7 @@ This source file implements prover's zk-proof primitive.
pub use super::{index::Index, range};
use crate::plonk_sponge::FrSponge;
use ark_ec::AffineCurve;
use ark_ff::UniformRand;
use ark_ff::{FftField, Field, One, Zero};
use ark_poly::{
univariate::DensePolynomial, Evaluations, Polynomial, Radix2EvaluationDomain as D, UVPolynomial,
@@ -18,14 +19,14 @@ use commitment_dlog::commitment::{
use lookup::CombinedEntry;
use o1_utils::ExtendedDensePolynomial;
use oracle::{rndoracle::ProofError, sponge::ScalarChallenge, FqSponge};
use plonk_15_wires_circuits::nolookup::constraints::ZK_ROWS;
use plonk_15_wires_circuits::{
expr::{l0_1, Constants, Environment, LookupEnvironment},
gate::{combine_table_entry, GateType, LookupInfo, LookupsUsed},
nolookup::scalars::{LookupEvaluations, ProofEvaluations},
polynomials::{chacha, complete_add, endomul_scalar, endosclmul, lookup, poseidon, varbasemul},
wires::{COLUMNS, PERMUTS},
};
use rand::thread_rng;
use std::collections::HashMap;

type Fr<G> = <G as AffineCurve>::ScalarField;
@@ -105,18 +106,42 @@
// RETURN: prover's zk-proof
pub fn create<EFqSponge: Clone + FqSponge<Fq<G>, G, Fr<G>>, EFrSponge: FrSponge<Fr<G>>>(
group_map: &G::Map,
witness: &[Vec<Fr<G>>; COLUMNS],
mut witness: [Vec<Fr<G>>; COLUMNS],
index: &Index<G>,
prev_challenges: Vec<(Vec<Fr<G>>, PolyComm<G>)>,
) -> Result<Self, ProofError> {
let d1 = index.cs.domain.d1;
let n = index.cs.domain.d1.size as usize;
for w in witness.iter() {
if w.len() != n {
let d1_size = index.cs.domain.d1.size as usize;
// TODO: rng should be passed as arg
let rng = &mut rand::rngs::OsRng;

// double-check the witness
if cfg!(test) {
index.cs.verify(&witness).expect("incorrect witness");
}

// ensure we have room for the zero-knowledge rows
let length_witness = witness[0].len();
let length_padding = d1_size
.checked_sub(length_witness)
.ok_or_else(|| ProofError::WitnessCsInconsistent)?;
if length_padding < ZK_ROWS as usize {
return Err(ProofError::WitnessCsInconsistent);
}

// pad and add zero-knowledge rows to the witness columns
for w in &mut witness {
if w.len() != length_witness {
return Err(ProofError::WitnessCsInconsistent);
}

// padding
w.extend(std::iter::repeat(Fr::<G>::zero()).take(length_padding));

// zk-rows
for row in w.iter_mut().rev().take(ZK_ROWS as usize) {
*row = Fr::<G>::rand(rng);
}
}
//if index.cs.verify(witness) != true {return Err(ProofError::WitnessCsInconsistent)};

// the transcript of the random oracle non-interactive argument
let mut fq_sponge = EFqSponge::new(index.fq_sponge_params.clone());
@@ -129,15 +154,15 @@
)
.interpolate();

let rng = &mut thread_rng();

// commit to the wire values
let w_comm: [(PolyComm<G>, PolyComm<Fr<G>>); COLUMNS] = array_init(|i| {
let e = Evaluations::<Fr<G>, D<Fr<G>>>::from_vec_and_domain(
witness[i].clone(),
index.cs.domain.d1,
);
index.srs.commit_evaluations(d1, &e, None, rng)
index
.srs
.commit_evaluations(index.cs.domain.d1, &e, None, rng)
});

// compute witness polynomials
@@ -224,7 +249,7 @@ where
None => (None, None, None, None),
Some(_) => {
let iter_lookup_table = || {
(0..n).map(|i| {
(0..d1_size).map(|i| {
let row = index.cs.lookup_tables8[0].iter().map(|e| &e.evals[8 * i]);
CombinedEntry(combine_table_entry(joint_combiner, row))
})
@@ -237,7 +262,7 @@
dummy_lookup_value.clone(),
iter_lookup_table,
index.cs.lookup_table_lengths[0],
d1,
index.cs.domain.d1,
&index.cs.gates,
&witness,
joint_combiner,
@@ -247,13 +272,17 @@
.into_iter()
.map(|chunk| {
let v: Vec<_> = chunk.into_iter().map(|x| x.0).collect();
lookup::zk_patch(v, d1, rng)
lookup::zk_patch(v, index.cs.domain.d1, rng)
})
.collect();

let comm: Vec<_> = lookup_sorted
.iter()
.map(|v| index.srs.commit_evaluations(d1, v, None, rng))
.map(|v| {
index
.srs
.commit_evaluations(index.cs.domain.d1, v, None, rng)
})
.collect();
let coeffs : Vec<_> =
// TODO: We can avoid storing these coefficients.
@@ -279,7 +308,7 @@
match lookup_sorted {
None => (None, None, None),
Some(lookup_sorted) => {
let iter_lookup_table = || (0..n).map(|i| {
let iter_lookup_table = || (0..d1_size).map(|i| {
let row = index.cs.lookup_tables8[0].iter().map(|e| & e.evals[8 * i]);
combine_table_entry(joint_combiner, row)
});
@@ -288,7 +317,7 @@
lookup::aggregation::<_, Fr<G>, _>(
dummy_lookup_value.0,
iter_lookup_table(),
d1,
index.cs.domain.d1,
&index.cs.gates,
&witness,
joint_combiner,
Expand All @@ -297,11 +326,11 @@ where
rng)?;

drop(lookup_sorted);
if aggreg.evals[n - 4] != Fr::<G>::one() {
panic!("aggregation incorrect: {}", aggreg.evals[n-3]);
if aggreg.evals[d1_size - 4] != Fr::<G>::one() {
panic!("aggregation incorrect: {}", aggreg.evals[d1_size-3]);
}

let comm = index.srs.commit_evaluations(d1, &aggreg, None, rng);
let comm = index.srs.commit_evaluations(index.cs.domain.d1, &aggreg, None, rng);
fq_sponge.absorb_g(&comm.0.unshifted);

let coeffs = aggreg.interpolate();
@@ -314,7 +343,7 @@
};

// compute permutation aggregation polynomial
let z = index.cs.perm_aggreg(witness, &beta, &gamma, rng)?;
let z = index.cs.perm_aggreg(&witness, &beta, &gamma, rng)?;
// commit to z
let z_comm = index.srs.commit(&z, None, rng);

@@ -381,7 +410,7 @@ where
coefficient: &index.cs.coefficients8,
vanishes_on_last_4_rows: &index.cs.vanishes_on_last_4_rows,
z: &lagrange.d8.this.z,
l0_1: l0_1(d1),
l0_1: l0_1(index.cs.domain.d1),
domain: index.cs.domain,
index: index_evals,
lookup: lookup_env,
@@ -440,7 +469,7 @@ where
(t4, t8),
alpha,
alphas[alphas.len() - 1],
lookup::constraints(&index.cs.dummy_lookup_values[0], d1)
lookup::constraints(&index.cs.dummy_lookup_values[0], index.cs.domain.d1)
.iter()
.map(|e| e.evaluations(&env))
.collect(),
@@ -606,8 +635,8 @@ where
)
})
.collect::<Vec<_>>();
let non_hiding = |n: usize| PolyComm {
unshifted: vec![Fr::<G>::zero(); n],
let non_hiding = |d1_size: usize| PolyComm {
unshifted: vec![Fr::<G>::zero(); d1_size],
shifted: None,
};

@@ -627,7 +656,7 @@
// construct evaluation proof
let mut polynomials = polys
.iter()
.map(|(p, n)| (p, None, non_hiding(*n)))
.map(|(p, d1_size)| (p, None, non_hiding(*d1_size)))
.collect::<Vec<_>>();
polynomials.extend(vec![(&p, None, non_hiding(1))]);
polynomials.extend(vec![(&ft, None, blinding_ft)]);
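For context on the length check added near the top of create: the witness must fit inside the domain with at least ZK_ROWS rows to spare, otherwise proving is rejected. A standalone sketch of that guard under those assumptions (WitnessTooLong is a hypothetical stand-in for ProofError::WitnessCsInconsistent):

const ZK_ROWS: usize = 3;

#[derive(Debug)]
struct WitnessTooLong;

// Return how many rows of padding the column needs, or an error if the domain
// cannot hold the witness plus the ZK_ROWS reserved rows.
fn padding_for(witness_len: usize, domain_size: usize) -> Result<usize, WitnessTooLong> {
    let padding = domain_size.checked_sub(witness_len).ok_or(WitnessTooLong)?;
    if padding < ZK_ROWS {
        return Err(WitnessTooLong);
    }
    Ok(padding)
}

fn main() {
    assert_eq!(padding_for(10, 16).unwrap(), 6); // enough room for the zk rows
    assert!(padding_for(14, 16).is_err());       // only 2 spare rows: too tight
    assert!(padding_for(20, 16).is_err());       // witness longer than the domain
}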
