From 8a4a6caa796dfb1d6ca71ffb57567295f8663fa9 Mon Sep 17 00:00:00 2001
From: David Wong
Date: Thu, 21 Oct 2021 16:53:51 -0700
Subject: [PATCH] [kimchi][proof] add zero-knowledge rows in witness

---
 .../src/nolookup/constraints.rs    | 163 ++++----
 .../src/polynomials/permutation.rs |   4 +-
 dlog/plonk-15-wires/src/prover.rs  | 372 ++++++++++--------
 3 files changed, 289 insertions(+), 250 deletions(-)

diff --git a/circuits/plonk-15-wires/src/nolookup/constraints.rs b/circuits/plonk-15-wires/src/nolookup/constraints.rs
index a6a9d3f159..076b8346f1 100644
--- a/circuits/plonk-15-wires/src/nolookup/constraints.rs
+++ b/circuits/plonk-15-wires/src/nolookup/constraints.rs
@@ -5,7 +5,7 @@ This source file implements Plonk circuit constraint primitive.
 *****************************************************************************************************************/
 
 use crate::domains::EvaluationDomains;
-use crate::gate::{LookupInfo, CircuitGate, GateType};
+use crate::gate::{CircuitGate, GateType, LookupInfo};
 pub use crate::polynomial::{WitnessEvals, WitnessOverDomains, WitnessShifts};
 use crate::wires::*;
 use ark_ff::{FftField, SquareRootField, Zero};
@@ -21,6 +21,16 @@ use oracle::poseidon::ArithmeticSpongeParams;
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
 use serde_with::serde_as;
 
+//
+// Constants
+//
+
+pub const ZK_ROWS: u64 = 3;
+
+//
+// ConstraintSystem
+//
+
 #[serde_as]
 #[derive(Clone, Serialize, Deserialize, Debug)]
 pub struct ConstraintSystem {
@@ -46,7 +56,6 @@ pub struct ConstraintSystem {
     // Coefficient polynomials. These define constant that gates can use as they like.
     // ---------------------------------------
-
     /// coefficients polynomials in coefficient form
     #[serde_as(as = "[o1_utils::serialization::SerdeAs; COLUMNS]")]
     pub coefficientsm: [DP; COLUMNS],
@@ -176,7 +185,7 @@
 /// Returns the end of the circuit, which is used for introducing zero-knowledge in the permutation polynomial
 pub fn zk_w3(domain: D) -> F {
-    domain.group_gen.pow(&[domain.size - 3])
+    domain.group_gen.pow(&[domain.size - (ZK_ROWS)])
 }
 
 /// Evaluates the polynomial
@@ -244,7 +253,7 @@ impl ConstraintSystem {
     /// creates a constraint system from a vector of gates ([CircuitGate]), some sponge parameters ([ArithmeticSpongeParams]), and the number of public inputs.
pub fn create( mut gates: Vec>, - lookup_tables: Vec< Vec> >, + lookup_tables: Vec>>, fr_sponge_params: ArithmeticSpongeParams, public: usize, ) -> Option { @@ -253,8 +262,8 @@ impl ConstraintSystem { // +3 on gates.len() here to ensure that we have room for the zero-knowledge entries of the permutation polynomial // see https://minaprotocol.com/blog/a-more-efficient-approach-to-zero-knowledge-for-plonk - let domain = EvaluationDomains::::create(gates.len() + 3)?; - assert!(domain.d1.size > 3); + let domain = EvaluationDomains::::create(gates.len() + ZK_ROWS as usize)?; + assert!(domain.d1.size > ZK_ROWS); // pre-compute all the elements let mut sid = domain.d1.elements().map(|elm| elm).collect::>(); @@ -290,20 +299,20 @@ impl ConstraintSystem { } } - let sigmal1 : [_ ; PERMUTS] = { + let sigmal1: [_; PERMUTS] = { let [s0, s1, s2, s3, s4, s5, s6] = sigmal1; - [ E::>::from_vec_and_domain(s0, domain.d1), - E::>::from_vec_and_domain(s1, domain.d1), - E::>::from_vec_and_domain(s2, domain.d1), - E::>::from_vec_and_domain(s3, domain.d1), - E::>::from_vec_and_domain(s4, domain.d1), - E::>::from_vec_and_domain(s5, domain.d1), - E::>::from_vec_and_domain(s6, domain.d1) ] + [ + E::>::from_vec_and_domain(s0, domain.d1), + E::>::from_vec_and_domain(s1, domain.d1), + E::>::from_vec_and_domain(s2, domain.d1), + E::>::from_vec_and_domain(s3, domain.d1), + E::>::from_vec_and_domain(s4, domain.d1), + E::>::from_vec_and_domain(s5, domain.d1), + E::>::from_vec_and_domain(s6, domain.d1), + ] }; - let sigmam: [DP; PERMUTS] = array_init(|i| { - sigmal1[i].clone().interpolate() - }); + let sigmam: [DP; PERMUTS] = array_init(|i| sigmal1[i].clone().interpolate()); let mut s = sid[0..2].to_vec(); // TODO(mimoo): why do we do that? sid.append(&mut s); @@ -322,7 +331,10 @@ impl ConstraintSystem { // compute ECC arithmetic constraint polynomials let complete_addm = E::>::from_vec_and_domain( - gates.iter().map(|gate| F::from((gate.typ == GateType::CompleteAdd) as u64)).collect(), + gates + .iter() + .map(|gate| F::from((gate.typ == GateType::CompleteAdd) as u64)) + .collect(), domain.d1, ) .interpolate(); @@ -350,49 +362,40 @@ impl ConstraintSystem { let chacha8 = { use GateType::*; - let has_chacha_gate = - gates.iter().any(|gate| { - match gate.typ { - ChaCha0 | ChaCha1 | ChaCha2 | ChaChaFinal => true, - _ => false - } - }); + let has_chacha_gate = gates.iter().any(|gate| match gate.typ { + ChaCha0 | ChaCha1 | ChaCha2 | ChaChaFinal => true, + _ => false, + }); if !has_chacha_gate { None } else { - let a : [_; 4] = - array_init(|i| { - let g = - match i { - 0 => ChaCha0, - 1 => ChaCha1, - 2 => ChaCha2, - 3 => ChaChaFinal, - _ => panic!("Invalid index") - }; - E::>::from_vec_and_domain( - gates - .iter() - .map(|gate| { - if gate.typ == g { - F::one() - } else { - F::zero() - } - }) - .collect(), - domain.d1) - .interpolate() - .evaluate_over_domain(domain.d8) - }); + let a: [_; 4] = array_init(|i| { + let g = match i { + 0 => ChaCha0, + 1 => ChaCha1, + 2 => ChaCha2, + 3 => ChaChaFinal, + _ => panic!("Invalid index"), + }; + E::>::from_vec_and_domain( + gates + .iter() + .map(|gate| if gate.typ == g { F::one() } else { F::zero() }) + .collect(), + domain.d1, + ) + .interpolate() + .evaluate_over_domain(domain.d8) + }); Some(a) } }; - let coefficientsm: [_; COLUMNS] = - array_init(|i| { - E::>::from_vec_and_domain( - gates.iter().map(|gate| { + let coefficientsm: [_; COLUMNS] = array_init(|i| { + E::>::from_vec_and_domain( + gates + .iter() + .map(|gate| { if i < gate.c.len() { gate.c[i] } else { @@ -400,9 +403,10 @@ 
impl ConstraintSystem { } }) .collect(), - domain.d1) - .interpolate() - }); + domain.d1, + ) + .interpolate() + }); // TODO: This doesn't need to be degree 8 but that would require some changes in expr let coefficients8 = array_init(|i| coefficientsm[i].evaluate_over_domain_by_ref(domain.d8)); @@ -434,37 +438,44 @@ impl ConstraintSystem { let endo = F::zero(); let lookup_table_lengths: Vec<_> = lookup_tables.iter().map(|v| v[0].len()).collect(); - let dummy_lookup_values : Vec> = - lookup_tables.iter() + let dummy_lookup_values: Vec> = lookup_tables + .iter() .map(|cols| cols.iter().map(|c| c[c.len() - 1]).collect()) .collect(); - let lookup_tables : Vec>> = - lookup_tables + let lookup_tables: Vec>> = lookup_tables .into_iter() .zip(dummy_lookup_values.iter()) .map(|(t, dummy)| { - t.into_iter().enumerate().map(|(i, mut col)| { - let d = dummy[i]; - col.extend((0..(n - col.len())).map(|_| d)); - E::>::from_vec_and_domain(col, domain.d1).interpolate() - }).collect() - }).collect(); - let lookup_tables8 = lookup_tables.iter().map(|t| { - t.iter().map(|col| col.evaluate_over_domain_by_ref(domain.d8)).collect() - }).collect(); + t.into_iter() + .enumerate() + .map(|(i, mut col)| { + let d = dummy[i]; + col.extend((0..(n - col.len())).map(|_| d)); + E::>::from_vec_and_domain(col, domain.d1).interpolate() + }) + .collect() + }) + .collect(); + let lookup_tables8 = lookup_tables + .iter() + .map(|t| { + t.iter() + .map(|col| col.evaluate_over_domain_by_ref(domain.d8)) + .collect() + }) + .collect(); let lookup_info = LookupInfo::::create(); // return result Some(ConstraintSystem { chacha8, - lookup_selectors: - if lookup_info.lookup_used(&gates).is_some() { - LookupInfo::::create().selector_polynomials(domain, &gates) - } else { - vec![] - }, + lookup_selectors: if lookup_info.lookup_used(&gates).is_some() { + LookupInfo::::create().selector_polynomials(domain, &gates) + } else { + vec![] + }, dummy_lookup_values, lookup_table_lengths, lookup_tables8, diff --git a/circuits/plonk-15-wires/src/polynomials/permutation.rs b/circuits/plonk-15-wires/src/polynomials/permutation.rs index cac2d87b6e..f035883f35 100644 --- a/circuits/plonk-15-wires/src/polynomials/permutation.rs +++ b/circuits/plonk-15-wires/src/polynomials/permutation.rs @@ -16,7 +16,7 @@ use ark_poly::{ use ark_poly::{Polynomial, UVPolynomial}; use o1_utils::{ExtendedDensePolynomial, ExtendedEvaluations}; use oracle::rndoracle::ProofError; -use rand::rngs::ThreadRng; +use rand::{rngs::ThreadRng, CryptoRng, RngCore}; impl ConstraintSystem { /// permutation quotient poly contribution computation @@ -122,7 +122,7 @@ impl ConstraintSystem { witness: &[Vec; COLUMNS], beta: &F, gamma: &F, - rng: &mut ThreadRng, + rng: &mut (impl RngCore + CryptoRng), ) -> Result, ProofError> { let n = self.domain.d1.size as usize; diff --git a/dlog/plonk-15-wires/src/prover.rs b/dlog/plonk-15-wires/src/prover.rs index 3d280da980..c96632b177 100644 --- a/dlog/plonk-15-wires/src/prover.rs +++ b/dlog/plonk-15-wires/src/prover.rs @@ -7,7 +7,8 @@ This source file implements prover's zk-proof primitive. 
pub use super::{index::Index, range}; use crate::plonk_sponge::FrSponge; use ark_ec::AffineCurve; -use ark_ff::{FftField, Field, Zero, One}; +use ark_ff::UniformRand; +use ark_ff::{FftField, Field, One, Zero}; use ark_poly::{ univariate::DensePolynomial, Evaluations, Polynomial, Radix2EvaluationDomain as D, UVPolynomial, }; @@ -15,18 +16,21 @@ use array_init::array_init; use commitment_dlog::commitment::{ b_poly_coefficients, CommitmentCurve, CommitmentField, OpeningProof, PolyComm, }; +use lookup::CombinedEntry; use o1_utils::ExtendedDensePolynomial; use oracle::{rndoracle::ProofError, sponge::ScalarChallenge, FqSponge}; use plonk_15_wires_circuits::{ - expr::{Environment, LookupEnvironment, Constants, l0_1}, - polynomials::{chacha, lookup, poseidon, varbasemul, complete_add}, - nolookup::scalars::{LookupEvaluations, ProofEvaluations}, + expr::{l0_1, Constants, Environment, LookupEnvironment}, + gate::{combine_table_entry, GateType, LookupInfo, LookupsUsed}, + nolookup::{ + constraints::ZK_ROWS, + scalars::{LookupEvaluations, ProofEvaluations}, + }, + polynomials::{chacha, complete_add, lookup, poseidon, varbasemul}, wires::{COLUMNS, PERMUTS}, - gate::{combine_table_entry, LookupsUsed, LookupInfo, GateType}, }; -use lookup::{CombinedEntry}; -use rand::thread_rng; use std::collections::HashMap; +use std::convert::TryInto; type Fr = ::ScalarField; type Fq = ::BaseField; @@ -34,7 +38,7 @@ type Fq = ::BaseField; #[derive(Clone)] pub struct LookupCommitments { pub sorted: Vec>, - pub aggreg: PolyComm + pub aggreg: PolyComm, } #[derive(Clone)] @@ -68,30 +72,31 @@ pub struct ProverProof { } fn combine_evaluations( - init : (Evaluations>, Evaluations>), + init: (Evaluations>, Evaluations>), alpha: F, prev_alpha_pow: F, es: Vec>>, - ) -> (Evaluations>, Evaluations>) { - +) -> (Evaluations>, Evaluations>) { let mut alpha_pow = prev_alpha_pow; let pows = (0..).map(|_| { alpha_pow *= alpha; alpha_pow }); - es.into_iter().zip(pows).fold(init, |(mut a4, mut a8), (mut e, alpha_pow)| { - e.evals.iter_mut().for_each(|x| *x *= alpha_pow); - if e.domain().size == a4.domain().size { - a4 += &e; - } else if e.domain().size == a8.domain().size { - a8 += &e; - } else { - panic!("Bad evaluation") - } - drop(e); - (a4, a8) - }) + es.into_iter() + .zip(pows) + .fold(init, |(mut a4, mut a8), (mut e, alpha_pow)| { + e.evals.iter_mut().for_each(|x| *x *= alpha_pow); + if e.domain().size == a4.domain().size { + a4 += &e; + } else if e.domain().size == a8.domain().size { + a8 += &e; + } else { + panic!("Bad evaluation") + } + drop(e); + (a4, a8) + }) } impl ProverProof @@ -104,18 +109,42 @@ where // RETURN: prover's zk-proof pub fn create, G, Fr>, EFrSponge: FrSponge>>( group_map: &G::Map, - witness: &[Vec>; COLUMNS], + mut witness: [Vec>; COLUMNS], index: &Index, prev_challenges: Vec<(Vec>, PolyComm)>, ) -> Result { - let d1 = index.cs.domain.d1; - let n = index.cs.domain.d1.size as usize; - for w in witness.iter() { - if w.len() != n { + let d1_size = index.cs.domain.d1.size as usize; + // TODO: rng should be passed as arg + let rng = &mut rand::rngs::OsRng; + + // double-check the witness + if cfg!(test) { + index.cs.verify(&witness).expect("incorrect witness"); + } + + // ensure we have room for the zero-knowledge rows + let length_witness = witness[0].len(); + let length_padding = d1_size.checked_sub(length_witness); + let padding = if let Some(length_padding) = length_padding { + if length_padding < ZK_ROWS as usize { return Err(ProofError::WitnessCsInconsistent); } + vec![Fr::::zero(); length_padding] + } else { 
+ return Err(ProofError::WitnessCsInconsistent); + }; + + // pad and add zero-knowledge rows to the witness columns + for w in &mut witness { + if w.len() != length_witness { + return Err(ProofError::WitnessCsInconsistent); + } + + w.extend_from_slice(&padding); // padding + for row in w.iter_mut().rev().take(ZK_ROWS as usize) { + *row = Fr::::rand(rng); // zk-rows + } } - //if index.cs.verify(witness) != true {return Err(ProofError::WitnessCsInconsistent)}; // the transcript of the random oracle non-interactive argument let mut fq_sponge = EFqSponge::new(index.fq_sponge_params.clone()); @@ -128,16 +157,16 @@ where ) .interpolate(); - let rng = &mut thread_rng(); - // commit to the wire values - let w_comm: [(PolyComm, PolyComm>); COLUMNS] = - array_init(|i| { - let e = - Evaluations::, D>>::from_vec_and_domain( - witness[i].clone(), index.cs.domain.d1); - index.srs.commit_evaluations(d1, &e, None, rng) - }); + let w_comm: [(PolyComm, PolyComm>); COLUMNS] = array_init(|i| { + let e = Evaluations::, D>>::from_vec_and_domain( + witness[i].clone(), + index.cs.domain.d1, + ); + index + .srs + .commit_evaluations(index.cs.domain.d1, &e, None, rng) + }); // compute witness polynomials let w: [DensePolynomial>; COLUMNS] = array_init(|i| { @@ -158,15 +187,14 @@ where let lookup_used = lookup_info.lookup_used(&index.cs.gates); let joint_combiner_ = { - let s = - match lookup_used.as_ref() { - None | Some(LookupsUsed::Single) => ScalarChallenge(Fr::::zero()), - Some(LookupsUsed::Joint) => ScalarChallenge(fq_sponge.challenge()), - }; + let s = match lookup_used.as_ref() { + None | Some(LookupsUsed::Single) => ScalarChallenge(Fr::::zero()), + Some(LookupsUsed::Joint) => ScalarChallenge(fq_sponge.challenge()), + }; (s, s.to_field(&index.srs.endo_r)) }; - let joint_combiner : Fr = joint_combiner_.1; + let joint_combiner: Fr = joint_combiner_.1; // TODO: Looking-up a tuple (f_0, f_1, ..., f_{m-1}) in a tuple of tables (T_0, ..., T_{m-1}) is // reduced to a single lookup @@ -210,14 +238,12 @@ where // their average length or something like that. let dummy_lookup_value = { - let x = - match lookup_used.as_ref() { - None => Fr::::zero(), - Some(_) => - combine_table_entry( - joint_combiner, - index.cs.dummy_lookup_values[0].iter()), - }; + let x = match lookup_used.as_ref() { + None => Fr::::zero(), + Some(_) => { + combine_table_entry(joint_combiner, index.cs.dummy_lookup_values[0].iter()) + } + }; CombinedEntry(x) }; @@ -225,41 +251,47 @@ where match lookup_used.as_ref() { None => (None, None, None, None), Some(_) => { - let iter_lookup_table = || (0..n).map(|i| { - let row = index.cs.lookup_tables8[0].iter().map(|e| & e.evals[8 * i]); - CombinedEntry ( - combine_table_entry(joint_combiner, row) ) - }); - + let iter_lookup_table = || { + (0..d1_size).map(|i| { + let row = index.cs.lookup_tables8[0].iter().map(|e| &e.evals[8 * i]); + CombinedEntry(combine_table_entry(joint_combiner, row)) + }) + }; // TODO: Once we switch to committing using lagrange commitments, // `witness` will be consumed when we interpolate, so interpolation will // have to moved below this. 
- let lookup_sorted : Vec>>> = - lookup::sorted( - dummy_lookup_value.clone(), - iter_lookup_table, - index.cs.lookup_table_lengths[0], - d1, - &index.cs.gates, - &witness, - joint_combiner)?; + let lookup_sorted: Vec>>> = lookup::sorted( + dummy_lookup_value.clone(), + iter_lookup_table, + index.cs.lookup_table_lengths[0], + index.cs.domain.d1, + &index.cs.gates, + &witness, + joint_combiner, + )?; - let lookup_sorted : Vec<_> = - lookup_sorted.into_iter().map(|chunk| { - let v : Vec<_> = chunk.into_iter().map(|x| x.0).collect(); - lookup::zk_patch(v, d1, rng) - }).collect(); + let lookup_sorted: Vec<_> = lookup_sorted + .into_iter() + .map(|chunk| { + let v: Vec<_> = chunk.into_iter().map(|x| x.0).collect(); + lookup::zk_patch(v, index.cs.domain.d1, rng) + }) + .collect(); - let comm : Vec<_> = - lookup_sorted.iter().map(|v| - index.srs.commit_evaluations(d1, v, None, rng)) + let comm: Vec<_> = lookup_sorted + .iter() + .map(|v| { + index + .srs + .commit_evaluations(index.cs.domain.d1, v, None, rng) + }) .collect(); let coeffs : Vec<_> = // TODO: We can avoid storing these coefficients. lookup_sorted.iter().map(|e| e.clone().interpolate()).collect(); - let evals8 : Vec<_> = - coeffs.iter() + let evals8: Vec<_> = coeffs + .iter() .map(|v| v.evaluate_over_domain_by_ref(index.cs.domain.d8)) .collect(); @@ -279,7 +311,7 @@ where match lookup_sorted { None => (None, None, None), Some(lookup_sorted) => { - let iter_lookup_table = || (0..n).map(|i| { + let iter_lookup_table = || (0..d1_size).map(|i| { let row = index.cs.lookup_tables8[0].iter().map(|e| & e.evals[8 * i]); combine_table_entry(joint_combiner, row) }); @@ -288,7 +320,7 @@ where lookup::aggregation::<_, Fr, _>( dummy_lookup_value.0, iter_lookup_table(), - d1, + index.cs.domain.d1, &index.cs.gates, &witness, joint_combiner, @@ -297,11 +329,11 @@ where rng)?; drop(lookup_sorted); - if aggreg.evals[n - 4] != Fr::::one() { - panic!("aggregation incorrect: {}", aggreg.evals[n-3]); + if aggreg.evals[d1_size - 4] != Fr::::one() { + panic!("aggregation incorrect: {}", aggreg.evals[d1_size-3]); } - let comm = index.srs.commit_evaluations(d1, &aggreg, None, rng); + let comm = index.srs.commit_evaluations(index.cs.domain.d1, &aggreg, None, rng); fq_sponge.absorb_g(&comm.0.unshifted); let coeffs = aggreg.interpolate(); @@ -314,7 +346,7 @@ where }; // compute permutation aggregation polynomial - let z = index.cs.perm_aggreg(witness, &beta, &gamma, rng)?; + let z = index.cs.perm_aggreg(&witness, &beta, &gamma, rng)?; // commit to z let z_comm = index.srs.commit(&z, None, rng); @@ -327,56 +359,58 @@ where // evaluate polynomials over domains let lagrange = index.cs.evaluate(&w, &z); - let lookup_table_combined = - lookup_used.as_ref().map(|_| { - let joint_table = &index.cs.lookup_tables8[0]; - let mut res = joint_table[joint_table.len() - 1].clone(); - for col in joint_table.iter().rev().skip(1) { - res.evals.iter_mut().for_each(|e| *e *= joint_combiner); - res += &col; - } - res - }); + let lookup_table_combined = lookup_used.as_ref().map(|_| { + let joint_table = &index.cs.lookup_tables8[0]; + let mut res = joint_table[joint_table.len() - 1].clone(); + for col in joint_table.iter().rev().skip(1) { + res.evals.iter_mut().for_each(|e| *e *= joint_combiner); + res += &col; + } + res + }); - let lookup_env = - lookup_table_combined.as_ref() + let lookup_env = lookup_table_combined + .as_ref() .zip(lookup_sorted8.as_ref()) - .zip(lookup_aggreg8.as_ref()).map(|((lookup_table_combined, lookup_sorted), lookup_aggreg)| { - LookupEnvironment { + 
.zip(lookup_aggreg8.as_ref()) + .map( + |((lookup_table_combined, lookup_sorted), lookup_aggreg)| LookupEnvironment { aggreg: &lookup_aggreg, sorted: &lookup_sorted, table: lookup_table_combined, selectors: &index.cs.lookup_selectors, - } - }); + }, + ); // compute quotient polynomial let env = { let mut index_evals = HashMap::new(); - use GateType::*; - index_evals.insert(Poseidon, &index.cs.ps8); - index_evals.insert(CompleteAdd, &index.cs.complete_addl4); - index_evals.insert(Vbmul, &index.cs.mull8); - index_evals.insert(Endomul, &index.cs.emull); - [ChaCha0, ChaCha1, ChaCha2, ChaChaFinal].iter().enumerate().for_each(|(i, g)| { + use GateType::*; + index_evals.insert(Poseidon, &index.cs.ps8); + index_evals.insert(CompleteAdd, &index.cs.complete_addl4); + index_evals.insert(Vbmul, &index.cs.mull8); + index_evals.insert(Endomul, &index.cs.emull); + [ChaCha0, ChaCha1, ChaCha2, ChaChaFinal] + .iter() + .enumerate() + .for_each(|(i, g)| { if let Some(c) = &index.cs.chacha8 { index_evals.insert(*g, &c[i]); } }); Environment { - constants: - Constants { - alpha: alpha, - beta: beta, - gamma: gamma, - joint_combiner, - }, + constants: Constants { + alpha: alpha, + beta: beta, + gamma: gamma, + joint_combiner, + }, witness: &lagrange.d8.this.w, coefficient: &index.cs.coefficients8, vanishes_on_last_4_rows: &index.cs.vanishes_on_last_4_rows, z: &lagrange.d8.this.z, - l0_1: l0_1(d1), + l0_1: l0_1(index.cs.domain.d1), domain: index.cs.domain, index: index_evals, lookup: lookup_env, @@ -396,9 +430,7 @@ where // endoscaling let emul8 = index.cs.endomul_quot(&lagrange, &alphas[range::ENDML]); // scalar multiplication - let mul8 = - varbasemul::constraint(range::MUL.start) - .evaluations(&env); + let mul8 = varbasemul::constraint(range::MUL.start).evaluations(&env); // collect contribution evaluations let t4 = { @@ -406,36 +438,30 @@ where t4 += &add4; t4 }; - let t4 = - match index.cs.chacha8.as_ref() { - None => t4, - Some(_) => { - let chacha = chacha::constraint(range::CHACHA.start).evaluations(&env); - &t4 + &chacha - } - }; + let t4 = match index.cs.chacha8.as_ref() { + None => t4, + Some(_) => { + let chacha = chacha::constraint(range::CHACHA.start).evaluations(&env); + &t4 + &chacha + } + }; let mut t8 = &perm + &(&mul8 + &emul8); // quotient polynomial for lookup // lookup::constraints t8 += &poseidon::constraint(&index.cs.fr_sponge_params).evaluations(&env); - let (t4, t8) = - match lookup_used { - None => (t4, t8), - Some(_) => { - combine_evaluations( - (t4, t8), - alpha, - alphas[alphas.len() - 1], - lookup::constraints( - &index.cs.dummy_lookup_values[0], - d1) - .iter().map(|e| e.evaluations(&env)) - .collect() - ) - } - }; - + let (t4, t8) = match lookup_used { + None => (t4, t8), + Some(_) => combine_evaluations( + (t4, t8), + alpha, + alphas[alphas.len() - 1], + lookup::constraints(&index.cs.dummy_lookup_values[0], index.cs.domain.d1) + .iter() + .map(|e| e.evaluations(&env)) + .collect(), + ), + }; // divide contributions with vanishing polynomial let (mut t, res) = (&(&t4.interpolate() + &t8.interpolate()) + &p) @@ -462,20 +488,25 @@ where let zeta_omega = zeta * ω let lookup_evals = |e: Fr| { - lookup_aggreg_coeffs.as_ref() - .zip(lookup_sorted_coeffs.as_ref()) - .map(|(aggreg, sorted)| - LookupEvaluations { + lookup_aggreg_coeffs + .as_ref() + .zip(lookup_sorted_coeffs.as_ref()) + .map(|(aggreg, sorted)| LookupEvaluations { aggreg: aggreg.eval(e, index.max_poly_size), - sorted: sorted.iter().map(|c| c.eval(e, index.max_poly_size)).collect(), - table: - index.cs.lookup_tables[0] + 
sorted: sorted + .iter() + .map(|c| c.eval(e, index.max_poly_size)) + .collect(), + table: index.cs.lookup_tables[0] .iter() .map(|p| p.eval(e, index.max_poly_size)) .rev() .fold(vec![Fr::::zero()], |acc, x| { - acc.into_iter().zip(x.iter()).map(|(acc, x)| acc * joint_combiner + x).collect() - }) + acc.into_iter() + .zip(x.iter()) + .map(|(acc, x)| acc * joint_combiner + x) + .collect() + }), }) }; @@ -516,14 +547,15 @@ where s: array_init(|i| DensePolynomial::eval_polynomial(&es.s[i], e1)), w: array_init(|i| DensePolynomial::eval_polynomial(&es.w[i], e1)), z: DensePolynomial::eval_polynomial(&es.z, e1), - lookup: - es.lookup.as_ref().map(|l| { - LookupEvaluations { - table: DensePolynomial::eval_polynomial(&l.table, e1), - aggreg: DensePolynomial::eval_polynomial(&l.aggreg, e1), - sorted: l.sorted.iter().map(|p| DensePolynomial::eval_polynomial(p, e1)).collect(), - } - }), + lookup: es.lookup.as_ref().map(|l| LookupEvaluations { + table: DensePolynomial::eval_polynomial(&l.table, e1), + aggreg: DensePolynomial::eval_polynomial(&l.aggreg, e1), + sorted: l + .sorted + .iter() + .map(|p| DensePolynomial::eval_polynomial(p, e1)) + .collect(), + }), generic_selector: DensePolynomial::eval_polynomial(&es.generic_selector, e1), poseidon_selector: DensePolynomial::eval_polynomial(&es.poseidon_selector, e1), }) @@ -541,10 +573,7 @@ where .perm_lnrz(&evals, zeta, beta, gamma, &alphas[range::PERM]); let f = { - let (_lin_constant, lin) = - index - .linearization - .to_polynomial(&env, zeta, evals); + let (_lin_constant, lin) = index.linearization.to_polynomial(&env, zeta, evals); f + lin }; @@ -594,8 +623,8 @@ where ) }) .collect::>(); - let non_hiding = |n: usize| PolyComm { - unshifted: vec![Fr::::zero(); n], + let non_hiding = |d1_size: usize| PolyComm { + unshifted: vec![Fr::::zero(); d1_size], shifted: None, }; @@ -615,7 +644,7 @@ where // construct evaluation proof let mut polynomials = polys .iter() - .map(|(p, n)| (p, None, non_hiding(*n))) + .map(|(p, d1_size)| (p, None, non_hiding(*d1_size))) .collect::>(); polynomials.extend(vec![(&p, None, non_hiding(1))]); polynomials.extend( @@ -638,13 +667,12 @@ where w_comm: array_init(|i| w_comm[i].0.clone()), z_comm: z_comm.0, t_comm: t_comm.0, - lookup: - lookup_aggreg_comm.zip(lookup_sorted_comm).map(|(a, s)| { - LookupCommitments { - aggreg: a.0, - sorted: s.iter().map(|(x, _)| x.clone()).collect() - } - }) + lookup: lookup_aggreg_comm.zip(lookup_sorted_comm).map(|(a, s)| { + LookupCommitments { + aggreg: a.0, + sorted: s.iter().map(|(x, _)| x.clone()).collect(), + } + }), }, proof: index.srs.open( group_map,
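
Editor's note on the change above, with a sketch. The patch reserves the last ZK_ROWS = 3 rows of the d1 domain (the constraint system is built over a domain of size gates.len() + ZK_ROWS, and zk_w3 = omega^(size - ZK_ROWS) marks where those rows begin), and in ProverProof::create it pads every witness column up to the domain size before overwriting the final ZK_ROWS entries with random field elements. The following standalone Rust sketch illustrates only that padding-and-blinding step; the names pad_and_blind_witness, WitnessTooLong, and the hard-coded COLUMNS are assumptions for illustration, not the crate's actual API (which is the code inside ProverProof::create above).

// Minimal sketch of the zero-knowledge-rows padding, under the assumptions above.
use ark_ff::{FftField, UniformRand, Zero};
use rand::{CryptoRng, RngCore};

pub const ZK_ROWS: usize = 3;
pub const COLUMNS: usize = 15;

/// Hypothetical error, standing in for ProofError::WitnessCsInconsistent.
#[derive(Debug)]
pub struct WitnessTooLong;

/// Pads each witness column to `d1_size` with zeros, then replaces the last
/// ZK_ROWS entries with fresh randomness. The rows at and after
/// zk_w3 = omega^(d1_size - ZK_ROWS) are the ones set aside for zero-knowledge.
pub fn pad_and_blind_witness<F: FftField>(
    witness: &mut [Vec<F>; COLUMNS],
    d1_size: usize,
    rng: &mut (impl RngCore + CryptoRng),
) -> Result<(), WitnessTooLong> {
    let rows = witness[0].len();

    // the circuit rows plus the reserved zero-knowledge rows must fit in the domain
    let padding = d1_size
        .checked_sub(rows)
        .filter(|p| *p >= ZK_ROWS)
        .ok_or(WitnessTooLong)?;

    for col in witness.iter_mut() {
        debug_assert_eq!(col.len(), rows);
        // zero padding up to the domain size
        col.extend(std::iter::repeat(F::zero()).take(padding));
        // then randomize the final ZK_ROWS rows
        for cell in col.iter_mut().rev().take(ZK_ROWS) {
            *cell = F::rand(&mut *rng);
        }
    }
    Ok(())
}

Called with d1_size equal to index.cs.domain.d1.size, this mirrors the padding loop in ProverProof::create: the permutation argument only constrains rows before zk_w3, so the random rows blind the committed witness columns without violating any gate or copy constraint.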