
Commit

o1vm/pickles: first shot at logup
Fizzixnerd committed Nov 15, 2024
1 parent 59250e7 · commit 1ec45d7
Showing 7 changed files with 417 additions and 92 deletions.
9 changes: 5 additions & 4 deletions msm/src/logup.rs
@@ -275,13 +275,13 @@ pub struct LogupWitness<F, ID: LookupTableID> {
#[derive(Debug, Clone)]
pub struct LookupProof<T, ID> {
/// The multiplicity polynomials
pub(crate) m: BTreeMap<ID, Vec<T>>,
pub m: BTreeMap<ID, Vec<T>>,
/// The polynomial keeping the sum of each row
pub(crate) h: BTreeMap<ID, Vec<T>>,
pub h: BTreeMap<ID, Vec<T>>,
/// The "running-sum" over the rows, coined `φ`
pub(crate) sum: T,
pub sum: T,
/// All fixed lookup tables values, indexed by their ID
pub(crate) fixed_tables: BTreeMap<ID, T>,
pub fixed_tables: BTreeMap<ID, T>,
}

/// Iterator implementation to abstract the content of the structure.
@@ -512,6 +512,7 @@ pub mod prover {
}

/// Represents the environment for the logup argument.
#[derive(Clone)]
pub struct Env<G: KimchiCurve, ID: LookupTableID> {
/// The polynomial of the multiplicities, indexed by the table ID.
pub lookup_counters_poly_d1: BTreeMap<ID, Vec<DensePolynomial<G::ScalarField>>>,
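The visibility change above (pub(crate) to pub on m, h, sum and fixed_tables) makes the logup columns of a LookupProof readable from outside kimchi_msm, which the new o1vm pickles code relies on. A rough illustration of what that enables, using only the struct shown above; the helper function itself is hypothetical and not part of this commit:

    use kimchi_msm::logup::LookupProof;

    // Count how many logup-related values a proof carries: the multiplicity
    // columns `m`, the per-row sum columns `h`, the single running sum `sum`,
    // and one value per fixed lookup table.
    fn count_logup_columns<T, ID>(proof: &LookupProof<T, ID>) -> usize {
        let multiplicities: usize = proof.m.values().map(|v| v.len()).sum();
        let per_row_sums: usize = proof.h.values().map(|v| v.len()).sum();
        multiplicities + per_row_sums + 1 + proof.fixed_tables.len()
    }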
170 changes: 136 additions & 34 deletions o1vm/src/pickles/column_env.rs
@@ -1,15 +1,19 @@
use ark_ff::FftField;
use ark_poly::{Evaluations, Radix2EvaluationDomain};
use kimchi_msm::columns::Column;
use kimchi_msm::{columns::Column, logup::prover::QuotientPolynomialEnvironment, LookupTableID};
use poly_commitment::PolyComm;

use crate::{
interpreters::mips::{column::N_MIPS_SEL_COLS, witness::SCRATCH_SIZE},
pickles::proof::WitnessColumns,
};
use kimchi::circuits::{
berkeley_columns::{BerkeleyChallengeTerm, BerkeleyChallenges},
domains::{Domain, EvaluationDomains},
expr::{ColumnEnvironment as TColumnEnvironment, Constants},
use kimchi::{
circuits::{
berkeley_columns::{BerkeleyChallengeTerm, BerkeleyChallenges},
domains::{Domain, EvaluationDomains},
expr::{ColumnEnvironment as TColumnEnvironment, Constants},
},
curve::KimchiCurve,
};

type Evals<F> = Evaluations<F, Radix2EvaluationDomain<F>>;
@@ -18,10 +22,10 @@ type Evals<F> = Evaluations<F, Radix2EvaluationDomain<F>>;
/// required to evaluate an expression as a polynomial.
///
/// All are evaluations.
pub struct ColumnEnvironment<'a, F: FftField> {
pub struct ColumnEnvironment<'a, F: FftField, G: KimchiCurve, ID: LookupTableID> {
/// The witness column polynomials. Includes relation columns and dynamic
/// selector columns.
pub witness: &'a WitnessColumns<Evals<F>, [Evals<F>; N_MIPS_SEL_COLS]>,
pub witness: &'a WitnessColumns<Evals<F>, G, [Evals<F>; N_MIPS_SEL_COLS], ID>,
/// The value `prod_{j != 1} (1 - ω^j)`, used for efficiently
/// computing the evaluations of the unnormalized Lagrange basis
/// polynomials.
@@ -33,6 +37,9 @@ pub struct ColumnEnvironment<'a, F: FftField> {
pub challenges: BerkeleyChallenges<F>,
/// The domains used in the PLONK argument.
pub domain: EvaluationDomains<F>,

/// Lookup specific polynomials
pub lookup: Option<QuotientPolynomialEnvironment<'a, F, ID>>,
}

pub fn get_all_columns() -> Vec<Column> {
@@ -46,47 +53,142 @@ pub fn get_all_columns() -> Vec<Column> {
cols
}

impl<G> WitnessColumns<G, [G; N_MIPS_SEL_COLS]> {
pub fn get_column(&self, col: &Column) -> Option<&G> {
match *col {
Column::Relation(i) => {
if i < SCRATCH_SIZE {
let res = &self.scratch[i];
Some(res)
} else if i == SCRATCH_SIZE {
let res = &self.instruction_counter;
Some(res)
} else if i == SCRATCH_SIZE + 1 {
let res = &self.error;
Some(res)
} else {
panic!("We should not have that many relation columns");
}
pub fn get_column_comm<'a, G: KimchiCurve, ID: LookupTableID>(
env: &'a WitnessColumns<PolyComm<G>, G, [PolyComm<G>; N_MIPS_SEL_COLS], ID>,
col: &Column,
) -> Option<&'a PolyComm<G>> {
match *col {
Column::Relation(i) => {
if i < SCRATCH_SIZE {
let res = &env.scratch[i];
Some(res)
} else if i == SCRATCH_SIZE {
let res = &env.instruction_counter;
Some(res)
} else if i == SCRATCH_SIZE + 1 {
let res = &env.error;
Some(res)
} else {
panic!("We should not have that many relation columns");
}
Column::DynamicSelector(i) => {
assert!(
i < N_MIPS_SEL_COLS,
"We do not have that many dynamic selector columns"
);
let res = &self.selector[i];
}
Column::DynamicSelector(i) => {
assert!(
i < N_MIPS_SEL_COLS,
"We do not have that many dynamic selector columns"
);
let res = &env.selector[i];
Some(res)
}
Column::LookupPartialSum((table_id, i)) => {
if let Some(ref lookup) = env.lookup_env {
let table_id = ID::from_u32(table_id);
Some(&lookup.lookup_terms_comms_d1[&table_id][i])
} else {
panic!("No lookup provided")
}
}
Column::LookupAggregation => {
if let Some(ref lookup) = env.lookup_env {
Some(&lookup.lookup_aggregation_comm_d1)
} else {
panic!("No lookup provided")
}
}
Column::LookupMultiplicity((table_id, i)) => {
if let Some(ref lookup) = env.lookup_env {
Some(&lookup.lookup_counters_comm_d1[&ID::from_u32(table_id)][i])
} else {
panic!("No lookup provided")
}
}
Column::LookupFixedTable(table_id) => {
if let Some(ref lookup) = env.lookup_env {
Some(&lookup.fixed_lookup_tables_comms_d1[&ID::from_u32(table_id)])
} else {
panic!("No lookup provided")
}
}
_ => {
panic!("We should not have any other type of columns")
}
}
}

pub fn get_column_eval<'a, G: KimchiCurve, ID: LookupTableID>(
env: &'a WitnessColumns<Evals<G::ScalarField>, G, [Evals<G::ScalarField>; N_MIPS_SEL_COLS], ID>,
col: &Column,
) -> Option<&'a Evals<G::ScalarField>> {
match *col {
Column::Relation(i) => {
if i < SCRATCH_SIZE {
let res = &env.scratch[i];
Some(res)
} else if i == SCRATCH_SIZE {
let res = &env.instruction_counter;
Some(res)
} else if i == SCRATCH_SIZE + 1 {
let res = &env.error;
Some(res)
} else {
panic!("We should not have that many relation columns");
}
}
Column::DynamicSelector(i) => {
assert!(
i < N_MIPS_SEL_COLS,
"We do not have that many dynamic selector columns"
);
let res = &env.selector[i];
Some(res)
}
Column::LookupPartialSum((table_id, i)) => {
if let Some(ref lookup) = env.lookup_env {
let table_id = ID::from_u32(table_id);
Some(&lookup.lookup_terms_evals_d8[&table_id][i])
} else {
panic!("No lookup provided")
}
}
Column::LookupAggregation => {
if let Some(ref lookup) = env.lookup_env {
Some(&lookup.lookup_aggregation_evals_d8)
} else {
panic!("No lookup provided")
}
_ => {
panic!("We should not have any other type of columns")
}
Column::LookupMultiplicity((table_id, i)) => {
if let Some(ref lookup) = env.lookup_env {
Some(&lookup.lookup_counters_evals_d8[&ID::from_u32(table_id)][i])
} else {
panic!("No lookup provided")
}
}
Column::LookupFixedTable(table_id) => {
if let Some(ref lookup) = env.lookup_env {
Some(&lookup.fixed_lookup_tables_evals_d8[&ID::from_u32(table_id)])
} else {
panic!("No lookup provided")
}
}
_ => {
panic!("We should not have any other type of columns")
}
}
}

impl<'a, F: FftField> TColumnEnvironment<'a, F, BerkeleyChallengeTerm, BerkeleyChallenges<F>>
for ColumnEnvironment<'a, F>
impl<'a, F: FftField, G: KimchiCurve, ID: LookupTableID>
TColumnEnvironment<'a, F, BerkeleyChallengeTerm, BerkeleyChallenges<F>>
for ColumnEnvironment<'a, F, G, ID>
where
G: KimchiCurve<ScalarField = F>,
{
// FIXME: do we change to the MIPS column type?
// We do not want to keep kimchi_msm/generic prover
type Column = Column;

fn get_column(&self, col: &Self::Column) -> Option<&'a Evals<F>> {
self.witness.get_column(col)
get_column_eval(self.witness, col)
}

fn get_domain(&self, d: Domain) -> Radix2EvaluationDomain<F> {
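In short, this file replaces the old WitnessColumns::get_column method with the two free functions get_column_comm and get_column_eval, which additionally dispatch on the new lookup columns (partial sums, aggregation, multiplicities, fixed tables); those arms are only usable when a lookup environment is attached to the witness, and panic otherwise. A much-simplified sketch of that dispatch pattern, with stand-in types rather than the real kimchi_msm/o1vm definitions:

    use std::collections::BTreeMap;

    // Stand-ins for Column, the lookup prover environment and WitnessColumns.
    #[derive(Clone, Copy)]
    enum Col {
        Relation(usize),
        LookupAggregation,
        LookupMultiplicity(u32),
    }

    struct LookupEnv<T> {
        aggregation: T,
        counters: BTreeMap<u32, T>,
    }

    struct Witness<T> {
        relation: Vec<T>,
        lookup_env: Option<LookupEnv<T>>,
    }

    fn get_column<T>(w: &Witness<T>, col: Col) -> Option<&T> {
        match col {
            Col::Relation(i) => w.relation.get(i),
            // Lookup columns only exist when a lookup environment was provided.
            Col::LookupAggregation => {
                let lookup = w.lookup_env.as_ref().expect("No lookup provided");
                Some(&lookup.aggregation)
            }
            Col::LookupMultiplicity(table_id) => {
                let lookup = w.lookup_env.as_ref().expect("No lookup provided");
                lookup.counters.get(&table_id)
            }
        }
    }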
7 changes: 6 additions & 1 deletion o1vm/src/pickles/main.rs
@@ -17,6 +17,7 @@ use o1vm::{
witness::{self as mips_witness},
Instruction,
},
lookups::LookupTableIDs,
pickles::{proof::ProofInputs, prover, verifier},
preimage_oracle::PreImageOracle,
};
@@ -28,6 +29,8 @@ use mina_curves::pasta::{Fp, Vesta};

pub const DOMAIN_SIZE: usize = 1 << 15;

type ID = LookupTableIDs;

pub fn main() -> ExitCode {
let cli = cannon_cli::main_cli();

@@ -95,7 +98,7 @@ pub fn main() -> ExitCode {
constraints
};

let mut curr_proof_inputs: ProofInputs<Vesta> = ProofInputs::new(DOMAIN_SIZE);
let mut curr_proof_inputs: ProofInputs<Vesta, ID> = ProofInputs::new(DOMAIN_SIZE);
while !mips_wit_env.halt {
let _instr: Instruction = mips_wit_env.step(&configuration, &meta, &start);
for (scratch, scratch_chunk) in mips_wit_env
@@ -126,6 +129,7 @@ pub fn main() -> ExitCode {
DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>,
DefaultFrSponge<Fp, PlonkSpongeConstantsKimchi>,
_,
ID,
>(domain_fp, &srs, curr_proof_inputs, &constraints, &mut rng)
.unwrap();
// FIXME: check that the proof is correct. This is for testing purposes.
@@ -140,6 +144,7 @@ pub fn main() -> ExitCode {
Vesta,
DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>,
DefaultFrSponge<Fp, PlonkSpongeConstantsKimchi>,
ID,
>(domain_fp, &srs, &constraints, &proof);
debug!(
"Verification done in {elapsed} μs",
36 changes: 25 additions & 11 deletions o1vm/src/pickles/proof.rs
@@ -1,39 +1,53 @@
use std::collections::BTreeMap;

use kimchi::{curve::KimchiCurve, proof::PointEvaluations};
use kimchi_msm::{
logup::{prover::Env as LookupEnv, LookupProof},
LogupWitness, LookupTableID,
};
use poly_commitment::{ipa::OpeningProof, PolyComm};

use crate::interpreters::mips::{column::N_MIPS_SEL_COLS, witness::SCRATCH_SIZE};

pub struct WitnessColumns<G, S> {
pub scratch: [G; SCRATCH_SIZE],
pub instruction_counter: G,
pub error: G,
#[derive(Clone)]
pub struct WitnessColumns<F, G: KimchiCurve, S, ID: LookupTableID> {
pub scratch: [F; crate::interpreters::mips::witness::SCRATCH_SIZE],
pub instruction_counter: F,
pub error: F,
pub selector: S,
pub lookup_env: Option<LookupEnv<G, ID>>,
}

pub struct ProofInputs<G: KimchiCurve> {
pub evaluations: WitnessColumns<Vec<G::ScalarField>, Vec<G::ScalarField>>,
pub struct ProofInputs<G: KimchiCurve, ID: LookupTableID> {
pub evaluations: WitnessColumns<Vec<G::ScalarField>, G, Vec<G::ScalarField>, ID>,
pub logups: BTreeMap<ID, LogupWitness<G::ScalarField, ID>>,
}

impl<G: KimchiCurve> ProofInputs<G> {
impl<G: KimchiCurve, ID: LookupTableID> ProofInputs<G, ID> {
pub fn new(domain_size: usize) -> Self {
ProofInputs {
evaluations: WitnessColumns {
scratch: std::array::from_fn(|_| Vec::with_capacity(domain_size)),
instruction_counter: Vec::with_capacity(domain_size),
error: Vec::with_capacity(domain_size),
selector: Vec::with_capacity(domain_size),
lookup_env: None,
},
logups: BTreeMap::new(),
}
}
}

// FIXME: should we blind the commitment?
pub struct Proof<G: KimchiCurve> {
pub commitments: WitnessColumns<PolyComm<G>, [PolyComm<G>; N_MIPS_SEL_COLS]>,
pub zeta_evaluations: WitnessColumns<G::ScalarField, [G::ScalarField; N_MIPS_SEL_COLS]>,
pub zeta_omega_evaluations: WitnessColumns<G::ScalarField, [G::ScalarField; N_MIPS_SEL_COLS]>,
pub struct Proof<G: KimchiCurve, ID: LookupTableID> {
pub commitments: WitnessColumns<PolyComm<G>, G, [PolyComm<G>; N_MIPS_SEL_COLS], ID>,
pub zeta_evaluations: WitnessColumns<G::ScalarField, G, [G::ScalarField; N_MIPS_SEL_COLS], ID>,
pub zeta_omega_evaluations:
WitnessColumns<G::ScalarField, G, [G::ScalarField; N_MIPS_SEL_COLS], ID>,
pub quotient_commitment: PolyComm<G>,
pub quotient_evaluations: PointEvaluations<Vec<G::ScalarField>>,
pub logup_commitments: Option<LookupProof<PolyComm<G>, ID>>,
pub logup_evaluations: Option<LookupProof<PointEvaluations<G::ScalarField>, ID>>,
/// IPA opening proof
pub opening_proof: OpeningProof<G>,
}
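ProofInputs and Proof are now generic over a LookupTableID type in addition to the curve, which is why main.rs introduces the ID = LookupTableIDs alias and threads it through the prover and verifier calls. A minimal instantiation sketch, using only paths that appear in this commit:

    use mina_curves::pasta::Vesta;
    use o1vm::{lookups::LookupTableIDs, pickles::proof::ProofInputs};

    const DOMAIN_SIZE: usize = 1 << 15;

    fn fresh_inputs() -> ProofInputs<Vesta, LookupTableIDs> {
        // Starts with empty witness columns, `lookup_env: None` and an empty
        // `logups` map; the witness gets filled in while stepping the MIPS
        // witness environment.
        ProofInputs::new(DOMAIN_SIZE)
    }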