Skip to content

Commit

Permalink
Add dedicated opening verifying function
Browse files Browse the repository at this point in the history
  • Loading branch information
hratoanina committed Aug 5, 2024
1 parent 144f607 commit e73c257
Show file tree
Hide file tree
Showing 5 changed files with 144 additions and 196 deletions.
70 changes: 38 additions & 32 deletions starky/src/cross_table_lookup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ use crate::lookup::{
eval_helper_columns, eval_helper_columns_circuit, get_grand_product_challenge_set,
get_helper_cols, Column, ColumnFilter, Filter, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::proof::{MultiProof, StarkProofTarget, StarkProofWithMetadata};
use crate::proof::{MultiProof, StarkOpeningSet, StarkProofTarget, StarkProofWithMetadata};
use crate::stark::Stark;

/// An alias for `usize`, to represent the index of a STARK table in a multi-STARK setting.
Expand Down Expand Up @@ -164,7 +164,7 @@ pub struct CtlZData<'a, F: Field> {
pub(crate) columns: Vec<&'a [Column<F>]>,
/// Vector of filter columns for the current table.
/// Each filter evaluates to either 1 or 0.
pub(crate) filter: Vec<Filter<F>>,
pub(crate) filters: Vec<Filter<F>>,
}

impl<'a, F: Field> CtlZData<'a, F> {
Expand All @@ -174,14 +174,14 @@ impl<'a, F: Field> CtlZData<'a, F> {
z: PolynomialValues<F>,
challenge: GrandProductChallenge<F>,
columns: Vec<&'a [Column<F>]>,
filter: Vec<Filter<F>>,
filters: Vec<Filter<F>>,
) -> Self {
Self {
helper_columns,
z,
challenge,
columns,
filter,
filters,
}
}
}
Expand Down Expand Up @@ -266,16 +266,22 @@ where
let num_ctl_helper_cols =
num_ctl_helper_columns_by_table(all_cross_table_lookups, max_constraint_degree);

CtlCheckVars::from_proofs(
&multi_proof.stark_proofs,
let openings = multi_proof
.stark_proofs
.iter()
.map(|proof| proof.proof.openings.clone())
.collect::<Vec<_>>();

CtlCheckVars::from_proofs::<C, N>(
&openings,
all_cross_table_lookups,
ctl_challenges,
num_lookup_columns,
&num_ctl_helper_cols,
)
}
/// Returns the number of helper columns for each `Table`.
pub(crate) fn num_ctl_helper_columns_by_table<F: Field, const N: usize>(
pub fn num_ctl_helper_columns_by_table<F: Field, const N: usize>(
ctls: &[CrossTableLookup<F>],
constraint_degree: usize,
) -> Vec<[usize; N]> {
Expand Down Expand Up @@ -362,17 +368,17 @@ pub(crate) fn cross_table_lookup_data<'a, F: RichField, const D: usize, const N:
}
});
let mut columns = Vec::with_capacity(count);
let mut filter = Vec::with_capacity(count);
let mut filters = Vec::with_capacity(count);
for (col, filt) in cols_filts {
columns.push(&col[..]);
filter.push(filt.clone());
filters.push(filt.clone());
}
ctl_data_per_table[table].zs_columns.push(CtlZData {
helper_columns: helpers_zs[..num_helpers].to_vec(),
z: helpers_zs[num_helpers].clone(),
challenge,
columns,
filter,
filters,
});
}
// There is no helper column for the looking table.
Expand All @@ -384,7 +390,7 @@ pub(crate) fn cross_table_lookup_data<'a, F: RichField, const D: usize, const N:
z: looked_poly,
challenge,
columns: vec![&looked_table.columns[..]],
filter: vec![looked_table.filter.clone()],
filters: vec![looked_table.filter.clone()],
});
}
}
Expand Down Expand Up @@ -486,27 +492,27 @@ where
/// Column linear combinations of the `CrossTableLookup`s.
pub(crate) columns: Vec<&'a [Column<F>]>,
/// Filter that evaluates to either 1 or 0.
pub(crate) filter: Vec<Filter<F>>,
pub(crate) filters: Vec<Filter<F>>,
}

impl<'a, F: RichField + Extendable<D>, const D: usize>
CtlCheckVars<'a, F, F::Extension, F::Extension, D>
{
/// Extracts the `CtlCheckVars` for each STARK.
pub fn from_proofs<C: GenericConfig<D, F = F>, const N: usize>(
proofs: &[StarkProofWithMetadata<F, C, D>; N],
openings: &[StarkOpeningSet<F, D>],
cross_table_lookups: &'a [CrossTableLookup<F>],
ctl_challenges: &'a GrandProductChallengeSet<F>,
num_lookup_columns: &[usize; N],
num_helper_ctl_columns: &Vec<[usize; N]>,
) -> [Vec<Self>; N] {
let mut ctl_vars_per_table = [0; N].map(|_| vec![]);
// If there are no auxiliary polys in the proofs `openings`,
// return early. The verifier will reject the proofs when
// If there are no auxiliary polys in the `openings`,
// return early. The verifier will reject the proof when
// calling `validate_proof_shape`.
if proofs
if openings
.iter()
.any(|p| p.proof.openings.auxiliary_polys.is_none())
.all(|opening| opening.auxiliary_polys.is_none())
{
return ctl_vars_per_table;
}
Expand All @@ -519,17 +525,17 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
}

// Get all cross-table lookup polynomial openings for each STARK proof.
let ctl_zs = proofs
let ctl_zs = openings
.iter()
.zip(num_lookup_columns)
.map(|(p, &num_lookup)| {
let openings = &p.proof.openings;
.map(|(opening, &num_lookup)| {
let ctl_zs = &opening
let ctl_zs = &opening
.auxiliary_polys
.as_ref()
.expect("We cannot have CTls without auxiliary polynomials.")[num_lookup..];
let ctl_zs_next = &openings
let ctl_zs_next = &opening
.auxiliary_polys_next
.as_ref()
.expect("We cannot have CTls without auxiliary polynomials.")[num_lookup..];
Expand Down Expand Up @@ -575,10 +581,10 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
}
});
let mut columns = Vec::with_capacity(count);
let mut filter = Vec::with_capacity(count);
let mut filters = Vec::with_capacity(count);
for (col, filt) in cols_filts {
columns.push(&col[..]);
filter.push(filt.clone());
filters.push(filt.clone());
}
let helper_columns = ctl_zs[table]
[start_indices[table]..start_indices[table] + num_ctls[table]]
Expand All @@ -595,7 +601,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
next_z: *looking_z_next,
challenges,
columns,
filter,
filters,
});
}

Expand All @@ -606,14 +612,14 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
z_indices[looked_table.table] += 1;

let columns = vec![&looked_table.columns[..]];
let filter = vec![looked_table.filter.clone()];
let filters = vec![looked_table.filter.clone()];
ctl_vars_per_table[looked_table.table].push(Self {
helper_columns: vec![],
local_z: *looked_z,
next_z: *looked_z_next,
challenges,
columns,
filter,
filters,
});
}
}
Expand Down Expand Up @@ -650,7 +656,7 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
next_z,
challenges,
columns,
filter,
filters,
} = lookup_vars;

// Compute all linear combinations on the current table, and combine them using the challenge.
Expand All @@ -665,7 +671,7 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const

// Check helper columns.
eval_helper_columns(
filter,
filters,
&evals,
local_values,
next_values,
Expand All @@ -685,8 +691,8 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
let combin0 = challenges.combine(&evals[0]);
let combin1 = challenges.combine(&evals[1]);

let f0 = filter[0].eval_filter(local_values, next_values);
let f1 = filter[1].eval_filter(local_values, next_values);
let f0 = filters[0].eval_filter(local_values, next_values);
let f1 = filters[1].eval_filter(local_values, next_values);

consumer
.constraint_last_row(combin0 * combin1 * *local_z - f0 * combin1 - f1 * combin0);
Expand All @@ -695,7 +701,7 @@ pub(crate) fn eval_cross_table_lookup_checks<F, FE, P, S, const D: usize, const
);
} else {
let combin0 = challenges.combine(&evals[0]);
let f0 = filter[0].eval_filter(local_values, next_values);
let f0 = filters[0].eval_filter(local_values, next_values);
consumer.constraint_last_row(combin0 * *local_z - f0);
consumer.constraint_transition(combin0 * (*local_z - *next_z) - f0);
}
Expand Down
2 changes: 0 additions & 2 deletions starky/src/get_challenges.rs
Original file line number Diff line number Diff line change
Expand Up @@ -430,7 +430,6 @@ where
&self,
challenger: &mut Challenger<F, C::Hasher>,
challenges: Option<&GrandProductChallengeSet<F>>,
// ignore_trace_cap: bool,
config: &StarkConfig,
) -> StarkProofChallenges<F, D> {
let degree_bits = self.recover_degree_bits(config);
Expand Down Expand Up @@ -475,7 +474,6 @@ where
&self,
challenger: &mut Challenger<F, C::Hasher>,
challenges: Option<&GrandProductChallengeSet<F>>,
// ignore_trace_cap: bool,
config: &StarkConfig,
) -> StarkProofChallenges<F, D> {
self.proof.get_challenges(challenger, challenges, config)
Expand Down
28 changes: 0 additions & 28 deletions starky/src/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -144,34 +144,6 @@ pub struct StarkProofWithPublicInputsTarget<const D: usize> {
pub public_inputs: Vec<Target>,
}

/// A compressed proof format of a single STARK.
#[derive(Debug, Clone)]
pub struct CompressedStarkProof<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
/// Merkle cap of LDEs of trace values.
pub trace_cap: MerkleCap<F, C::Hasher>,
/// Purported values of each polynomial at the challenge point.
pub openings: StarkOpeningSet<F, D>,
/// A batch FRI argument for all openings.
pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}

/// A compressed [`StarkProof`] format of a single STARK with its public inputs.
#[derive(Debug, Clone)]
pub struct CompressedStarkProofWithPublicInputs<
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
const D: usize,
> {
/// A compressed STARK proof.
pub proof: CompressedStarkProof<F, C, D>,
/// Public inputs for this compressed STARK proof.
pub public_inputs: Vec<F>,
}

/// A [`StarkProof`] along with metadata about the initial Fiat-Shamir state, which is used when
/// creating a recursive wrapper proof around a STARK proof.
#[derive(Debug, Clone)]
Expand Down
4 changes: 2 additions & 2 deletions starky/src/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -466,7 +466,7 @@ where
[num_lookup_columns + total_num_helper_cols + i],
challenges: zs_columns.challenge,
columns: zs_columns.columns.clone(),
filter: zs_columns.filter.clone(),
filters: zs_columns.filters.clone(),
};

start_index += num_ctl_helper_cols;
Expand Down Expand Up @@ -624,7 +624,7 @@ fn check_constraints<'a, F, C, S, const D: usize>(
[num_lookup_columns + total_num_helper_cols + iii],
challenges: zs_columns.challenge,
columns: zs_columns.columns.clone(),
filter: zs_columns.filter.clone(),
filters: zs_columns.filters.clone(),
};

start_index += num_helper_cols;
Expand Down
Loading

0 comments on commit e73c257

Please sign in to comment.