From 0b81952889df1572e17d4029fa70676e7591b85c Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Wed, 16 Oct 2024 22:57:51 +0100 Subject: [PATCH 01/12] feat: add chunk kind --- prover/src/aggregator/prover.rs | 24 +++++++++++++++++------- prover/src/consts.rs | 5 +++++ prover/src/lib.rs | 2 +- prover/src/proof.rs | 2 +- prover/src/proof/chunk.rs | 19 ++++++++++++++++++- prover/src/types.rs | 6 ++++-- prover/src/zkevm/prover.rs | 1 + 7 files changed, 47 insertions(+), 12 deletions(-) diff --git a/prover/src/aggregator/prover.rs b/prover/src/aggregator/prover.rs index e4b42295df..e648f7dd44 100644 --- a/prover/src/aggregator/prover.rs +++ b/prover/src/aggregator/prover.rs @@ -10,18 +10,22 @@ use snark_verifier_sdk::Snark; use crate::{ common, config::LayerId, - consts::{BATCH_KECCAK_ROW, BATCH_VK_FILENAME, BUNDLE_VK_FILENAME, CHUNK_PROTOCOL_FILENAME}, + consts::{ + BATCH_KECCAK_ROW, BATCH_VK_FILENAME, BUNDLE_VK_FILENAME, FD_HALO2_CHUNK_PROTOCOL, + FD_SP1_CHUNK_PROTOCOL, + }, io::{force_to_read, try_to_read}, proof::BundleProof, types::BundleProvingTask, - BatchProof, BatchProvingTask, ChunkProof, + BatchProof, BatchProvingTask, ChunkKind, ChunkProof, }; #[derive(Debug)] pub struct Prover<'params> { // Make it public for testing with inner functions (unnecessary for FFI). pub prover_impl: common::Prover<'params>, - pub chunk_protocol: Vec<u8>, + pub halo2_protocol: Vec<u8>, + pub sp1_protocol: Vec<u8>, raw_vk_batch: Option<Vec<u8>>, raw_vk_bundle: Option<Vec<u8>>, } @@ -35,7 +39,8 @@ impl<'params> Prover<'params> { env::set_var("KECCAK_ROWS", BATCH_KECCAK_ROW.to_string()); let prover_impl = common::Prover::from_params_map(params_map); - let chunk_protocol = force_to_read(assets_dir, &CHUNK_PROTOCOL_FILENAME); + let halo2_protocol = force_to_read(assets_dir, &FD_HALO2_CHUNK_PROTOCOL); + let sp1_protocol = force_to_read(assets_dir, &FD_SP1_CHUNK_PROTOCOL); let raw_vk_batch = try_to_read(assets_dir, &BATCH_VK_FILENAME); let raw_vk_bundle = try_to_read(assets_dir, &BUNDLE_VK_FILENAME); @@ -56,7 +61,8 @@ impl<'params> Prover<'params> { Self { prover_impl, - chunk_protocol, + halo2_protocol, + sp1_protocol, raw_vk_batch, raw_vk_bundle, } @@ -65,12 +71,16 @@ impl<'params> Prover<'params> { // Return true if chunk proofs are valid (same protocol), false otherwise. pub fn check_protocol_of_chunks(&self, chunk_proofs: &[ChunkProof]) -> bool { chunk_proofs.iter().enumerate().all(|(i, proof)| { - let result = proof.protocol == self.chunk_protocol; + let protocol_expected = match proof.chunk_kind { + ChunkKind::Halo2 => &self.halo2_protocol, + ChunkKind::Sp1 => &self.sp1_protocol, + }; + let result = &proof.protocol == protocol_expected; if !result { log::error!( "Non-match protocol of chunk-proof index-{}: expected = {:x}, actual = {:x}", i, - Sha256::digest(&self.chunk_protocol), + Sha256::digest(protocol_expected), Sha256::digest(&proof.protocol), ); } diff --git a/prover/src/consts.rs b/prover/src/consts.rs index 978594092d..a919f86cdf 100644 --- a/prover/src/consts.rs +++ b/prover/src/consts.rs @@ -16,6 +16,11 @@ pub fn chunk_vk_filename() -> String { pub static CHUNK_PROTOCOL_FILENAME: LazyLock<String> = LazyLock::new(|| read_env_var("CHUNK_PROTOCOL_FILENAME", "chunk.protocol".to_string())); +pub static FD_HALO2_CHUNK_PROTOCOL: LazyLock<String> = + LazyLock::new(|| read_env_var("HALO2_CHUNK_PROTOCOL", "chunk_halo2.protocol".to_string())); +pub static FD_SP1_CHUNK_PROTOCOL: LazyLock<String> = + LazyLock::new(|| read_env_var("SP1_CHUNK_PROTOCOL", "chunk_sp1.protocol".to_string())); + pub static CHUNK_VK_FILENAME: LazyLock<String> = LazyLock::new(chunk_vk_filename); pub static BATCH_VK_FILENAME: LazyLock<String> = LazyLock::new(batch_vk_filename); pub static BUNDLE_VK_FILENAME: LazyLock<String> = LazyLock::new(bundle_vk_filename); diff --git a/prover/src/lib.rs b/prover/src/lib.rs index a49b88e8f8..cfcd316b59 100644 --- a/prover/src/lib.rs +++ b/prover/src/lib.rs @@ -26,7 +26,7 @@ pub use common::{ChunkInfo, CompressionCircuit}; pub use eth_types; pub use eth_types::l2_types::BlockTrace; pub use evm::deploy_and_call; -pub use proof::{BatchProof, BundleProof, ChunkProof, EvmProof, Proof}; +pub use proof::{BatchProof, BundleProof, ChunkKind, ChunkProof, EvmProof, Proof}; pub use snark_verifier_sdk::{CircuitExt, Snark}; pub use types::{BatchProvingTask, BundleProvingTask, ChunkProvingTask, WitnessBlock}; pub use zkevm_circuits; diff --git a/prover/src/proof.rs b/prover/src/proof.rs index 4d5d8f81f9..74e640468c 100644 --- a/prover/src/proof.rs +++ b/prover/src/proof.rs @@ -19,7 +19,7 @@ mod evm; pub use batch::BatchProof; pub use bundle::BundleProof; -pub use chunk::{compare_chunk_info, ChunkProof}; +pub use chunk::{compare_chunk_info, ChunkKind, ChunkProof}; pub use evm::EvmProof; #[derive(Clone, Debug, Default, Deserialize, Serialize)] diff --git a/prover/src/proof/chunk.rs b/prover/src/proof/chunk.rs index 8d760725d7..4aefdb4f58 100644 --- a/prover/src/proof/chunk.rs +++ b/prover/src/proof/chunk.rs @@ -7,6 +7,21 @@ use serde_derive::{Deserialize, Serialize}; use snark_verifier::Protocol; use snark_verifier_sdk::Snark; +/// The innermost SNARK belongs to the following variants. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub enum ChunkKind { + /// halo2-based SuperCircuit. + Halo2, + /// sp1-based STARK with a halo2-backend. + Sp1, +} + +impl Default for ChunkKind { + fn default() -> Self { + Self::Halo2 + } +} + #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ChunkProof { #[serde(with = "base64")] @@ -14,6 +29,7 @@ pub struct ChunkProof { #[serde(flatten)] pub proof: Proof, pub chunk_info: ChunkInfo, + pub chunk_kind: ChunkKind, #[serde(default)] pub row_usages: Vec<SubCircuitRowUsage>, } @@ -56,6 +72,7 @@ impl ChunkProof { snark: Snark, pk: Option<&ProvingKey<G1Affine>>, chunk_info: ChunkInfo, + chunk_kind: ChunkKind, row_usages: Vec<SubCircuitRowUsage>, ) -> Result<Self> { let protocol = serde_json::to_vec(&snark.protocol)?; @@ -65,6 +82,7 @@ impl ChunkProof { protocol, proof, chunk_info, + chunk_kind, row_usages, }) } @@ -79,7 +97,6 @@ impl ChunkProof { // Dump vk and protocol. dump_vk(dir, &filename, &self.proof.vk); dump_data(dir, &format!("chunk_{filename}.protocol"), &self.protocol); - dump_as_json(dir, &filename, &self) } diff --git a/prover/src/types.rs b/prover/src/types.rs index bf661f9cc2..891ac2495b 100644 --- a/prover/src/types.rs +++ b/prover/src/types.rs @@ -11,20 +11,22 @@ pub struct BlockTraceJsonRpcResult { } pub use eth_types::base64; -use crate::{BatchProof, ChunkProof}; +use crate::{BatchProof, ChunkKind, ChunkProof}; #[derive(Debug, Clone, Deserialize, Serialize)] pub struct ChunkProvingTask { /// Prover can check `chunk_info` is consistent with block traces pub chunk_info: Option<ChunkInfo>, pub block_traces: Vec<BlockTrace>, + pub chunk_kind: ChunkKind, } impl ChunkProvingTask { - pub fn from(block_traces: Vec<BlockTrace>) -> Self { + pub fn new(block_traces: Vec<BlockTrace>, chunk_kind: ChunkKind) -> Self { Self { block_traces, chunk_info: None, + chunk_kind, } } pub fn is_empty(&self) -> bool { diff --git a/prover/src/zkevm/prover.rs b/prover/src/zkevm/prover.rs index 4d9f71b8b4..40fb805ed9 100644 --- a/prover/src/zkevm/prover.rs +++ b/prover/src/zkevm/prover.rs @@ -106,6 +106,7 @@ impl<'params> Prover<'params> { snark, self.prover_impl.pk(LayerId::Layer2.id()), chunk_info, + chunk.chunk_kind, row_usage, ); From ca06d528c34d37b64c89cdb092cffde07473dc0c Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Thu, 17 Oct 2024 14:12:48 +0100 Subject: [PATCH 02/12] initial work | to be tested --- Cargo.lock | 4 +- aggregator/src/aggregation/circuit.rs | 127 ++++++++++++-- aggregator/src/constants.rs | 241 ++++++++++++++++++++++++++ 3 files changed, 357 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6155537ce5..7506201fcf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4594,7 +4594,7 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "snark-verifier" version = "0.1.0" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#572ef69d1595fca82213d3b05e859eaf355a5fa1" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#948671cac73f11e66187a15483e38ab3626dc2a3" dependencies = [ "bytes", "ethereum-types", @@ -4617,7 +4617,7 @@ dependencies = [ [[package]] name = "snark-verifier-sdk" version = "0.0.1" -source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#572ef69d1595fca82213d3b05e859eaf355a5fa1" +source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#948671cac73f11e66187a15483e38ab3626dc2a3" dependencies = [ "bincode", "ethereum-types", diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index b9a3cc7c79..3901894645 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -1,5 +1,6 @@ use ark_std::{end_timer, start_timer}; use halo2_proofs::{ + arithmetic::Field, circuit::{Layouter, SimpleFloorPlanner, Value}, halo2curves::bn256::{Bn256, Fr, G1Affine}, plonk::{Circuit, ConstraintSystem, Error, Selector}, @@ -8,17 +9,25 @@ use halo2_proofs::{ use itertools::Itertools; use rand::Rng; use snark_verifier::{ - loader::halo2::{ - halo2_ecc::{ - ecc::EccChip, - fields::fp::FpConfig, - halo2_base::{AssignedValue, Context, ContextParams}, + loader::{ + halo2::{ + halo2_ecc::{ + ecc::EccChip, + fields::fp::FpConfig, + halo2_base::{ + gates::GateInstructions, AssignedValue, Context, ContextParams, + QuantumCell::Existing, + }, + }, + Halo2Loader, }, - Halo2Loader, + EcPointLoader, ScalarLoader, }, pcs::kzg::{Bdfg21, Kzg, KzgSuccinctVerifyingKey}, }; -use snark_verifier_sdk::{aggregate, flatten_accumulator, CircuitExt, Snark, SnarkWitness}; +use snark_verifier_sdk::{ + aggregate_as_witness, flatten_accumulator, CircuitExt, Snark, SnarkWitness, +}; use std::{env, fs::File, rc::Rc}; use zkevm_circuits::util::Challenges; @@ -26,7 +35,10 @@ use crate::{ aggregation::{decoder::WORKED_EXAMPLE, witgen::process, BatchCircuitConfig, BatchData}, batch::BatchHash, blob_consistency::BlobConsistencyConfig, - constants::{ACC_LEN, DIGEST_LEN}, + constants::{ + ACC_LEN, DIGEST_LEN, PREPROCESSED_POLYS_HALO2, PREPROCESSED_POLYS_SP1, + TRANSCRIPT_INIT_STATE_HALO2, TRANSCRIPT_INIT_STATE_SP1, + }, core::{assign_batch_hashes, extract_proof_and_instances_with_pairing_check}, util::parse_hash_digest_cells, witgen::{zstd_encode, MultiBlockProcessResult}, @@ -216,7 +228,12 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { // - new accumulator // log::debug!("aggregation: chunk aggregation"); - let (assigned_aggregation_instances, acc) = aggregate::<Kzg<Bn256, Bdfg21>>( + let ( + assigned_aggregation_instances, + acc, + preprocessed_poly_sets, + transcript_init_states, + ) = aggregate_as_witness::<Kzg<Bn256, Bdfg21>>( &self.svk, &loader, &self.snarks_with_padding, @@ -226,6 +243,93 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::trace!("{}-th instance: {:?}", i, e.value) } + // We must ensure that the commitments to preprocessed polynomial and initial + // state of transcripts for every SNARK that is being aggregated belongs to the + // fixed set of values expected. + // + // Commitments to the preprocessed polynomials. + let mut ctx = loader.ctx_mut(); + let mut preprocessed_polys_halo2 = Vec::with_capacity(7); + let mut preprocessed_polys_sp1 = Vec::with_capacity(7); + for preprocessed_poly in PREPROCESSED_POLYS_HALO2.iter() { + preprocessed_polys_halo2.push( + loader + .ec_point_load_const(preprocessed_poly) + .into_assigned(), + ); + } + for preprocessed_poly in PREPROCESSED_POLYS_SP1.iter() { + preprocessed_polys_sp1.push( + loader + .ec_point_load_const(preprocessed_poly) + .into_assigned(), + ); + } + for preprocessed_polys in preprocessed_poly_sets.iter() { + let mut preprocessed_check_1 = + config.flex_gate().load_constant(&mut ctx, Fr::ONE); + let mut preprocessed_check_2 = + config.flex_gate().load_constant(&mut ctx, Fr::ONE); + for ((commitment, comm_halo2), comm_sp1) in preprocessed_polys + .iter() + .zip_eq(preprocessed_polys_halo2.iter()) + .zip_eq(preprocessed_polys_sp1.iter()) + { + let check_1 = + config.ecc_chip().is_equal(&mut ctx, commitment, comm_halo2); + let check_2 = + config.ecc_chip().is_equal(&mut ctx, commitment, comm_sp1); + preprocessed_check_1 = config.flex_gate().and( + &mut ctx, + Existing(preprocessed_check_1), + Existing(check_1), + ); + preprocessed_check_2 = config.flex_gate().and( + &mut ctx, + Existing(preprocessed_check_2), + Existing(check_2), + ); + } + let preprocessed_check = config.flex_gate().or( + &mut ctx, + Existing(preprocessed_check_1), + Existing(preprocessed_check_2), + ); + config + .flex_gate() + .assert_is_const(&mut ctx, &preprocessed_check, Fr::ONE); + } + + // Transcript initial state. + let transcript_init_state_halo2 = loader + .load_const(&TRANSCRIPT_INIT_STATE_HALO2) + .into_assigned(); + let transcript_init_state_sp1 = loader + .load_const(&TRANSCRIPT_INIT_STATE_SP1) + .into_assigned(); + for transcript_init_state in transcript_init_states { + let transcript_init_state = transcript_init_state + .expect("SNARK should have an initial state for transcript"); + let transcript_check_1 = config.flex_gate().is_equal( + &mut ctx, + Existing(transcript_init_state), + Existing(transcript_init_state_halo2), + ); + let transcript_check_2 = config.flex_gate().is_equal( + &mut ctx, + Existing(transcript_init_state), + Existing(transcript_init_state_sp1), + ); + let transcript_check = config.flex_gate().or( + &mut ctx, + Existing(transcript_check_1), + Existing(transcript_check_2), + ); + config + .flex_gate() + .assert_is_const(&mut ctx, &transcript_check, Fr::ONE); + } + // extract the following cells for later constraints // - the accumulators // - the public inputs from each snark @@ -239,11 +343,8 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .flat_map(|instance_column| instance_column.iter().skip(ACC_LEN)), ); - loader - .ctx_mut() - .print_stats(&["snark aggregation [chunks -> batch]"]); + ctx.print_stats(&["snark aggregation [chunks -> batch]"]); - let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); log::debug!("batching: assigning barycentric"); let barycentric = config.blob_consistency_config.assign_barycentric( &mut ctx, diff --git a/aggregator/src/constants.rs b/aggregator/src/constants.rs index 7d5d633172..3fa61b9c63 100644 --- a/aggregator/src/constants.rs +++ b/aggregator/src/constants.rs @@ -1,3 +1,6 @@ +use halo2_proofs::halo2curves::bn256::{Fq, Fr, G1Affine}; +use std::sync::LazyLock; + // A chain_id is u64 and uses 8 bytes pub(crate) const CHAIN_ID_LEN: usize = 8; @@ -88,3 +91,241 @@ pub const MAX_AGG_SNARKS: usize = 45; // Number of bytes in a u256. pub const N_BYTES_U256: usize = 32; + +/// The [`Batch Circuit`] supports aggregation of up to [`MAX_AGG_SNARKS`] SNARKs, where either +/// SNARK is of 2 kinds, namely: +/// +/// 1. halo2-based [`SuperCircuit`] -> [`CompressionCircuit`] (wide) -> `CompressionCircuit` (thin) +/// 2. sp1-based STARK -> halo2-based backend -> `CompressionCircuit` (thin) +/// +/// For each SNARK witness provided for aggregation, we require that the commitments to the +/// preprocessed polynomials and the transcript's initial state belong to a fixed set, one +/// belonging to each of the above SNARK kinds. +/// +/// Represents the fixed commitments to the preprocessed polynomials for [`ChunkKind::Halo2`]. +pub static PREPROCESSED_POLYS_HALO2: LazyLock<Vec<G1Affine>> = LazyLock::new(|| { + vec![ + G1Affine { + x: Fq::from_raw([ + 4541478842587617678, + 7188475718571567728, + 239378696823010373, + 179342154257362491, + ]), + y: Fq::from_raw([ + 2102960765482384605, + 18163083796572731063, + 17943480866217266774, + 85103875006328896, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 4093061539863783111, + 194291308596025748, + 11369022891089479442, + 1463255879024205618, + ]), + y: Fq::from_raw([ + 16700532425791245072, + 7378851796565816368, + 17346566642486298786, + 970075911594951367, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 6315321914675870134, + 1582860689439567350, + 15739400164232855740, + 1223439486676386684, + ]), + y: Fq::from_raw([ + 13096458462745381806, + 11924041770036958177, + 12977682459629830027, + 1912305792904139855, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 408389462232057354, + 10888945426883186814, + 9738219244958428216, + 3343776552242400005, + ]), + y: Fq::from_raw([ + 2204271371398632469, + 3229396059398198493, + 15594587291868236687, + 1533897200726072018, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 14778744839025706557, + 7305439111399726684, + 14617960481571289161, + 2468165792866445337, + ]), + y: Fq::from_raw([ + 15298503060320124348, + 16948478742631860463, + 10983004142833888255, + 70418435200471011, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 10682202061899776328, + 12746133157404224107, + 10194303803070492548, + 3314924930376820519, + ]), + y: Fq::from_raw([ + 10891118471780302094, + 7166241992404117528, + 6263062724619736264, + 340188705380829494, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 9240035288364311447, + 16941312289372401027, + 15915874119483357666, + 2647144763697367565, + ]), + y: Fq::from_raw([ + 11086173928117658245, + 3518116464318723439, + 13832518766777794466, + 2351978436917361063, + ]), + }, + ] +}); + +/// Represents the fixed commitments to the preprocessed polynomials for [`ChunkKind::Sp1`]. +pub static PREPROCESSED_POLYS_SP1: LazyLock<Vec<G1Affine>> = LazyLock::new(|| { + vec![ + G1Affine { + x: Fq::from_raw([ + 4541478842587617678, + 7188475718571567728, + 239378696823010373, + 179342154257362491, + ]), + y: Fq::from_raw([ + 2102960765482384605, + 18163083796572731063, + 17943480866217266774, + 85103875006328896, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 14482602916982982999, + 2357100016965177442, + 18431616353722806990, + 1632384859399911320, + ]), + y: Fq::from_raw([ + 9341870623509249436, + 10625117674485803345, + 11602556742997327241, + 588490870283709105, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 1695984461415246698, + 16627531726212442277, + 7436715082446168910, + 1334937499741146447, + ]), + y: Fq::from_raw([ + 10378694966954049300, + 14869436676005235944, + 8183056858201575129, + 2775754316985040075, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 10696015357775661092, + 16365831078551355495, + 6432053641301558040, + 3332063291233986333, + ]), + y: Fq::from_raw([ + 15981342105615776301, + 12342977772828558934, + 12118653449154188133, + 528988368198712851, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 4303830904018986544, + 12892574281015932006, + 12553056811812850723, + 3211210156168296116, + ]), + y: Fq::from_raw([ + 4036545931324298107, + 7599907392816691312, + 15293245440448741876, + 212143551489911410, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 10931155675221794876, + 4312691987032924781, + 9804797475001633245, + 3451890802936893314, + ]), + y: Fq::from_raw([ + 11180962733343570413, + 10484712170183330434, + 14444948151863902680, + 2123487521383807780, + ]), + }, + G1Affine { + x: Fq::from_raw([ + 1814367689437931729, + 8489483461414090990, + 10000388380055359653, + 1286074470617787276, + ]), + y: Fq::from_raw([ + 7726546312100213647, + 1034780786427294399, + 6531068821869198065, + 517274402271116562, + ]), + }, + ] +}); + +/// Represents the initial state of the transcript for [`ChunkKind::Halo2`]. +pub static TRANSCRIPT_INIT_STATE_HALO2: LazyLock<Fr> = LazyLock::new(|| { + Fr::from_raw([ + 3505826241380660566, + 11473746322117040456, + 14075887197298535585, + 1737617936020314372, + ]) +}); + +/// Represents the initial state of the transcript for [`ChunkKind::Sp1`]. +pub static TRANSCRIPT_INIT_STATE_SP1: LazyLock<Fr> = LazyLock::new(|| { + Fr::from_raw([ + 1678899198020618715, + 10231258143962228858, + 12365017456265435574, + 841984517048583699, + ]) +}); From 9c7120fc7449aae2ff275d1350dc715f987d741f Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Thu, 17 Oct 2024 15:56:24 +0100 Subject: [PATCH 03/12] fix: borrow ctx from loader only once --- aggregator/src/aggregation/circuit.rs | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 3901894645..184215a154 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -247,8 +247,8 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { // state of transcripts for every SNARK that is being aggregated belongs to the // fixed set of values expected. // - // Commitments to the preprocessed polynomials. - let mut ctx = loader.ctx_mut(); + // First we load the constants. + log::info!("populating constants"); let mut preprocessed_polys_halo2 = Vec::with_capacity(7); let mut preprocessed_polys_sp1 = Vec::with_capacity(7); for preprocessed_poly in PREPROCESSED_POLYS_HALO2.iter() { @@ -265,6 +265,16 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .into_assigned(), ); } + let transcript_init_state_halo2 = loader + .load_const(&TRANSCRIPT_INIT_STATE_HALO2) + .into_assigned(); + let transcript_init_state_sp1 = loader + .load_const(&TRANSCRIPT_INIT_STATE_SP1) + .into_assigned(); + log::info!("populating constants OK"); + + // Commitments to the preprocessed polynomials. + let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); for preprocessed_polys in preprocessed_poly_sets.iter() { let mut preprocessed_check_1 = config.flex_gate().load_constant(&mut ctx, Fr::ONE); @@ -301,12 +311,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { } // Transcript initial state. - let transcript_init_state_halo2 = loader - .load_const(&TRANSCRIPT_INIT_STATE_HALO2) - .into_assigned(); - let transcript_init_state_sp1 = loader - .load_const(&TRANSCRIPT_INIT_STATE_SP1) - .into_assigned(); for transcript_init_state in transcript_init_states { let transcript_init_state = transcript_init_state .expect("SNARK should have an initial state for transcript"); From 1dc2f9e8e06f723d0d430ca2ebea3f4d51eaf12b Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Thu, 17 Oct 2024 17:29:15 +0100 Subject: [PATCH 04/12] more logs --- aggregator/src/aggregation/circuit.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 184215a154..9a895f5423 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -251,23 +251,28 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::info!("populating constants"); let mut preprocessed_polys_halo2 = Vec::with_capacity(7); let mut preprocessed_polys_sp1 = Vec::with_capacity(7); - for preprocessed_poly in PREPROCESSED_POLYS_HALO2.iter() { + for (i, preprocessed_poly) in PREPROCESSED_POLYS_HALO2.iter().enumerate() { + log::debug!("load const {i}"); preprocessed_polys_halo2.push( loader .ec_point_load_const(preprocessed_poly) .into_assigned(), ); + log::debug!("load const {i} OK"); } - for preprocessed_poly in PREPROCESSED_POLYS_SP1.iter() { + for (i, preprocessed_poly) in PREPROCESSED_POLYS_SP1.iter().enumerate() { + log::debug!("load const (sp1) {i}"); preprocessed_polys_sp1.push( loader .ec_point_load_const(preprocessed_poly) .into_assigned(), ); + log::debug!("load const (sp1) {i} OK"); } let transcript_init_state_halo2 = loader .load_const(&TRANSCRIPT_INIT_STATE_HALO2) .into_assigned(); + log::debug!("load transcript OK"); let transcript_init_state_sp1 = loader .load_const(&TRANSCRIPT_INIT_STATE_SP1) .into_assigned(); From 0dea436c09b02cbe8d8f0d7beb66b5189784575b Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Fri, 18 Oct 2024 11:12:01 +0100 Subject: [PATCH 05/12] read from protocol json files --- aggregator/src/aggregation/circuit.rs | 18 ++++++++------- aggregator/src/constants.rs | 33 +++++++++++++++++++++++++++ 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 9a895f5423..e0260efbf1 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -35,10 +35,7 @@ use crate::{ aggregation::{decoder::WORKED_EXAMPLE, witgen::process, BatchCircuitConfig, BatchData}, batch::BatchHash, blob_consistency::BlobConsistencyConfig, - constants::{ - ACC_LEN, DIGEST_LEN, PREPROCESSED_POLYS_HALO2, PREPROCESSED_POLYS_SP1, - TRANSCRIPT_INIT_STATE_HALO2, TRANSCRIPT_INIT_STATE_SP1, - }, + constants::{ACC_LEN, DIGEST_LEN, FIXED_PROTOCOL_HALO2, FIXED_PROTOCOL_SP1}, core::{assign_batch_hashes, extract_proof_and_instances_with_pairing_check}, util::parse_hash_digest_cells, witgen::{zstd_encode, MultiBlockProcessResult}, @@ -251,7 +248,12 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::info!("populating constants"); let mut preprocessed_polys_halo2 = Vec::with_capacity(7); let mut preprocessed_polys_sp1 = Vec::with_capacity(7); - for (i, preprocessed_poly) in PREPROCESSED_POLYS_HALO2.iter().enumerate() { + let (fixed_preprocessed_polys_halo2, fixed_transcript_init_state_halo2) = + FIXED_PROTOCOL_HALO2.clone(); + let (fixed_preprocessed_polys_sp1, fixed_transcript_init_state_sp1) = + FIXED_PROTOCOL_SP1.clone(); + for (i, preprocessed_poly) in fixed_preprocessed_polys_halo2.iter().enumerate() + { log::debug!("load const {i}"); preprocessed_polys_halo2.push( loader @@ -260,7 +262,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { ); log::debug!("load const {i} OK"); } - for (i, preprocessed_poly) in PREPROCESSED_POLYS_SP1.iter().enumerate() { + for (i, preprocessed_poly) in fixed_preprocessed_polys_sp1.iter().enumerate() { log::debug!("load const (sp1) {i}"); preprocessed_polys_sp1.push( loader @@ -270,11 +272,11 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::debug!("load const (sp1) {i} OK"); } let transcript_init_state_halo2 = loader - .load_const(&TRANSCRIPT_INIT_STATE_HALO2) + .load_const(&fixed_transcript_init_state_halo2) .into_assigned(); log::debug!("load transcript OK"); let transcript_init_state_sp1 = loader - .load_const(&TRANSCRIPT_INIT_STATE_SP1) + .load_const(&fixed_transcript_init_state_sp1) .into_assigned(); log::info!("populating constants OK"); diff --git a/aggregator/src/constants.rs b/aggregator/src/constants.rs index 3fa61b9c63..b64473421c 100644 --- a/aggregator/src/constants.rs +++ b/aggregator/src/constants.rs @@ -103,6 +103,39 @@ pub const N_BYTES_U256: usize = 32; /// belonging to each of the above SNARK kinds. /// /// Represents the fixed commitments to the preprocessed polynomials for [`ChunkKind::Halo2`]. +pub type PreprocessedPolyCommits = Vec<G1Affine>; +pub type TranscriptInitState = Fr; +pub type FixedProtocol = (PreprocessedPolyCommits, TranscriptInitState); + +pub static FIXED_PROTOCOL_HALO2: LazyLock<FixedProtocol> = LazyLock::new(|| { + let path = + std::env::var("HALO2_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_halo2.protocol".to_string()); + let file = std::fs::File::open(&path).expect("could not open file"); + let reader = std::io::BufReader::new(file); + let protocol: snark_verifier::Protocol<G1Affine> = + serde_json::from_reader(reader).expect("could not deserialise protocol"); + ( + protocol.preprocessed, + protocol + .transcript_initial_state + .expect("transcript initial state is None"), + ) +}); +pub static FIXED_PROTOCOL_SP1: LazyLock<FixedProtocol> = LazyLock::new(|| { + let path = + std::env::var("SP1_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_sp1.protocol".to_string()); + let file = std::fs::File::open(&path).expect("could not open file"); + let reader = std::io::BufReader::new(file); + let protocol: snark_verifier::Protocol<G1Affine> = + serde_json::from_reader(reader).expect("could not deserialise protocol"); + ( + protocol.preprocessed, + protocol + .transcript_initial_state + .expect("transcript initial state is None"), + ) +}); + pub static PREPROCESSED_POLYS_HALO2: LazyLock<Vec<G1Affine>> = LazyLock::new(|| { vec![ G1Affine { From 64e78bfde529f24fb4114e4d70461e75df49a00f Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Fri, 18 Oct 2024 13:47:47 +0100 Subject: [PATCH 06/12] dir + name --- aggregator/src/constants.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/aggregator/src/constants.rs b/aggregator/src/constants.rs index b64473421c..2011313aef 100644 --- a/aggregator/src/constants.rs +++ b/aggregator/src/constants.rs @@ -108,8 +108,11 @@ pub type TranscriptInitState = Fr; pub type FixedProtocol = (PreprocessedPolyCommits, TranscriptInitState); pub static FIXED_PROTOCOL_HALO2: LazyLock<FixedProtocol> = LazyLock::new(|| { - let path = + let name = std::env::var("HALO2_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_halo2.protocol".to_string()); + let dir = + std::env::var("SCROLL_PROVER_ASSETS_DIR").unwrap_or("./tests/test_assets".to_string()); + let path = std::path::Path::new(&dir).join(name); let file = std::fs::File::open(&path).expect("could not open file"); let reader = std::io::BufReader::new(file); let protocol: snark_verifier::Protocol<G1Affine> = @@ -122,8 +125,11 @@ pub static FIXED_PROTOCOL_HALO2: LazyLock<FixedProtocol> = LazyLock::new(|| { ) }); pub static FIXED_PROTOCOL_SP1: LazyLock<FixedProtocol> = LazyLock::new(|| { - let path = + let name = std::env::var("SP1_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_sp1.protocol".to_string()); + let dir = + std::env::var("SCROLL_PROVER_ASSETS_DIR").unwrap_or("./tests/test_assets".to_string()); + let path = std::path::Path::new(&dir).join(name); let file = std::fs::File::open(&path).expect("could not open file"); let reader = std::io::BufReader::new(file); let protocol: snark_verifier::Protocol<G1Affine> = From 934e4622193ea2327cccbd55ab5a94aac4f223cb Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Fri, 18 Oct 2024 14:33:35 +0100 Subject: [PATCH 07/12] reference already duplicated somewhere in --- aggregator/src/aggregation/circuit.rs | 53 ++++++++++++++------------- 1 file changed, 27 insertions(+), 26 deletions(-) diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index e0260efbf1..e052f458bd 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -9,19 +9,16 @@ use halo2_proofs::{ use itertools::Itertools; use rand::Rng; use snark_verifier::{ - loader::{ - halo2::{ - halo2_ecc::{ - ecc::EccChip, - fields::fp::FpConfig, - halo2_base::{ - gates::GateInstructions, AssignedValue, Context, ContextParams, - QuantumCell::Existing, - }, + loader::halo2::{ + halo2_ecc::{ + ecc::EccChip, + fields::{fp::FpConfig, FieldChip}, + halo2_base::{ + gates::GateInstructions, utils::fe_to_biguint, AssignedValue, Context, + ContextParams, QuantumCell::Existing, }, - Halo2Loader, }, - EcPointLoader, ScalarLoader, + Halo2Loader, }, pcs::kzg::{Bdfg21, Kzg, KzgSuccinctVerifyingKey}, }; @@ -236,6 +233,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { &self.snarks_with_padding, self.as_proof(), ); + let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); for (i, e) in assigned_aggregation_instances[0].iter().enumerate() { log::trace!("{}-th instance: {:?}", i, e.value) } @@ -252,36 +250,37 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { FIXED_PROTOCOL_HALO2.clone(); let (fixed_preprocessed_polys_sp1, fixed_transcript_init_state_sp1) = FIXED_PROTOCOL_SP1.clone(); - for (i, preprocessed_poly) in fixed_preprocessed_polys_halo2.iter().enumerate() + for (i, &preprocessed_poly) in fixed_preprocessed_polys_halo2.iter().enumerate() { log::debug!("load const {i}"); preprocessed_polys_halo2.push( - loader - .ec_point_load_const(preprocessed_poly) - .into_assigned(), + config + .ecc_chip() + .assign_constant_point(&mut ctx, preprocessed_poly), ); log::debug!("load const {i} OK"); } - for (i, preprocessed_poly) in fixed_preprocessed_polys_sp1.iter().enumerate() { + for (i, &preprocessed_poly) in fixed_preprocessed_polys_sp1.iter().enumerate() { log::debug!("load const (sp1) {i}"); preprocessed_polys_sp1.push( - loader - .ec_point_load_const(preprocessed_poly) - .into_assigned(), + config + .ecc_chip() + .assign_constant_point(&mut ctx, preprocessed_poly), ); log::debug!("load const (sp1) {i} OK"); } - let transcript_init_state_halo2 = loader - .load_const(&fixed_transcript_init_state_halo2) - .into_assigned(); + let transcript_init_state_halo2 = config + .ecc_chip() + .field_chip() + .load_constant(&mut ctx, fe_to_biguint(&fixed_transcript_init_state_halo2)); log::debug!("load transcript OK"); - let transcript_init_state_sp1 = loader - .load_const(&fixed_transcript_init_state_sp1) - .into_assigned(); + let transcript_init_state_sp1 = config + .ecc_chip() + .field_chip() + .load_constant(&mut ctx, fe_to_biguint(&fixed_transcript_init_state_sp1)); log::info!("populating constants OK"); // Commitments to the preprocessed polynomials. - let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); for preprocessed_polys in preprocessed_poly_sets.iter() { let mut preprocessed_check_1 = config.flex_gate().load_constant(&mut ctx, Fr::ONE); @@ -318,6 +317,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { } // Transcript initial state. + /* for transcript_init_state in transcript_init_states { let transcript_init_state = transcript_init_state .expect("SNARK should have an initial state for transcript"); @@ -340,6 +340,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .flex_gate() .assert_is_const(&mut ctx, &transcript_check, Fr::ONE); } + */ // extract the following cells for later constraints // - the accumulators From aa66d11e8a8586a40fbd98edafe1736b89354eac Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Fri, 18 Oct 2024 15:04:16 +0100 Subject: [PATCH 08/12] minor updates, bump snark-verifier --- aggregator/src/aggregation/circuit.rs | 19 +- aggregator/src/constants.rs | 247 ++------------------------ 2 files changed, 26 insertions(+), 240 deletions(-) diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index e052f458bd..1290310891 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -14,11 +14,12 @@ use snark_verifier::{ ecc::EccChip, fields::{fp::FpConfig, FieldChip}, halo2_base::{ - gates::GateInstructions, utils::fe_to_biguint, AssignedValue, Context, - ContextParams, QuantumCell::Existing, + gates::{GateInstructions, RangeInstructions}, + AssignedValue, Context, ContextParams, + QuantumCell::Existing, }, }, - Halo2Loader, + Halo2Loader, IntegerInstructions, }, pcs::kzg::{Bdfg21, Kzg, KzgSuccinctVerifyingKey}, }; @@ -272,12 +273,18 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { let transcript_init_state_halo2 = config .ecc_chip() .field_chip() - .load_constant(&mut ctx, fe_to_biguint(&fixed_transcript_init_state_halo2)); + .range() + .gate() + .assign_constant(&mut ctx, fixed_transcript_init_state_halo2) + .expect("IntegerInstructions::assign_constant infallible"); log::debug!("load transcript OK"); let transcript_init_state_sp1 = config .ecc_chip() .field_chip() - .load_constant(&mut ctx, fe_to_biguint(&fixed_transcript_init_state_sp1)); + .range() + .gate() + .assign_constant(&mut ctx, fixed_transcript_init_state_sp1) + .expect("IntegerInstructions::assign_constant infallible"); log::info!("populating constants OK"); // Commitments to the preprocessed polynomials. @@ -317,7 +324,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { } // Transcript initial state. - /* for transcript_init_state in transcript_init_states { let transcript_init_state = transcript_init_state .expect("SNARK should have an initial state for transcript"); @@ -340,7 +346,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .flex_gate() .assert_is_const(&mut ctx, &transcript_check, Fr::ONE); } - */ // extract the following cells for later constraints // - the accumulators diff --git a/aggregator/src/constants.rs b/aggregator/src/constants.rs index 2011313aef..9637ec0226 100644 --- a/aggregator/src/constants.rs +++ b/aggregator/src/constants.rs @@ -1,4 +1,4 @@ -use halo2_proofs::halo2curves::bn256::{Fq, Fr, G1Affine}; +use halo2_proofs::halo2curves::bn256::{Fr, G1Affine}; use std::sync::LazyLock; // A chain_id is u64 and uses 8 bytes @@ -92,6 +92,14 @@ pub const MAX_AGG_SNARKS: usize = 45; // Number of bytes in a u256. pub const N_BYTES_U256: usize = 32; +/// Alias for a list of G1 points. +type PreprocessedPolyCommits = Vec<G1Affine>; +/// Alias for the transcript's initial state. +type TranscriptInitState = Fr; +/// Alias for the fixed part of the protocol which consists of the commitments to the preprocessed +/// polynomials and the initial state of the transcript. +type FixedProtocol = (PreprocessedPolyCommits, TranscriptInitState); + /// The [`Batch Circuit`] supports aggregation of up to [`MAX_AGG_SNARKS`] SNARKs, where either /// SNARK is of 2 kinds, namely: /// @@ -102,11 +110,8 @@ pub const N_BYTES_U256: usize = 32; /// preprocessed polynomials and the transcript's initial state belong to a fixed set, one /// belonging to each of the above SNARK kinds. /// -/// Represents the fixed commitments to the preprocessed polynomials for [`ChunkKind::Halo2`]. -pub type PreprocessedPolyCommits = Vec<G1Affine>; -pub type TranscriptInitState = Fr; -pub type FixedProtocol = (PreprocessedPolyCommits, TranscriptInitState); - +/// Represents the fixed commitments to the preprocessed polynomials and the initial state of the +/// transcript for [`ChunkKind::Halo2`]. pub static FIXED_PROTOCOL_HALO2: LazyLock<FixedProtocol> = LazyLock::new(|| { let name = std::env::var("HALO2_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_halo2.protocol".to_string()); @@ -124,6 +129,9 @@ pub static FIXED_PROTOCOL_HALO2: LazyLock<FixedProtocol> = LazyLock::new(|| { .expect("transcript initial state is None"), ) }); + +/// Represents the fixed commitments to the preprocessed polynomials and the initial state of the +/// transcript for [`ChunkKind::Sp1`]. pub static FIXED_PROTOCOL_SP1: LazyLock<FixedProtocol> = LazyLock::new(|| { let name = std::env::var("SP1_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_sp1.protocol".to_string()); @@ -141,230 +149,3 @@ pub static FIXED_PROTOCOL_SP1: LazyLock<FixedProtocol> = LazyLock::new(|| { .expect("transcript initial state is None"), ) }); - -pub static PREPROCESSED_POLYS_HALO2: LazyLock<Vec<G1Affine>> = LazyLock::new(|| { - vec![ - G1Affine { - x: Fq::from_raw([ - 4541478842587617678, - 7188475718571567728, - 239378696823010373, - 179342154257362491, - ]), - y: Fq::from_raw([ - 2102960765482384605, - 18163083796572731063, - 17943480866217266774, - 85103875006328896, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 4093061539863783111, - 194291308596025748, - 11369022891089479442, - 1463255879024205618, - ]), - y: Fq::from_raw([ - 16700532425791245072, - 7378851796565816368, - 17346566642486298786, - 970075911594951367, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 6315321914675870134, - 1582860689439567350, - 15739400164232855740, - 1223439486676386684, - ]), - y: Fq::from_raw([ - 13096458462745381806, - 11924041770036958177, - 12977682459629830027, - 1912305792904139855, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 408389462232057354, - 10888945426883186814, - 9738219244958428216, - 3343776552242400005, - ]), - y: Fq::from_raw([ - 2204271371398632469, - 3229396059398198493, - 15594587291868236687, - 1533897200726072018, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 14778744839025706557, - 7305439111399726684, - 14617960481571289161, - 2468165792866445337, - ]), - y: Fq::from_raw([ - 15298503060320124348, - 16948478742631860463, - 10983004142833888255, - 70418435200471011, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 10682202061899776328, - 12746133157404224107, - 10194303803070492548, - 3314924930376820519, - ]), - y: Fq::from_raw([ - 10891118471780302094, - 7166241992404117528, - 6263062724619736264, - 340188705380829494, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 9240035288364311447, - 16941312289372401027, - 15915874119483357666, - 2647144763697367565, - ]), - y: Fq::from_raw([ - 11086173928117658245, - 3518116464318723439, - 13832518766777794466, - 2351978436917361063, - ]), - }, - ] -}); - -/// Represents the fixed commitments to the preprocessed polynomials for [`ChunkKind::Sp1`]. -pub static PREPROCESSED_POLYS_SP1: LazyLock<Vec<G1Affine>> = LazyLock::new(|| { - vec![ - G1Affine { - x: Fq::from_raw([ - 4541478842587617678, - 7188475718571567728, - 239378696823010373, - 179342154257362491, - ]), - y: Fq::from_raw([ - 2102960765482384605, - 18163083796572731063, - 17943480866217266774, - 85103875006328896, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 14482602916982982999, - 2357100016965177442, - 18431616353722806990, - 1632384859399911320, - ]), - y: Fq::from_raw([ - 9341870623509249436, - 10625117674485803345, - 11602556742997327241, - 588490870283709105, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 1695984461415246698, - 16627531726212442277, - 7436715082446168910, - 1334937499741146447, - ]), - y: Fq::from_raw([ - 10378694966954049300, - 14869436676005235944, - 8183056858201575129, - 2775754316985040075, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 10696015357775661092, - 16365831078551355495, - 6432053641301558040, - 3332063291233986333, - ]), - y: Fq::from_raw([ - 15981342105615776301, - 12342977772828558934, - 12118653449154188133, - 528988368198712851, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 4303830904018986544, - 12892574281015932006, - 12553056811812850723, - 3211210156168296116, - ]), - y: Fq::from_raw([ - 4036545931324298107, - 7599907392816691312, - 15293245440448741876, - 212143551489911410, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 10931155675221794876, - 4312691987032924781, - 9804797475001633245, - 3451890802936893314, - ]), - y: Fq::from_raw([ - 11180962733343570413, - 10484712170183330434, - 14444948151863902680, - 2123487521383807780, - ]), - }, - G1Affine { - x: Fq::from_raw([ - 1814367689437931729, - 8489483461414090990, - 10000388380055359653, - 1286074470617787276, - ]), - y: Fq::from_raw([ - 7726546312100213647, - 1034780786427294399, - 6531068821869198065, - 517274402271116562, - ]), - }, - ] -}); - -/// Represents the initial state of the transcript for [`ChunkKind::Halo2`]. -pub static TRANSCRIPT_INIT_STATE_HALO2: LazyLock<Fr> = LazyLock::new(|| { - Fr::from_raw([ - 3505826241380660566, - 11473746322117040456, - 14075887197298535585, - 1737617936020314372, - ]) -}); - -/// Represents the initial state of the transcript for [`ChunkKind::Sp1`]. -pub static TRANSCRIPT_INIT_STATE_SP1: LazyLock<Fr> = LazyLock::new(|| { - Fr::from_raw([ - 1678899198020618715, - 10231258143962228858, - 12365017456265435574, - 841984517048583699, - ]) -}); From bcee797a640634c30832f60bc2c6676de2529143 Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Fri, 18 Oct 2024 15:17:14 +0100 Subject: [PATCH 09/12] refactor --- aggregator/src/aggregation/circuit.rs | 35 +++++++++++++++------------ 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 1290310891..2bb1e5141e 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -234,11 +234,29 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { &self.snarks_with_padding, self.as_proof(), ); - let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); + + // extract the following cells for later constraints + // - the accumulators + // - the public inputs from each snark + accumulator_instances.extend(flatten_accumulator(acc).iter().copied()); + // the snark is not a fresh one, assigned_instances already contains an + // accumulator so we want to skip the first 12 elements from the public + // input + snark_inputs.extend( + assigned_aggregation_instances + .iter() + .flat_map(|instance_column| instance_column.iter().skip(ACC_LEN)), + ); for (i, e) in assigned_aggregation_instances[0].iter().enumerate() { log::trace!("{}-th instance: {:?}", i, e.value) } + loader + .ctx_mut() + .print_stats(&["snark aggregation"]); + + let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); + // We must ensure that the commitments to preprocessed polynomial and initial // state of transcripts for every SNARK that is being aggregated belongs to the // fixed set of values expected. @@ -347,20 +365,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .assert_is_const(&mut ctx, &transcript_check, Fr::ONE); } - // extract the following cells for later constraints - // - the accumulators - // - the public inputs from each snark - accumulator_instances.extend(flatten_accumulator(acc).iter().copied()); - // the snark is not a fresh one, assigned_instances already contains an - // accumulator so we want to skip the first 12 elements from the public - // input - snark_inputs.extend( - assigned_aggregation_instances - .iter() - .flat_map(|instance_column| instance_column.iter().skip(ACC_LEN)), - ); - - ctx.print_stats(&["snark aggregation [chunks -> batch]"]); + ctx.print_stats(&["protocol check"]); log::debug!("batching: assigning barycentric"); let barycentric = config.blob_consistency_config.assign_barycentric( From fbc296d58f861cfe55fa00bb4ad56408e6a98e82 Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Tue, 22 Oct 2024 17:32:17 +0100 Subject: [PATCH 10/12] clean up + refactor --- aggregator/src/aggregation.rs | 40 ++++++++++++++++ aggregator/src/aggregation/circuit.rs | 35 ++++++++------ aggregator/src/constants.rs | 61 ------------------------- aggregator/src/tests/aggregation.rs | 7 +++ prover/src/aggregator/prover.rs | 2 + prover/src/common/prover/aggregation.rs | 32 +++++++++++-- prover/src/consts.rs | 17 +++++-- 7 files changed, 111 insertions(+), 83 deletions(-) diff --git a/aggregator/src/aggregation.rs b/aggregator/src/aggregation.rs index bb067bfb0c..501c2fb591 100644 --- a/aggregator/src/aggregation.rs +++ b/aggregator/src/aggregation.rs @@ -19,3 +19,43 @@ pub(crate) use rlc::{RlcConfig, POWS_OF_256}; pub use circuit::BatchCircuit; pub use config::BatchCircuitConfig; +use halo2_base::halo2_proofs::halo2curves::bn256::{Fr, G1Affine}; +use snark_verifier::Protocol; + +/// Alias for a list of G1 points. +pub type PreprocessedPolyCommits = Vec<G1Affine>; +/// Alias for the transcript's initial state. +pub type TranscriptInitState = Fr; + +/// Alias for the fixed part of the protocol which consists of the commitments to the preprocessed +/// polynomials and the initial state of the transcript. +#[derive(Clone)] +pub struct FixedProtocol { + /// The commitments to the preprocessed polynomials. + pub preprocessed: PreprocessedPolyCommits, + /// The initial state of the transcript. + pub init_state: TranscriptInitState, +} + +impl From<Protocol<G1Affine>> for FixedProtocol { + fn from(protocol: Protocol<G1Affine>) -> Self { + Self { + preprocessed: protocol.preprocessed, + init_state: protocol + .transcript_initial_state + .expect("protocol transcript init state None"), + } + } +} + +impl From<&Protocol<G1Affine>> for FixedProtocol { + fn from(protocol: &Protocol<G1Affine>) -> Self { + Self { + preprocessed: protocol.preprocessed.clone(), + init_state: protocol + .transcript_initial_state + .clone() + .expect("protocol transcript init state None"), + } + } +} diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 2bb1e5141e..4ef74b977d 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -33,12 +33,12 @@ use crate::{ aggregation::{decoder::WORKED_EXAMPLE, witgen::process, BatchCircuitConfig, BatchData}, batch::BatchHash, blob_consistency::BlobConsistencyConfig, - constants::{ACC_LEN, DIGEST_LEN, FIXED_PROTOCOL_HALO2, FIXED_PROTOCOL_SP1}, + constants::{ACC_LEN, DIGEST_LEN}, core::{assign_batch_hashes, extract_proof_and_instances_with_pairing_check}, util::parse_hash_digest_cells, witgen::{zstd_encode, MultiBlockProcessResult}, - ConfigParams, LOG_DEGREE, PI_CHAIN_ID, PI_CURRENT_BATCH_HASH, PI_CURRENT_STATE_ROOT, - PI_CURRENT_WITHDRAW_ROOT, PI_PARENT_BATCH_HASH, PI_PARENT_STATE_ROOT, + ConfigParams, FixedProtocol, LOG_DEGREE, PI_CHAIN_ID, PI_CURRENT_BATCH_HASH, + PI_CURRENT_STATE_ROOT, PI_CURRENT_WITHDRAW_ROOT, PI_PARENT_BATCH_HASH, PI_PARENT_STATE_ROOT, }; /// Batch circuit, the chunk aggregation routine below recursion circuit @@ -62,14 +62,21 @@ pub struct BatchCircuit<const N_SNARKS: usize> { // batch hash circuit for which the snarks are generated // the chunks in this batch are also padded already pub batch_hash: BatchHash<N_SNARKS>, + + /// The SNARK protocol from the halo2-based inner circuit route. + pub halo2_protocol: FixedProtocol, + /// The SNARK protocol from the sp1-based inner circuit route. + pub sp1_protocol: FixedProtocol, } impl<const N_SNARKS: usize> BatchCircuit<N_SNARKS> { - pub fn new( + pub fn new<P: Into<FixedProtocol>>( params: &ParamsKZG<Bn256>, snarks_with_padding: &[Snark], rng: impl Rng + Send, batch_hash: BatchHash<N_SNARKS>, + halo2_protocol: P, + sp1_protocol: P, ) -> Result<Self, snark_verifier::Error> { let timer = start_timer!(|| "generate aggregation circuit"); @@ -127,6 +134,8 @@ impl<const N_SNARKS: usize> BatchCircuit<N_SNARKS> { flattened_instances, as_proof: Value::known(as_proof), batch_hash, + halo2_protocol: halo2_protocol.into(), + sp1_protocol: sp1_protocol.into(), }) } @@ -251,9 +260,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::trace!("{}-th instance: {:?}", i, e.value) } - loader - .ctx_mut() - .print_stats(&["snark aggregation"]); + loader.ctx_mut().print_stats(&["snark aggregation"]); let mut ctx = Rc::into_inner(loader).unwrap().into_ctx(); @@ -265,11 +272,8 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::info!("populating constants"); let mut preprocessed_polys_halo2 = Vec::with_capacity(7); let mut preprocessed_polys_sp1 = Vec::with_capacity(7); - let (fixed_preprocessed_polys_halo2, fixed_transcript_init_state_halo2) = - FIXED_PROTOCOL_HALO2.clone(); - let (fixed_preprocessed_polys_sp1, fixed_transcript_init_state_sp1) = - FIXED_PROTOCOL_SP1.clone(); - for (i, &preprocessed_poly) in fixed_preprocessed_polys_halo2.iter().enumerate() + for (i, &preprocessed_poly) in + self.halo2_protocol.preprocessed.iter().enumerate() { log::debug!("load const {i}"); preprocessed_polys_halo2.push( @@ -279,7 +283,8 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { ); log::debug!("load const {i} OK"); } - for (i, &preprocessed_poly) in fixed_preprocessed_polys_sp1.iter().enumerate() { + for (i, &preprocessed_poly) in self.sp1_protocol.preprocessed.iter().enumerate() + { log::debug!("load const (sp1) {i}"); preprocessed_polys_sp1.push( config @@ -293,7 +298,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .field_chip() .range() .gate() - .assign_constant(&mut ctx, fixed_transcript_init_state_halo2) + .assign_constant(&mut ctx, self.halo2_protocol.init_state) .expect("IntegerInstructions::assign_constant infallible"); log::debug!("load transcript OK"); let transcript_init_state_sp1 = config @@ -301,7 +306,7 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .field_chip() .range() .gate() - .assign_constant(&mut ctx, fixed_transcript_init_state_sp1) + .assign_constant(&mut ctx, self.sp1_protocol.init_state) .expect("IntegerInstructions::assign_constant infallible"); log::info!("populating constants OK"); diff --git a/aggregator/src/constants.rs b/aggregator/src/constants.rs index 9637ec0226..7d5d633172 100644 --- a/aggregator/src/constants.rs +++ b/aggregator/src/constants.rs @@ -1,6 +1,3 @@ -use halo2_proofs::halo2curves::bn256::{Fr, G1Affine}; -use std::sync::LazyLock; - // A chain_id is u64 and uses 8 bytes pub(crate) const CHAIN_ID_LEN: usize = 8; @@ -91,61 +88,3 @@ pub const MAX_AGG_SNARKS: usize = 45; // Number of bytes in a u256. pub const N_BYTES_U256: usize = 32; - -/// Alias for a list of G1 points. -type PreprocessedPolyCommits = Vec<G1Affine>; -/// Alias for the transcript's initial state. -type TranscriptInitState = Fr; -/// Alias for the fixed part of the protocol which consists of the commitments to the preprocessed -/// polynomials and the initial state of the transcript. -type FixedProtocol = (PreprocessedPolyCommits, TranscriptInitState); - -/// The [`Batch Circuit`] supports aggregation of up to [`MAX_AGG_SNARKS`] SNARKs, where either -/// SNARK is of 2 kinds, namely: -/// -/// 1. halo2-based [`SuperCircuit`] -> [`CompressionCircuit`] (wide) -> `CompressionCircuit` (thin) -/// 2. sp1-based STARK -> halo2-based backend -> `CompressionCircuit` (thin) -/// -/// For each SNARK witness provided for aggregation, we require that the commitments to the -/// preprocessed polynomials and the transcript's initial state belong to a fixed set, one -/// belonging to each of the above SNARK kinds. -/// -/// Represents the fixed commitments to the preprocessed polynomials and the initial state of the -/// transcript for [`ChunkKind::Halo2`]. -pub static FIXED_PROTOCOL_HALO2: LazyLock<FixedProtocol> = LazyLock::new(|| { - let name = - std::env::var("HALO2_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_halo2.protocol".to_string()); - let dir = - std::env::var("SCROLL_PROVER_ASSETS_DIR").unwrap_or("./tests/test_assets".to_string()); - let path = std::path::Path::new(&dir).join(name); - let file = std::fs::File::open(&path).expect("could not open file"); - let reader = std::io::BufReader::new(file); - let protocol: snark_verifier::Protocol<G1Affine> = - serde_json::from_reader(reader).expect("could not deserialise protocol"); - ( - protocol.preprocessed, - protocol - .transcript_initial_state - .expect("transcript initial state is None"), - ) -}); - -/// Represents the fixed commitments to the preprocessed polynomials and the initial state of the -/// transcript for [`ChunkKind::Sp1`]. -pub static FIXED_PROTOCOL_SP1: LazyLock<FixedProtocol> = LazyLock::new(|| { - let name = - std::env::var("SP1_CHUNK_PROTOCOL").unwrap_or("chunk_chunk_sp1.protocol".to_string()); - let dir = - std::env::var("SCROLL_PROVER_ASSETS_DIR").unwrap_or("./tests/test_assets".to_string()); - let path = std::path::Path::new(&dir).join(name); - let file = std::fs::File::open(&path).expect("could not open file"); - let reader = std::io::BufReader::new(file); - let protocol: snark_verifier::Protocol<G1Affine> = - serde_json::from_reader(reader).expect("could not deserialise protocol"); - ( - protocol.preprocessed, - protocol - .transcript_initial_state - .expect("transcript initial state is None"), - ) -}); diff --git a/aggregator/src/tests/aggregation.rs b/aggregator/src/tests/aggregation.rs index ec374c420b..96c5034464 100644 --- a/aggregator/src/tests/aggregation.rs +++ b/aggregator/src/tests/aggregation.rs @@ -209,6 +209,7 @@ fn build_new_batch_circuit<const N_SNARKS: usize>( }) .collect_vec() }; + let snark_protocol = real_snarks[0].protocol.clone(); // ========================== // padded chunks @@ -225,6 +226,8 @@ fn build_new_batch_circuit<const N_SNARKS: usize>( [real_snarks, padded_snarks].concat().as_ref(), rng, batch_hash, + &snark_protocol, + &snark_protocol, ) .unwrap() } @@ -293,6 +296,8 @@ fn build_batch_circuit_skip_encoding<const N_SNARKS: usize>() -> BatchCircuit<N_ }) .collect_vec() }; + let snark_protocol = real_snarks[0].protocol.clone(); + // ========================== // padded chunks // ========================== @@ -302,6 +307,8 @@ fn build_batch_circuit_skip_encoding<const N_SNARKS: usize>() -> BatchCircuit<N_ [real_snarks, padded_snarks].concat().as_ref(), rng, batch_hash, + &snark_protocol, + &snark_protocol, ) .unwrap() } diff --git a/prover/src/aggregator/prover.rs b/prover/src/aggregator/prover.rs index e648f7dd44..8c4f9356ce 100644 --- a/prover/src/aggregator/prover.rs +++ b/prover/src/aggregator/prover.rs @@ -238,6 +238,8 @@ impl<'params> Prover<'params> { LayerId::Layer3.id(), LayerId::Layer3.degree(), batch_info, + &self.halo2_protocol, + &self.sp1_protocol, &layer2_snarks, output_dir, )?; diff --git a/prover/src/common/prover/aggregation.rs b/prover/src/common/prover/aggregation.rs index 4d4ca2bc1b..cca60d5443 100644 --- a/prover/src/common/prover/aggregation.rs +++ b/prover/src/common/prover/aggregation.rs @@ -6,6 +6,7 @@ use crate::{ }; use aggregator::{BatchCircuit, BatchHash}; use anyhow::{anyhow, Result}; +use halo2_proofs::halo2curves::bn256::G1Affine; use rand::Rng; use snark_verifier_sdk::Snark; use std::env; @@ -17,13 +18,26 @@ impl<'params> Prover<'params> { degree: u32, mut rng: impl Rng + Send, batch_info: BatchHash<N_SNARKS>, + halo2_protocol: &[u8], + sp1_protocol: &[u8], previous_snarks: &[Snark], ) -> Result<Snark> { env::set_var("AGGREGATION_CONFIG", layer_config_path(id)); - let circuit: BatchCircuit<N_SNARKS> = - BatchCircuit::new(self.params(degree), previous_snarks, &mut rng, batch_info) - .map_err(|err| anyhow!("Failed to construct aggregation circuit: {err:?}"))?; + let halo2_protocol = + serde_json::from_slice::<snark_verifier::Protocol<G1Affine>>(halo2_protocol)?; + let sp1_protocol = + serde_json::from_slice::<snark_verifier::Protocol<G1Affine>>(sp1_protocol)?; + + let circuit: BatchCircuit<N_SNARKS> = BatchCircuit::new( + self.params(degree), + previous_snarks, + &mut rng, + batch_info, + halo2_protocol, + sp1_protocol, + ) + .map_err(|err| anyhow!("Failed to construct aggregation circuit: {err:?}"))?; self.gen_snark(id, degree, &mut rng, circuit, "gen_agg_snark") } @@ -34,6 +48,8 @@ impl<'params> Prover<'params> { id: &str, degree: u32, batch_info: BatchHash<N_SNARKS>, + halo2_protocol: &[u8], + sp1_protocol: &[u8], previous_snarks: &[Snark], output_dir: Option<&str>, ) -> Result<Snark> { @@ -48,7 +64,15 @@ impl<'params> Prover<'params> { Some(snark) => Ok(snark), None => { let rng = gen_rng(); - let result = self.gen_agg_snark(id, degree, rng, batch_info, previous_snarks); + let result = self.gen_agg_snark( + id, + degree, + rng, + batch_info, + halo2_protocol, + sp1_protocol, + previous_snarks, + ); if let (Some(_), Ok(snark)) = (output_dir, &result) { write_snark(&file_path, snark); } diff --git a/prover/src/consts.rs b/prover/src/consts.rs index a919f86cdf..19b1800ddb 100644 --- a/prover/src/consts.rs +++ b/prover/src/consts.rs @@ -13,11 +13,22 @@ pub fn chunk_vk_filename() -> String { read_env_var("CHUNK_VK_FILENAME", "vk_chunk.vkey".to_string()) } -pub static CHUNK_PROTOCOL_FILENAME: LazyLock<String> = - LazyLock::new(|| read_env_var("CHUNK_PROTOCOL_FILENAME", "chunk.protocol".to_string())); - +/// The file descriptor for the JSON serialised SNARK [`protocol`][protocol] that +/// defines the [`CompressionCircuit`][compr_circuit] SNARK that uses halo2-based +/// [`SuperCircuit`][super_circuit]. +/// +/// [protocol]: snark_verifier::Protocol +/// [compr_circuit]: aggregator::CompressionCircuit +/// [super_circuit]: zkevm_circuits::super_circuit::SuperCircuit pub static FD_HALO2_CHUNK_PROTOCOL: LazyLock<String> = LazyLock::new(|| read_env_var("HALO2_CHUNK_PROTOCOL", "chunk_halo2.protocol".to_string())); + +/// The file descriptor for the JSON serialised SNARK [`protocol`][protocol] that +/// defines the [`CompressionCircuit`][compr_circuit] SNARK that uses sp1-based +/// STARK that is SNARKified using a halo2-backend. +/// +/// [protocol]: snark_verifier::Protocol +/// [compr_circuit]: aggregator::CompressionCircuit pub static FD_SP1_CHUNK_PROTOCOL: LazyLock<String> = LazyLock::new(|| read_env_var("SP1_CHUNK_PROTOCOL", "chunk_sp1.protocol".to_string())); From 4afe87ef1cb759c82b8f0c4146f8c9a904c84280 Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Tue, 5 Nov 2024 14:06:54 +0000 Subject: [PATCH 11/12] chore: clippy --- aggregator/src/aggregation.rs | 1 - aggregator/src/aggregation/circuit.rs | 17 ++--------------- prover/src/common/prover/aggregation.rs | 2 ++ testool/src/statetest/executor.rs | 2 +- 4 files changed, 5 insertions(+), 17 deletions(-) diff --git a/aggregator/src/aggregation.rs b/aggregator/src/aggregation.rs index 501c2fb591..a616965750 100644 --- a/aggregator/src/aggregation.rs +++ b/aggregator/src/aggregation.rs @@ -54,7 +54,6 @@ impl From<&Protocol<G1Affine>> for FixedProtocol { preprocessed: protocol.preprocessed.clone(), init_state: protocol .transcript_initial_state - .clone() .expect("protocol transcript init state None"), } } diff --git a/aggregator/src/aggregation/circuit.rs b/aggregator/src/aggregation/circuit.rs index 4ef74b977d..064dfe9d88 100644 --- a/aggregator/src/aggregation/circuit.rs +++ b/aggregator/src/aggregation/circuit.rs @@ -225,13 +225,10 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { let loader: Rc<Halo2Loader<G1Affine, EccChip<Fr, FpConfig<Fr, Fq>>>> = Halo2Loader::new(ecc_chip, ctx); - // // extract the assigned values for // - instances which are the public inputs of each chunk (prefixed with 12 // instances from previous accumulators) // - new accumulator - // - log::debug!("aggregation: chunk aggregation"); let ( assigned_aggregation_instances, acc, @@ -272,26 +269,19 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { log::info!("populating constants"); let mut preprocessed_polys_halo2 = Vec::with_capacity(7); let mut preprocessed_polys_sp1 = Vec::with_capacity(7); - for (i, &preprocessed_poly) in - self.halo2_protocol.preprocessed.iter().enumerate() - { - log::debug!("load const {i}"); + for &preprocessed_poly in self.halo2_protocol.preprocessed.iter() { preprocessed_polys_halo2.push( config .ecc_chip() .assign_constant_point(&mut ctx, preprocessed_poly), ); - log::debug!("load const {i} OK"); } - for (i, &preprocessed_poly) in self.sp1_protocol.preprocessed.iter().enumerate() - { - log::debug!("load const (sp1) {i}"); + for &preprocessed_poly in self.sp1_protocol.preprocessed.iter() { preprocessed_polys_sp1.push( config .ecc_chip() .assign_constant_point(&mut ctx, preprocessed_poly), ); - log::debug!("load const (sp1) {i} OK"); } let transcript_init_state_halo2 = config .ecc_chip() @@ -300,7 +290,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .gate() .assign_constant(&mut ctx, self.halo2_protocol.init_state) .expect("IntegerInstructions::assign_constant infallible"); - log::debug!("load transcript OK"); let transcript_init_state_sp1 = config .ecc_chip() .field_chip() @@ -308,7 +297,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { .gate() .assign_constant(&mut ctx, self.sp1_protocol.init_state) .expect("IntegerInstructions::assign_constant infallible"); - log::info!("populating constants OK"); // Commitments to the preprocessed polynomials. for preprocessed_polys in preprocessed_poly_sets.iter() { @@ -372,7 +360,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> { ctx.print_stats(&["protocol check"]); - log::debug!("batching: assigning barycentric"); let barycentric = config.blob_consistency_config.assign_barycentric( &mut ctx, &self.batch_hash.blob_bytes, diff --git a/prover/src/common/prover/aggregation.rs b/prover/src/common/prover/aggregation.rs index cca60d5443..d17e838a94 100644 --- a/prover/src/common/prover/aggregation.rs +++ b/prover/src/common/prover/aggregation.rs @@ -12,6 +12,7 @@ use snark_verifier_sdk::Snark; use std::env; impl<'params> Prover<'params> { + #[allow(clippy::too_many_arguments)] pub fn gen_agg_snark<const N_SNARKS: usize>( &mut self, id: &str, @@ -42,6 +43,7 @@ impl<'params> Prover<'params> { self.gen_snark(id, degree, &mut rng, circuit, "gen_agg_snark") } + #[allow(clippy::too_many_arguments)] pub fn load_or_gen_agg_snark<const N_SNARKS: usize>( &mut self, name: &str, diff --git a/testool/src/statetest/executor.rs b/testool/src/statetest/executor.rs index 2dcecc8d4c..34dbbc4485 100644 --- a/testool/src/statetest/executor.rs +++ b/testool/src/statetest/executor.rs @@ -644,7 +644,7 @@ pub fn run_test( eth_types::constants::set_env_coinbase(&st.env.current_coinbase); prover::test::chunk_prove( &test_id, - prover::ChunkProvingTask::from(vec![_scroll_trace]), + prover::ChunkProvingTask::new(vec![_scroll_trace], prover::ChunkKind::Halo2), ); } From d364d76d97b6911a41eaf790fee20fcdaefa978a Mon Sep 17 00:00:00 2001 From: Rohit Narurkar <rohit.narurkar@proton.me> Date: Wed, 6 Nov 2024 13:16:24 +0000 Subject: [PATCH 12/12] test: ignore to dbg later with e2e tests --- aggregator/src/tests/aggregation.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/aggregator/src/tests/aggregation.rs b/aggregator/src/tests/aggregation.rs index 96c5034464..d096a1f612 100644 --- a/aggregator/src/tests/aggregation.rs +++ b/aggregator/src/tests/aggregation.rs @@ -17,6 +17,7 @@ use crate::{ }; #[test] +#[ignore = "dbg: insufficient number of advice columns"] fn batch_circuit_raw() { let k = 21; let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_batch_circuit_skip_encoding(); @@ -26,6 +27,7 @@ fn batch_circuit_raw() { } #[test] +#[ignore = "dbg: insufficient number of advice columns"] fn batch_circuit_encode() { let k = 21; let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_new_batch_circuit(2, k);