diff --git a/dsl_compile/src/input_user.rs b/dsl_compile/src/input_user.rs
index 8c02c958..fee43ab5 100644
--- a/dsl_compile/src/input_user.rs
+++ b/dsl_compile/src/input_user.rs
@@ -2,6 +2,7 @@ use crate::errors::{bail, DslError, Result};
 use ansi_term::Colour;
 use std::path::{Path, PathBuf};
 
+#[allow(dead_code)]
 pub struct Input {
     pub input_program: PathBuf,
     pub out_r1cs: PathBuf,
diff --git a/recursion/Cargo.toml b/recursion/Cargo.toml
index 64951caf..bde70276 100644
--- a/recursion/Cargo.toml
+++ b/recursion/Cargo.toml
@@ -33,9 +33,9 @@ starky = { path = "../starky", default-features = false }
 plonky = { path = "../plonky", default-features = false }
 algebraic = { path = "../algebraic", default-features = false }
 
-powdr-pil-analyzer = { git = "https://github.com/eigmax/powdr.git", branch = "main" }
-powdr-number = { git = "https://github.com/eigmax/powdr.git", branch = "main" }
-powdr-ast = { git = "https://github.com/eigmax/powdr.git", branch = "main" }
+powdr = { git = "https://github.com/powdr-labs/powdr.git", rev = "450e3f1" }
+powdr-ast = { git = "https://github.com/powdr-labs/powdr.git", rev = "450e3f1" }
+powdr-pil-analyzer = { git = "https://github.com/powdr-labs/powdr.git", rev = "450e3f1" }
 
 [dev-dependencies]
 env_logger = "0.10"
diff --git a/recursion/src/pilcom.rs b/recursion/src/pilcom.rs
index f2b5ecbd..95f2572d 100644
--- a/recursion/src/pilcom.rs
+++ b/recursion/src/pilcom.rs
@@ -1,20 +1,21 @@
 //! Poring from https://github.com/powdr-labs/powdr.git.
+use std::rc::Rc;
 mod export;
 mod expression_counter;
-use powdr_number::GoldilocksField;
+pub use export::export;
+
+use powdr::number::GoldilocksField;
 use starky::types::PIL;
 use std::path::Path;
 
 pub fn compile_pil_from_str(pil_str: &str) -> PIL {
     let analyze = powdr_pil_analyzer::analyze_string::<GoldilocksField>(pil_str);
-
-    export::export(&analyze)
+    export(Rc::new(analyze))
 }
 
 pub fn compile_pil_from_path(pil_path: &str) -> PIL {
-    let analyze = powdr_pil_analyzer::analyze::<GoldilocksField>(Path::new(pil_path));
-
-    export::export(&analyze)
+    let analyze = powdr_pil_analyzer::analyze_file::<GoldilocksField>(Path::new(pil_path));
+    export(Rc::new(analyze))
 }
 
 #[cfg(test)]
diff --git a/recursion/src/pilcom/export.rs b/recursion/src/pilcom/export.rs
index 2cfef325..090c6976 100644
--- a/recursion/src/pilcom/export.rs
+++ b/recursion/src/pilcom/export.rs
@@ -1,5 +1,5 @@
 //! porting it from powdr
-use powdr_number::FieldElement;
+use powdr::number::FieldElement;
 use std::cmp;
 use std::collections::HashMap;
 use std::path::PathBuf;
@@ -36,8 +36,8 @@ struct Exporter<'a, T> {
     number_q: u64,
 }
 
-pub fn export<T: FieldElement>(analyzed: &Analyzed<T>) -> PIL {
-    let mut exporter = Exporter::new(analyzed);
+pub fn export<T: FieldElement>(analyzed: std::rc::Rc<Analyzed<T>>) -> PIL {
+    let mut exporter = Exporter::new(&analyzed);
     let mut publics = Vec::new();
     let mut pol_identities = Vec::new();
     let mut plookup_identities = Vec::new();
@@ -48,11 +48,13 @@ pub fn export<T: FieldElement>(analyzed: &Analyzed<T>) -> PIL {
             StatementIdentifier::Definition(name) => {
                 if let Some((poly, value)) = analyzed.intermediate_columns.get(name) {
                     assert_eq!(poly.kind, SymbolKind::Poly(PolynomialType::Intermediate));
-                    let expression_id = exporter.extract_expression(value, 1);
-                    assert_eq!(
-                        expression_id,
-                        exporter.intermediate_poly_expression_ids[&poly.id] as usize
-                    );
+                    for ((_, id), value) in poly.array_elements().zip(value) {
+                        let expression_id = exporter.extract_expression(value, 1);
+                        assert_eq!(
+                            expression_id,
+                            exporter.intermediate_poly_expression_ids[&id.id] as usize
+                        );
+                    }
                 }
             }
             StatementIdentifier::PublicDeclaration(name) => {
@@ -284,6 +286,15 @@ impl<'a, T: FieldElement> Exporter<'a, T> {
                     ..DEFAULT_EXPR
                 },
             ),
+            Expression::Challenge(challenge) => (
+                0,
+                StarkyExpr {
+                    op: "challenge".to_string(),
+                    deg: 0,
+                    id: Some(challenge.id as usize),
+                    ..DEFAULT_EXPR
+                },
+            ),
             Expression::Number(value) => (
                 0,
                 StarkyExpr {
diff --git a/recursion/src/pilcom/expression_counter.rs b/recursion/src/pilcom/expression_counter.rs
index de1ec11c..9ed06d53 100644
--- a/recursion/src/pilcom/expression_counter.rs
+++ b/recursion/src/pilcom/expression_counter.rs
@@ -21,7 +21,9 @@ pub fn compute_intermediate_expression_ids<T>(analyzed: &Analyzed<T>) -> HashMap
             poly.expression_count()
         } else if let Some((poly, _)) = analyzed.intermediate_columns.get(name) {
             assert!(poly.kind == SymbolKind::Poly(PolynomialType::Intermediate));
-            ids.insert(poly.id, expression_counter as u64);
+            for (index, (_, id)) in poly.array_elements().enumerate() {
+                ids.insert(id.id, (expression_counter + index) as u64);
+            }
             poly.expression_count()
         } else {
             unreachable!()
@@ -49,7 +51,11 @@ impl ExpressionCounter for Identity {
 
 impl ExpressionCounter for Symbol {
     fn expression_count(&self) -> usize {
-        (self.kind == SymbolKind::Poly(PolynomialType::Intermediate)).into()
+        if self.kind == SymbolKind::Poly(PolynomialType::Intermediate) {
+            self.length.unwrap_or(1) as usize
+        } else {
+            0
+        }
     }
 }
diff --git a/starkjs/fibonacci/fibonacci.pil b/starkjs/fibonacci/fibonacci.pil
index 82d9f308..8e48d701 100644
--- a/starkjs/fibonacci/fibonacci.pil
+++ b/starkjs/fibonacci/fibonacci.pil
@@ -3,9 +3,9 @@ namespace Fibonacci(%N);
     pol constant L1, LLAST;
     pol commit l1,l2;
-    pol l2c = l2;
+    //pol l2c = l2;
 
-    public in1 = l2c(0);
+    public in1 = l2(0);
     public in2 = l1(0);
     public out = l1(%N-1);
diff --git a/zkvm/Cargo.toml b/zkvm/Cargo.toml
index 70931cf9..b732976b 100644
--- a/zkvm/Cargo.toml
+++ b/zkvm/Cargo.toml
@@ -10,17 +10,9 @@ itertools = "0.12.0"
 # serialization
 log = "0.4.0"
 
-#powdr-backend = { git = "https://github.com/powdr-labs/powdr", rev = "97ea8d0" }
-#powdr-pipeline = { git = "https://github.com/powdr-labs/powdr", rev = "97ea8d0" }
-#powdr-riscv = { git = "https://github.com/powdr-labs/powdr", rev = "97ea8d0" }
-#powdr-riscv-executor = { git = "https://github.com/powdr-labs/powdr", rev = "97ea8d0" }
-#powdr-number = { git = "https://github.com/powdr-labs/powdr", rev = "97ea8d0" }
-
-powdr-backend = { git = "https://github.com/eigmax/powdr", branch = "main" }
-powdr-pipeline = { git = "https://github.com/eigmax/powdr", branch = "main" }
-powdr-riscv = { git = "https://github.com/eigmax/powdr", branch = "main" }
-powdr-riscv-executor = { git = "https://github.com/eigmax/powdr", branch = "main" }
-powdr-number = { git = "https://github.com/eigmax/powdr", branch = "main" }
+powdr = { git = "https://github.com/powdr-labs/powdr", rev = "450e3f1" }
+starky = { path = "../starky" }
+recursion = { path = "../recursion" }
 
 hex = "0.4.3"
 thiserror = "1.0"
diff --git a/zkvm/vm/evm/Cargo.toml b/zkvm/program/evm/Cargo.toml
similarity index 87%
rename from zkvm/vm/evm/Cargo.toml
rename to zkvm/program/evm/Cargo.toml
index 1412946e..4e71714b 100644
--- a/zkvm/vm/evm/Cargo.toml
+++ b/zkvm/program/evm/Cargo.toml
@@ -5,7 +5,7 @@ edition = "2021"
 
 [dependencies]
 revm = { git = "https://github.com/powdr-labs/revm", branch = "serde-no-std", default-features = false, features = [ "serde" ] }
-powdr-riscv-runtime = { git = "https://github.com/eigmax/powdr", branch = "main" }
+powdr-riscv-runtime = { git = "https://github.com/powdr-labs/powdr", rev = "450e3f1" }
 models = { git = "https://github.com/eigmax/powdr-revme", branch = "continuations", package = "models" }
 serde = { version = "1.0", default-features = false, features = ["alloc", "derive", "rc"] }
 serde_json = { version = "1.0", default-features = false, features = ["alloc"] }
diff --git a/zkvm/vm/evm/README.md b/zkvm/program/evm/README.md
similarity index 100%
rename from zkvm/vm/evm/README.md
rename to zkvm/program/evm/README.md
diff --git a/zkvm/vm/evm/rust-toolchain.toml b/zkvm/program/evm/rust-toolchain.toml
similarity index 100%
rename from zkvm/vm/evm/rust-toolchain.toml
rename to zkvm/program/evm/rust-toolchain.toml
diff --git a/zkvm/vm/evm/src/lib.rs b/zkvm/program/evm/src/lib.rs
similarity index 98%
rename from zkvm/vm/evm/src/lib.rs
rename to zkvm/program/evm/src/lib.rs
index 24137455..4888f480 100644
--- a/zkvm/vm/evm/src/lib.rs
+++ b/zkvm/program/evm/src/lib.rs
@@ -18,6 +18,7 @@ use alloc::string::String;
 use alloc::string::ToString;
 
 use k256::ecdsa::SigningKey;
+const TEST_CHANNEL: u32 = 1;
 
 /// Recover the address from a private key (SigningKey).
 pub fn recover_address(private_key: &[u8]) -> Option<Address> {
@@ -28,7 +29,7 @@ pub fn recover_address(private_key: &[u8]) -> Option<Address> {
 
 #[no_mangle]
 fn main() {
-    let suite_json: String = get_data_serde(666);
+    let suite_json: String = get_data_serde(TEST_CHANNEL);
     print!("suite_json: {suite_json}\n");
 
     let suite = read_suite(&suite_json);
diff --git a/zkvm/vm/lr/Cargo.toml b/zkvm/program/lr/Cargo.toml
similarity index 68%
rename from zkvm/vm/lr/Cargo.toml
rename to zkvm/program/lr/Cargo.toml
index 2eb0fb29..0ddf782e 100644
--- a/zkvm/vm/lr/Cargo.toml
+++ b/zkvm/program/lr/Cargo.toml
@@ -6,7 +6,6 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-powdr-riscv-runtime = { git = "https://github.com/eigmax/powdr", branch = "main" }
-
+powdr-riscv-runtime = { git = "https://github.com/powdr-labs/powdr", rev = "450e3f1" }
 
 [workspace]
diff --git a/zkvm/vm/lr/rust-toolchain.toml b/zkvm/program/lr/rust-toolchain.toml
similarity index 100%
rename from zkvm/vm/lr/rust-toolchain.toml
rename to zkvm/program/lr/rust-toolchain.toml
diff --git a/zkvm/vm/lr/src/lib.rs b/zkvm/program/lr/src/lib.rs
similarity index 100%
rename from zkvm/vm/lr/src/lib.rs
rename to zkvm/program/lr/src/lib.rs
diff --git a/zkvm/src/lib.rs b/zkvm/src/lib.rs
index 509e1580..d11ba95e 100644
--- a/zkvm/src/lib.rs
+++ b/zkvm/src/lib.rs
@@ -1,21 +1,104 @@
 use anyhow::Result;
-use powdr_backend::BackendType;
-use powdr_number::FieldElement;
-use powdr_number::GoldilocksField;
-use powdr_pipeline::{Pipeline, Stage};
-use powdr_riscv::continuations::{
-    bootloader::default_input, rust_continuations, rust_continuations_dry_run,
+use powdr::backend::BackendType;
+use powdr::number::{DegreeType, FieldElement, GoldilocksField};
+use powdr::riscv::continuations::{rust_continuations, rust_continuations_dry_run};
+use powdr::riscv::{compile_rust, CoProcessors};
+use powdr::Pipeline;
+use recursion::pilcom::export as pil_export;
+use starky::{
+    merklehash::MerkleTreeGL,
+    pil2circom,
+    stark_setup::StarkSetup,
+    types::{StarkStruct, Step},
 };
-use powdr_riscv::{compile_rust, CoProcessors};
+use std::fs;
+use std::io::BufWriter;
 use std::path::Path;
 use std::time::Instant;
 
+const TEST_CHANNEL: u32 = 1;
+
+fn generate_witness_and_prove<F: FieldElement>(
+    mut pipeline: Pipeline<F>,
+) -> Result<(), Vec<String>> {
+    let start = Instant::now();
+    log::debug!("Generating witness...");
+    pipeline.compute_witness().unwrap();
+    let duration = start.elapsed();
+    log::debug!("Generating witness took: {:?}", duration);
+
+    let start = Instant::now();
+    log::debug!("Proving ...");
+
+    pipeline = pipeline.with_backend(BackendType::EStark);
+    pipeline.compute_proof().unwrap();
+    let duration = start.elapsed();
+    log::debug!("Proving took: {:?}", duration);
+    Ok(())
+}
+
+fn generate_verifier<F: FieldElement, W: std::io::Write>(
+    mut pipeline: Pipeline<F>,
+    mut writer: W,
+) -> Result<()> {
+    let buf = Vec::new();
+    let mut vw = BufWriter::new(buf);
+    pipeline = pipeline.with_backend(BackendType::EStark);
+    pipeline.export_verification_key(&mut vw).unwrap();
+    log::debug!("Export verification key done");
+    let mut setup: StarkSetup<MerkleTreeGL> = serde_json::from_slice(&vw.into_inner()?)?;
+    log::debug!("Load StarkSetup done");
+
+    let pil = pipeline.optimized_pil().unwrap();
+
+    let degree = pil.degree();
+    assert!(degree > 1);
+    let n_bits = (DegreeType::BITS - (degree - 1).leading_zeros()) as usize;
+    let n_bits_ext = n_bits + 1;
+
+    let steps = (2..=n_bits_ext)
+        .rev()
+        .step_by(4)
+        .map(|b| Step { nBits: b })
+        .collect();
+
+    let params = StarkStruct {
+        nBits: n_bits,
+        nBitsExt: n_bits_ext,
+        nQueries: 2,
+        verificationHashType: "GL".to_owned(),
+        steps,
+    };
+
+    // generate circom
+    let opt = pil2circom::StarkOption {
+        enable_input: false,
+        verkey_input: false,
+        skip_main: true,
+        agg_stage: false,
+    };
+    if !setup.starkinfo.qs.is_empty() {
+        let pil_json = pil_export::<F>(pil);
+        let str_ver = pil2circom::pil2circom(
+            &pil_json,
+            &setup.const_root,
+            &params,
+            &mut setup.starkinfo,
+            &mut setup.program,
+            &opt,
+        )
+        .unwrap();
+        writer.write_fmt(format_args!("{}", str_ver))?;
+    }
+    Ok(())
+}
+
 pub fn zkvm_evm_execute_and_prove(task: &str, suite_json: String, output_path: &str) -> Result<()> {
     log::debug!("Compiling Rust...");
     let force_overwrite = true;
     let with_bootloader = true;
-    let (asm_file_path, asm_contents) = compile_rust(
-        &format!("vm/{task}"),
+    let (asm_file_path, asm_contents) = compile_rust::<GoldilocksField>(
+        &format!("program/{task}"),
         Path::new(output_path),
         force_overwrite,
         &CoProcessors::base().with_poseidon(),
@@ -24,78 +107,52 @@ pub fn zkvm_evm_execute_and_prove(task: &str, suite_json: String, output_path: &
     .ok_or_else(|| vec!["could not compile rust".to_string()])
     .unwrap();
 
-    let mk_pipeline = || {
-        Pipeline::<GoldilocksField>::default()
-            .with_output(output_path.into(), true)
-            .from_asm_string(asm_contents.clone(), Some(asm_file_path.clone()))
-            .with_prover_inputs(vec![])
-    };
-
-    log::debug!("Creating pipeline from powdr-asm...");
-    let start = Instant::now();
-    let pipeline = mk_pipeline();
-    let duration = start.elapsed();
-    log::debug!("Pipeline from powdr-asm took: {:?}", duration);
+    let mut pipeline = Pipeline::<GoldilocksField>::default()
+        .with_output(output_path.into(), true)
+        .from_asm_string(asm_contents.clone(), Some(asm_file_path.clone()))
+        .with_prover_inputs(Default::default())
+        .add_data(TEST_CHANNEL, &suite_json);
 
-    log::debug!("Advancing pipeline to fixed columns...");
+    log::debug!("Computing fixed columns...");
     let start = Instant::now();
-    let pil_with_evaluated_fixed_cols = pipeline.pil_with_evaluated_fixed_cols().unwrap();
-    let duration = start.elapsed();
-    log::debug!("Advancing pipeline took: {:?}", duration);
-    let mk_pipeline_with_data = || mk_pipeline().add_data(666, &suite_json);
+    pipeline.compute_fixed_cols().unwrap();
 
-    let mk_pipeline_opt = || {
-        mk_pipeline_with_data()
-            .from_pil_with_evaluated_fixed_cols(pil_with_evaluated_fixed_cols.clone())
-    };
+    let duration = start.elapsed();
+    log::debug!("Computing fixed columns took: {:?}", duration);
+    /*
     log::debug!("Running powdr-riscv executor in fast mode...");
     let start = Instant::now();
-    let (trace, _mem) = powdr_riscv_executor::execute::<GoldilocksField>(
+
+    let (trace, _mem) = powdr::riscv_executor::execute::<GoldilocksField>(
         &asm_contents,
-        mk_pipeline_with_data().data_callback().unwrap(),
+        powdr::riscv_executor::MemoryState::new(),
+        pipeline.data_callback().unwrap(),
        &default_input(&[]),
-        powdr_riscv_executor::ExecMode::Fast,
+        powdr::riscv_executor::ExecMode::Fast,
     );
     let duration = start.elapsed();
     log::debug!("Fast executor took: {:?}", duration);
     log::debug!("Trace length: {}", trace.len);
+    */
 
     log::debug!("Running powdr-riscv executor in trace mode for continuations...");
     let start = Instant::now();
-    let bootloader_inputs = rust_continuations_dry_run(mk_pipeline_with_data());
-    let duration = start.elapsed();
-    log::debug!("Trace executor took: {:?}", duration);
-    let prove_with = Some(BackendType::EStark);
+    let bootloader_inputs = rust_continuations_dry_run(&mut pipeline);
 
-    let generate_witness_and_prove =
-        |mut pipeline: Pipeline<GoldilocksField>| -> Result<(), Vec<String>> {
-            let start = Instant::now();
-            log::debug!("Generating witness...");
-            pipeline.advance_to(Stage::GeneratedWitness)?;
-            let duration = start.elapsed();
-            log::debug!("Generating witness took: {:?}", duration);
-
-            let start = Instant::now();
-            log::debug!("Proving ...");
-            prove_with.map(|backend| pipeline.with_backend(backend).proof().unwrap());
-            let duration = start.elapsed();
-            log::debug!("Proving took: {:?}", duration);
-            Ok(())
-        };
+    let duration = start.elapsed();
+    log::debug!("Trace executor took: {:?}", duration);
 
     log::debug!("Running witness generation...");
     let start = Instant::now();
-    rust_continuations(
-        mk_pipeline_opt,
-        generate_witness_and_prove,
-        bootloader_inputs,
-    )
-    .unwrap();
+
+    rust_continuations(pipeline, generate_witness_and_prove, bootloader_inputs).unwrap();
+
     let duration = start.elapsed();
     log::debug!("Witness generation took: {:?}", duration);
+
     Ok(())
 }
 
@@ -103,11 +160,11 @@ pub fn zkvm_evm_generate_chunks(
     workspace: &str,
     suite_json: &String,
     output_path: &str,
-) -> Result<Vec<Vec<GoldilocksField>>> {
+) -> Result<Vec<(Vec<GoldilocksField>, u64)>> {
     log::debug!("Compiling Rust...");
     let force_overwrite = true;
     let with_bootloader = true;
-    let (asm_file_path, asm_contents) = compile_rust(
+    let (asm_file_path, asm_contents) = compile_rust::<GoldilocksField>(
         workspace,
         Path::new(output_path),
         force_overwrite,
@@ -117,34 +174,37 @@ pub fn zkvm_evm_generate_chunks(
     .ok_or_else(|| vec!["could not compile rust".to_string()])
     .unwrap();
 
-    let mk_pipeline = || {
-        Pipeline::<GoldilocksField>::default()
-            .with_output(output_path.into(), true)
-            .from_asm_string(asm_contents.clone(), Some(asm_file_path.clone()))
-            .with_prover_inputs(vec![])
-    };
-
-    let mk_pipeline_with_data = || mk_pipeline().add_data(666, suite_json);
+    let mut pipeline = Pipeline::<GoldilocksField>::default()
+        .with_output(output_path.into(), true)
+        .from_asm_string(asm_contents.clone(), Some(asm_file_path.clone()))
+        .with_prover_inputs(Default::default())
+        .add_data(TEST_CHANNEL, suite_json);
 
     log::debug!("Running powdr-riscv executor in fast mode...");
-    let (trace, _mem) = powdr_riscv_executor::execute::<GoldilocksField>(
+    /*
+    let (trace, _mem) = powdr::riscv_executor::execute::<GoldilocksField>(
         &asm_contents,
-        mk_pipeline_with_data().data_callback().unwrap(),
+        powdr::riscv_executor::MemoryState::new(),
+        pipeline.data_callback().unwrap(),
         &default_input(&[]),
-        powdr_riscv_executor::ExecMode::Fast,
+        powdr::riscv_executor::ExecMode::Fast,
     );
-    log::debug!("Trace length: {}", trace.len);
+    log::debug!("Trace length: {}", trace.len);
+    */
 
     log::debug!("Running powdr-riscv executor in trace mode for continuations...");
     let start = Instant::now();
-    let bootloader_inputs = rust_continuations_dry_run(mk_pipeline_with_data());
+
+    let bootloader_inputs = rust_continuations_dry_run(&mut pipeline);
+
     let duration = start.elapsed();
     log::debug!(
         "Trace executor took: {:?}, input size: {:?}",
         duration,
-        bootloader_inputs[0].len()
+        bootloader_inputs.len()
     );
+
     Ok(bootloader_inputs)
 }
 
@@ -158,70 +218,55 @@ pub fn zkvm_evm_prove_only(
     log::debug!("Compiling Rust...");
     let asm_file_path = Path::new(output_path).join(format!("{}.asm", task));
 
-    let mk_pipeline = || {
-        Pipeline::<GoldilocksField>::default()
-            .with_output(output_path.into(), true)
-            .from_asm_file(asm_file_path.clone())
-            .with_prover_inputs(vec![])
-    };
-    let mk_pipeline_with_data = || mk_pipeline().add_data(666, suite_json);
-
-    let prove_with = Some(BackendType::EStark);
-    let generate_witness_and_prove =
-        |mut pipeline: Pipeline<GoldilocksField>| -> Result<(), Vec<String>> {
-            let start = Instant::now();
-            log::debug!("Generating witness...");
-            pipeline.advance_to(Stage::GeneratedWitness)?;
-            let duration = start.elapsed();
-            log::debug!("Generating witness took: {:?}", duration);
-
-            let start = Instant::now();
log::debug!("Proving ..."); - prove_with.map(|backend| pipeline.with_backend(backend).proof().unwrap()); - let duration = start.elapsed(); - log::debug!("Proving took: {:?}", duration); - Ok(()) - }; + let pipeline = Pipeline::::default() + .with_output(output_path.into(), true) + .from_asm_file(asm_file_path.clone()) + .with_prover_inputs(Default::default()) + .add_data(TEST_CHANNEL, suite_json); - log::debug!("Running witness generation..."); + log::debug!("Running witness generation and proof computation..."); let start = Instant::now(); + + //TODO: if we clone it, we lost the information gained from this function rust_continuation( - mk_pipeline_with_data, + pipeline.clone(), generate_witness_and_prove, bootloader_input, i, ) .unwrap(); + + let verifier_file = Path::new(output_path).join(format!("{}_chunk_{}.circom", task, i)); + log::debug!( + "Running circom verifier generation to {:?}...", + verifier_file + ); + let f = fs::File::create(verifier_file)?; + generate_verifier(pipeline, f).unwrap(); + let duration = start.elapsed(); - log::debug!("Witness generation took: {:?}", duration); + log::debug!( + "Witness generation and proof computation took: {:?}", + duration + ); + Ok(()) } -pub fn rust_continuation( - pipeline_factory: PipelineFactory, +pub fn rust_continuation( + mut pipeline: Pipeline, pipeline_callback: PipelineCallback, bootloader_inputs: Vec, i: usize, ) -> Result<(), E> where - PipelineFactory: Fn() -> Pipeline, PipelineCallback: Fn(Pipeline) -> Result<(), E>, { - let num_chunks = bootloader_inputs.len(); + // Here the fixed columns most likely will have been computed already, + // in which case this will be a no-op. + pipeline.compute_fixed_cols().unwrap(); - log::info!("Advancing pipeline to PilWithEvaluatedFixedCols stage..."); - let pipeline = pipeline_factory(); - let pil_with_evaluated_fixed_cols = pipeline.pil_with_evaluated_fixed_cols().unwrap(); - - // This returns the same pipeline as pipeline_factory() (with the same name, output dir, etc...) - // but starting from the PilWithEvaluatedFixedCols stage. This is more efficient, because we can advance - // to that stage once before we branch into different chunks. - let optimized_pipeline_factory = || { - pipeline_factory().from_pil_with_evaluated_fixed_cols(pil_with_evaluated_fixed_cols.clone()) - }; - - log::info!("\nRunning chunk {} / {}...", i + 1, num_chunks); - let pipeline = optimized_pipeline_factory(); + log::info!("\nRunning chunk {}...", i + 1); let name = format!("{}_chunk_{}", pipeline.name(), i); let pipeline = pipeline.with_name(name); let pipeline = pipeline.add_external_witness_values(vec![( @@ -238,10 +283,6 @@ mod tests { use num_traits::identities::Zero; use std::io::{Read, Write}; - use std::fs; - - //use revm::primitives::address; - // RUST_MIN_STACK=2073741821 RUST_LOG=debug proxychains nohup cargo test --release test_zkvm_evm_prove -- --nocapture & #[test] #[ignore] @@ -275,7 +316,7 @@ mod tests { let output_path = "/tmp/test_lr"; let task = "lr"; - let workspace = format!("vm/{}", task); + let workspace = format!("program/{}", task); let bootloader_inputs = zkvm_evm_generate_chunks(workspace.as_str(), &suite_json, output_path).unwrap(); // save the chunks @@ -287,7 +328,7 @@ mod tests { .zip(&bi_files) .for_each(|(data, filename)| { let mut f = fs::File::create(filename).unwrap(); - for d in data { + for d in &data.0 { f.write_all(&d.to_bytes_le()[0..8]).unwrap(); } });