diff --git a/.github/workflows/saffron.yml b/.github/workflows/saffron.yml new file mode 100644 index 0000000000..5cf6148aa2 --- /dev/null +++ b/.github/workflows/saffron.yml @@ -0,0 +1,58 @@ +name: Saffron CI + +on: + workflow_dispatch: + pull_request: + push: + branches: + - master + +jobs: + run: + name: Run saffron e2e tests + + runs-on: ["ubuntu-latest"] + + strategy: + matrix: + rust_toolchain_version: ["1.74"] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Use shared Rust toolchain setup steps + uses: ./.github/actions/toolchain-shared + with: + rust_toolchain_version: ${{ matrix.rust_toolchain_version }} + + - name: Apply the Rust smart caching + uses: Swatinem/rust-cache@v2 + + - name: Cache SRS data + id: cache-srs + uses: actions/cache@v3 + with: + path: ./srs + # The SRS generation never changes, so we don't need a content-based key + key: srs-cache-key + + - name: Generate and Cache SRS + if: steps.cache-srs.outputs.cache-hit != 'true' + run: cargo test -p kimchi heavy_test_srs_serialization --release + + - name: Build the saffron CLI binary + run: | + cargo build --release --bin saffron + + - name: Run the saffron e2e encoding tests on a small lorem file + run: | + ./saffron/test-encoding.sh saffron/fixtures/lorem.txt ./srs/test_vesta.srs + + # Randomly generate an input file between roughly 50MB and 200MB + - name: Run the saffron e2e encoding on a large random file + run: | + (base64 /dev/urandom | head -c $(shuf -i 50000000-200000000 -n 1) | tr -dc "A-Za-z0-9 " | fold -w100 > bigfile.txt) 2>/dev/null + RUST_LOG=debug ./saffron/test-encoding.sh bigfile.txt ./srs/test_vesta.srs \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 456d4ce2d5..6ba59084a4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -122,6 +122,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" + [[package]] name = "ark-algebra-test-templates" version = "0.4.2" @@ -645,7 +651,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -905,6 +911,16 @@ dependencies = [ "memchr", ] +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn 2.0.58", +] + [[package]] name = "cty" version = "0.2.2" @@ -932,7 +948,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -943,7 +959,7 @@ checksum = "1d1545d67a2149e1d93b7e5c7752dce5a7426eb5d1357ddcfd89336b94444f77" dependencies = [ "darling_core", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -1455,9 +1471,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.67" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -1652,6 +1668,15 @@ version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +[[package]] +name = "matchers" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "memchr" version = "2.7.1" @@ -1903,6 +1928,16 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi 0.3.9", +] + [[package]] name = "num-bigint" version = "0.4.4" @@ -1924,7 +1959,7 @@ checksum = "cfb77679af88f8b125209d354a202862602672222e7f2313fdd6dc349bad4712" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -2149,6 +2184,12 @@ version = "6.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "owo-colors" version = "3.5.0" @@ -2198,7 +2239,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -2212,6 +2253,12 @@ dependencies = [ "sha2", ] +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + [[package]] name = "pkg-config" version = "0.3.29" @@ -2481,10 +2528,19 @@ checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", - "regex-automata", + "regex-automata 0.4.5", "regex-syntax 0.8.2", ] +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + [[package]] name = "regex-automata" version = "0.4.5" @@ -2594,6 +2650,37 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" +[[package]] +name = "saffron" +version = "0.1.0" +dependencies = [ + "anyhow", + "ark-ec", + "ark-ff", + "ark-poly", + "ark-serialize", + "ark-std", + "clap 4.4.18", + "ctor", + "hex", + "kimchi", + "mina-curves", + "mina-poseidon", + "o1-utils", + "once_cell", + "poly-commitment", + "proptest", + "rand", + "rayon", + "rmp-serde", + "serde", + "serde_with", + "thiserror", + "time", + "tracing", + "tracing-subscriber", +] + [[package]] name = "same-file" version = "1.0.6" @@ -2654,7 +2741,7 @@ checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -2694,7 +2781,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -2718,6 +2805,15 @@ dependencies = [ "keccak", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shell-words" version = "1.1.0" @@ -2733,6 +2829,12 @@ dependencies = [ "autocfg", ] +[[package]] +name = 
"smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + [[package]] name = "smawk" version = "0.3.2" @@ -2786,7 +2888,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -2836,9 +2938,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "44cfb93f38070beee36b3fef7d4f5a16f27751d94b187b666a5cc5e9b0d30687" dependencies = [ "proc-macro2", "quote", @@ -2961,7 +3063,17 @@ checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", ] [[package]] @@ -3037,6 +3149,68 @@ dependencies = [ "serde", ] +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.58", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "time", + "tracing", + "tracing-core", + "tracing-log", +] + [[package]] name = "turshi" version = "0.1.0" @@ -3123,6 +3297,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + [[package]] name = "vec_map" version = "0.8.2" @@ -3162,9 +3342,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.90" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" 
dependencies = [ "cfg-if 1.0.0", "wasm-bindgen-macro", @@ -3172,24 +3352,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.90" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.90" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3197,28 +3377,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.90" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.90" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "web-sys" -version = "0.3.67" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -3459,7 +3639,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] [[package]] @@ -3479,5 +3659,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.58", ] diff --git a/Cargo.toml b/Cargo.toml index 6e35076b45..d98a57c192 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,7 +18,8 @@ members = [ "utils", "internal-tracing", "ivc", - "folding" + "folding", + "saffron" ] resolver = "2" @@ -78,7 +79,7 @@ strum_macros = "0.26.1" syn = { version = "1.0.109", features = ["full"] } thiserror = "1.0.30" tinytemplate = "1.1" -wasm-bindgen = "=0.2.90" +wasm-bindgen = "=0.2.87" arrabbiata = { path = "./arrabbiata", version = "0.1.0" } @@ -96,6 +97,7 @@ o1-utils = { path = "./utils", version = "0.1.0" } o1vm = { path = "./o1vm", version = "0.1.0" } optimism = { path = "./optimism", version = "0.1.0" } poly-commitment = { path = "./poly-commitment", version = "0.1.0" } +saffron = { path = "./poly-commitment", version = "0.1.0" } signer = { path = "./signer", version = "0.1.0" } turshi = { path = "./turshi", version = "0.1.0" } utils = { path = "./utils", version = "0.1.0" } diff --git a/arrabbiata/src/constraints.rs b/arrabbiata/src/constraints.rs index 02e1bc5e0e..4f72c381aa 100644 --- a/arrabbiata/src/constraints.rs +++ b/arrabbiata/src/constraints.rs @@ -5,7 +5,6 @@ use crate::{ interpreter::{self, Instruction, Side}, MAX_DEGREE, 
NUMBER_OF_COLUMNS, NUMBER_OF_PUBLIC_INPUTS, }; -use ark_ec::{short_weierstrass::SWCurveConfig, CurveConfig}; use ark_ff::PrimeField; use kimchi::circuits::{ expr::{ConstantTerm::Literal, Expr, ExprInner, Operations, Variable}, @@ -14,10 +13,12 @@ use kimchi::circuits::{ use log::debug; use num_bigint::BigInt; use o1_utils::FieldHelpers; -use poly_commitment::commitment::CommitmentCurve; #[derive(Clone, Debug)] -pub struct Env { +pub struct Env +where + C::BaseField: PrimeField, +{ /// The parameter a is the coefficients of the elliptic curve in affine /// coordinates. pub a: BigInt, @@ -30,11 +31,12 @@ pub struct Env { impl Env where - <::Params as CurveConfig>::BaseField: PrimeField, + C::BaseField: PrimeField, { pub fn new() -> Self { // This check might not be useful - let a: BigInt = ::Params::COEFF_A.to_biguint().into(); + let a: C::BaseField = C::get_curve_params().0; + let a: BigInt = a.to_biguint().into(); assert!( a < C::ScalarField::modulus_biguint().into(), "a is too large" @@ -55,7 +57,10 @@ where /// proof. /// The constraint environment must be instantiated only once, at the last step /// of the computation. -impl InterpreterEnv for Env { +impl InterpreterEnv for Env +where + C::BaseField: PrimeField, +{ type Position = (Column, CurrOrNext); type Variable = E; @@ -113,7 +118,7 @@ impl InterpreterEnv for Env { fn add_constraint(&mut self, constraint: Self::Variable) { let degree = constraint.degree(1, 0); debug!("Adding constraint of degree {degree}: {:}", constraint); - assert!(degree <= MAX_DEGREE, "degree is too high: {}. The folding scheme used currently allows constraint up to degree {}", degree, MAX_DEGREE); + assert!(degree <= MAX_DEGREE.try_into().unwrap(), "degree is too high: {}. The folding scheme used currently allows constraint up to degree {}", degree, MAX_DEGREE); self.constraints.push(constraint); } @@ -311,7 +316,10 @@ impl InterpreterEnv for Env { } } -impl Env { +impl Env +where + C::BaseField: PrimeField, +{ /// Get all the constraints for the IVC circuit, only. /// /// The following gadgets are used in the IVC circuit: @@ -380,7 +388,7 @@ impl Env { impl Default for Env where - <::Params as CurveConfig>::BaseField: PrimeField, + C::BaseField: PrimeField, { fn default() -> Self { Self::new() diff --git a/arrabbiata/src/curve.rs b/arrabbiata/src/curve.rs index cc1a72b2c7..7f0d265c0c 100644 --- a/arrabbiata/src/curve.rs +++ b/arrabbiata/src/curve.rs @@ -5,10 +5,13 @@ //! The goal of this trait is to parametrize the whole library with the //! different curves. -use ark_ec::short_weierstrass::Affine; +use ark_ec::short_weierstrass::{Affine, SWCurveConfig}; +use ark_ff::PrimeField; use kimchi::curve::{pallas_endos, vesta_endos}; use mina_curves::pasta::curves::{pallas::PallasParameters, vesta::VestaParameters}; -use mina_poseidon::{constants::SpongeConstants, poseidon::ArithmeticSpongeParams}; +use mina_poseidon::{ + constants::SpongeConstants, poseidon::ArithmeticSpongeParams, sponge::DefaultFqSponge, FqSponge, +}; use poly_commitment::commitment::{CommitmentCurve, EndoCurve}; #[derive(Clone)] @@ -28,7 +31,13 @@ impl SpongeConstants for PlonkSpongeConstants { /// Represents additional information that a curve needs in order to be used /// with Arrabbiata. -pub trait ArrabbiataCurve: CommitmentCurve + EndoCurve { +/// +/// The trait [CommitmentCurve] enforces the curve to be given in short +/// Weierstrass form. +pub trait ArrabbiataCurve: CommitmentCurve + EndoCurve +where + Self::BaseField: PrimeField, +{ /// A human readable name. 
const NAME: &'static str; @@ -52,6 +61,33 @@ pub trait ArrabbiataCurve: CommitmentCurve + EndoCurve { /// Provides the coefficient for the curve endomorphism over the other /// field, called q in some places. fn other_curve_endo() -> &'static Self::ScalarField; + + /// Return the coefficients `a` and `b` of the equation + /// `y^2 = x^3 + a x + b` defining the curve. + fn get_curve_params() -> (Self::BaseField, Self::BaseField); + + /// Create a new sponge, with an empty state (i.e. initialized to zero). + fn create_new_sponge() -> DefaultFqSponge; + + /// Absorb an element of the base field into the sponge. + /// + /// This method is supposed to be an alias to `sponge.absorb_fq(&[fq])`. + /// However, it seems that the compiler requires some additional type + /// constraints if there is generic code over the trait `ArrabbiataCurve`. + fn absorb_fq( + sponge: &mut DefaultFqSponge, + fq: Self::BaseField, + ); + + /// Absorb a list of curve points into the sponge. + /// + /// This method is supposed to be an alias to `sponge.absorb_g(&[gs])`. + /// However, it seems that the compiler requires some additional type + /// constraints if there is generic code over the trait `ArrabbiataCurve`. + fn absorb_curve_points( + sponge: &mut DefaultFqSponge, + comms: &[Self], + ); } impl ArrabbiataCurve for Affine { @@ -76,6 +112,30 @@ impl ArrabbiataCurve for Affine { fn other_curve_endo() -> &'static Self::ScalarField { &vesta_endos().0 } + + fn get_curve_params() -> (Self::BaseField, Self::BaseField) { + (PallasParameters::COEFF_A, PallasParameters::COEFF_B) + } + + fn create_new_sponge() -> DefaultFqSponge { + let sponge: DefaultFqSponge = + DefaultFqSponge::new(Self::other_curve_sponge_params()); + sponge + } + + fn absorb_fq( + sponge: &mut DefaultFqSponge, + fq: Self::BaseField, + ) { + sponge.absorb_fq(&[fq]) + } + + fn absorb_curve_points( + sponge: &mut DefaultFqSponge, + comms: &[Self], + ) { + sponge.absorb_g(comms) + } } impl ArrabbiataCurve for Affine { @@ -100,4 +160,28 @@ impl ArrabbiataCurve for Affine { fn other_curve_endo() -> &'static Self::ScalarField { &pallas_endos().0 } + + fn get_curve_params() -> (Self::BaseField, Self::BaseField) { + (VestaParameters::COEFF_A, VestaParameters::COEFF_B) + } + + fn create_new_sponge() -> DefaultFqSponge { + let sponge: DefaultFqSponge = + DefaultFqSponge::new(Self::other_curve_sponge_params()); + sponge + } + + fn absorb_fq( + sponge: &mut DefaultFqSponge, + fq: Self::BaseField, + ) { + sponge.absorb_fq(&[fq]) + } + + fn absorb_curve_points( + sponge: &mut DefaultFqSponge, + comms: &[Self], + ) { + sponge.absorb_g(comms) + } } diff --git a/arrabbiata/src/interpreter.rs b/arrabbiata/src/interpreter.rs index 7c145b972f..3f5264fd0f 100644 --- a/arrabbiata/src/interpreter.rs +++ b/arrabbiata/src/interpreter.rs @@ -329,8 +329,8 @@ //! the messages are kept in an "application environment", located in the //! "witness environment". The structure [crate::witness::Env] is used to keep //! track of the messages that must be passed. -//! Each step starts with an "application state" and end with another that are -//! accumuated. The final state is passed through a "digest" to the next +//! Each step starts with a "program state" and ends with another that is +//! accumulated. The final state is passed through a "digest" to the next //! instance. The digest is performed using a hash function (see [Hash - //! Poseidon](#hash---poseidon)). We often use the term "sponge" to refer to the //! hash function or the state of the hash function. @@ -359,7 +359,7 @@ //! 
- `Ct_(p, n, i)` for the commitments to the cross-terms of degree `i`. //! witness/commitments. //! - `u_(p, n)` for the challenge used to homogenize the constraints. -//! - `o_(p, n)` for the final digest of the application state. +//! - `o_(p, n)` for the final digest of the sponge state. //! //! Here is a diagram (FIXME: this is not complete) that shows the messages that //! must be passed: @@ -392,10 +392,10 @@ //! | Receive in PI | | //! | -------------- | | //! | - Commitments to w_(p, (n - 1)) | | -//! | - Final digest of the application | | +//! | - Final digest of the program | | //! | state at instance (n - 1) | | //! | (o_(q, n - 1)). | | -//! | - Final digest of the application | | +//! | - Final digest of the program | | //! | state at instance (n - 2) | | //! | (o_(p, n - 1)). | | //! | | | diff --git a/arrabbiata/src/lib.rs b/arrabbiata/src/lib.rs index c3758725f2..6358963b28 100644 --- a/arrabbiata/src/lib.rs +++ b/arrabbiata/src/lib.rs @@ -17,7 +17,7 @@ pub mod witness; /// The maximum degree of the polynomial that can be represented by the /// polynomial-time function the library supports. -pub const MAX_DEGREE: u64 = 5; +pub const MAX_DEGREE: usize = 5; /// The minimum SRS size required to use Nova, in base 2. /// Requiring at least 2^16 to perform 16-bit range checks. diff --git a/arrabbiata/src/main.rs b/arrabbiata/src/main.rs index 69b72b0c67..5e2cbea153 100644 --- a/arrabbiata/src/main.rs +++ b/arrabbiata/src/main.rs @@ -101,15 +101,14 @@ pub fn main() { elapsed = start_iteration.elapsed().as_micros() ); - // FIXME: - // update current instance with the previous "next" commitments (i.e. - // env.next_commitments) - // update next instance with current commitments - // FIXME: Check twice the updated commitments - env.compute_and_update_previous_commitments(); - - // FIXME: - // Absorb all commitments in the sponge. + // Commit to the program state. + // Depending on the iteration, either E1 or E2 will be used. + // The environment will keep the commitments to the program state to + // verify and accumulate it at the next iteration. + env.commit_state(); + + // Absorb the last program state. + env.absorb_state(); // FIXME: // Coin challenges β and γ for the permutation argument diff --git a/arrabbiata/src/witness.rs b/arrabbiata/src/witness.rs index ed9805038f..f6514cdd28 100644 --- a/arrabbiata/src/witness.rs +++ b/arrabbiata/src/witness.rs @@ -1,4 +1,4 @@ -use ark_ec::models::short_weierstrass::SWCurveConfig; +use ark_ec::CurveConfig; use ark_ff::PrimeField; use ark_poly::Evaluations; use kimchi::circuits::{domains::EvaluationDomains, gate::CurrOrNext}; @@ -7,7 +7,7 @@ use mina_poseidon::constants::SpongeConstants; use num_bigint::{BigInt, BigUint}; use num_integer::Integer; use o1_utils::field_helpers::FieldHelpers; -use poly_commitment::{ipa::SRS, PolyComm, SRS as _}; +use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _}; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; use std::time::Instant; @@ -19,31 +19,47 @@ use crate::{ NUMBER_OF_VALUES_TO_ABSORB_PUBLIC_IO, }; -/// The first instruction in the IVC is the Poseidon permutation. It is used to -/// start hashing the public input. +/// The first instruction in the verifier circuit (often shortened to "IVC" in +/// the crate) is the Poseidon permutation. It is used to start hashing the +/// public input. pub const IVC_STARTING_INSTRUCTION: Instruction = Instruction::Poseidon(0); -/// An environment that can be shared between IVC instances. 
+/// An environment is used to contain the state of a long "running program". /// -/// It contains all the accumulators that can be picked for a given fold -/// instance k, including the sponges. +/// The running program is composed of two parts: one user application and one +/// verifier application. The verifier application is used to encode the +/// correctness of previous program state computations. +/// +/// The term "app(lication) state" will be used to refer to the state of the +/// user application, and the term "IVC state" will be used to refer to the +/// state of the verifier application. The term "program state" will be used to refer to +/// the state of the whole program. +/// +/// The environment contains all the accumulators that can be picked for a given +/// fold instance k, including the sponges. /// /// The environment is run over big integers to avoid performing /// reduction at all step. Instead the user implementing the interpreter can /// reduce in the corresponding field when they want. +/// +/// The environment is generic over two curves (called E1 and E2) that are +/// supposed to form a cycle. pub struct Env< Fp: PrimeField, Fq: PrimeField, E1: ArrabbiataCurve, E2: ArrabbiataCurve, -> { +> where + E1::BaseField: PrimeField, + E2::BaseField: PrimeField, +{ // ---------------- // Setup related (domains + SRS) /// Domain for Fp - pub domain_fp: EvaluationDomains, + pub domain_fp: EvaluationDomains, /// Domain for Fq - pub domain_fq: EvaluationDomains, + pub domain_fq: EvaluationDomains, /// SRS for the first curve pub srs_e1: SRS, @@ -61,9 +77,9 @@ pub struct Env< // FIXME: use a blinded comm and also fold the blinder pub ivc_accumulator_e2: Vec>, - /// Commitments to the previous instances - pub previous_commitments_e1: Vec>, - pub previous_commitments_e2: Vec>, + /// Commitments to the previous program states. + pub previous_committed_state_e1: Vec>, + pub previous_committed_state_e2: Vec>, // ---------------- // ---------------- @@ -124,11 +140,38 @@ pub struct Env< pub sponge_e1: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH], pub sponge_e2: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH], + /// Sponge state used by the prover for the current iteration. + /// + /// This sponge is used at the current iteration to absorb commitments of + /// the program state and generate the challenges for the current iteration. + /// The outputs of the sponge will be verified by the verifier of the next + /// iteration. + pub prover_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH], + + /// Sponge state used by the verifier for the current iteration. + /// + /// This sponge is used at the current iteration to build the verifier + /// circuit. The state will need to match with the previous prover sponge + /// state. + pub verifier_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH], + /// The current iteration of the IVC pub current_iteration: u64, - /// A previous hash, encoded in 2 chunks of 128 bits. - pub previous_hash: [u128; 2], + /// The digest of the program state before executing the last iteration. + /// The value will be used to initialize the execution trace of the verifier + /// in the next iteration, in particular to verify that the challenges have + /// been generated correctly. + /// + /// The value is a 128-bit value. + pub last_program_digest_before_execution: BigInt, + + /// The digest of the program state after executing the last iteration. + /// The value will be used to initialize the sponge before committing to the + /// next iteration. 
+ /// + /// The value is a 128-bit value. + pub last_program_digest_after_execution: BigInt, /// The coin folding combiner will be used to generate the combination of /// folding instances @@ -191,8 +234,8 @@ impl< E2: ArrabbiataCurve, > InterpreterEnv for Env where - ::BaseField: PrimeField, - ::BaseField: PrimeField, + E1::BaseField: PrimeField, + E2::BaseField: PrimeField, { type Position = (Column, CurrOrNext); @@ -242,9 +285,9 @@ where unimplemented!("Only works for private inputs") }; let modulus: BigInt = if self.current_iteration % 2 == 0 { - Fp::modulus_biguint().into() + E1::ScalarField::modulus_biguint().into() } else { - Fq::modulus_biguint().into() + E2::ScalarField::modulus_biguint().into() }; let v = v.mod_floor(&modulus); match row { @@ -264,9 +307,9 @@ unimplemented!("Only works for public input columns") }; let modulus: BigInt = if self.current_iteration % 2 == 0 { - Fp::modulus_biguint().into() + E1::ScalarField::modulus_biguint().into() } else { - Fq::modulus_biguint().into() + E2::ScalarField::modulus_biguint().into() }; let v = v.mod_floor(&modulus); self.public_state[idx].clone_from(&v); @@ -281,9 +324,9 @@ fn constrain_boolean(&mut self, x: Self::Variable) { let modulus: BigInt = if self.current_iteration % 2 == 0 { - Fp::modulus_biguint().into() + E1::ScalarField::modulus_biguint().into() } else { - Fq::modulus_biguint().into() + E2::ScalarField::modulus_biguint().into() }; let x = x.mod_floor(&modulus); assert!(x == BigInt::from(0_usize) || x == BigInt::from(1_usize)); @@ -415,10 +458,10 @@ where unsafe fn save_poseidon_state(&mut self, x: Self::Variable, i: usize) { if self.current_iteration % 2 == 0 { - let modulus: BigInt = Fp::modulus_biguint().into(); + let modulus: BigInt = E1::ScalarField::modulus_biguint().into(); self.sponge_e1[i] = x.mod_floor(&modulus) } else { - let modulus: BigInt = Fq::modulus_biguint().into(); + let modulus: BigInt = E2::ScalarField::modulus_biguint().into(); self.sponge_e2[i] = x.mod_floor(&modulus) } } @@ -498,7 +541,7 @@ where if self.current_iteration % 2 == 0 { match side { Side::Left => { - let pt = self.previous_commitments_e2[i_comm].get_first_chunk(); + let pt = self.previous_committed_state_e2[i_comm].get_first_chunk(); // We suppose we never have a commitment equals to the // point at infinity let (pt_x, pt_y) = pt.to_coordinates().unwrap(); @@ -524,7 +567,7 @@ } else { match side { Side::Left => { - let pt = self.previous_commitments_e1[i_comm].get_first_chunk(); + let pt = self.previous_committed_state_e1[i_comm].get_first_chunk(); // We suppose we never have a commitment equals to the // point at infinity let (pt_x, pt_y) = pt.to_coordinates().unwrap(); @@ -568,11 +611,11 @@ } Side::Right => { if self.current_iteration % 2 == 0 { - let pt = self.previous_commitments_e2[i_comm].get_first_chunk(); + let pt = self.previous_committed_state_e2[i_comm].get_first_chunk(); let (x, y) = pt.to_coordinates().unwrap(); (x.to_biguint().into(), y.to_biguint().into()) } else { - let pt = self.previous_commitments_e1[i_comm].get_first_chunk(); + let pt = self.previous_committed_state_e1[i_comm].get_first_chunk(); let (x, y) = pt.to_coordinates().unwrap(); (x.to_biguint().into(), y.to_biguint().into()) } @@ -634,14 +677,14 @@ where /// Zero is not allowed as an input. 
unsafe fn inverse(&mut self, pos: Self::Position, x: Self::Variable) -> Self::Variable { let res = if self.current_iteration % 2 == 0 { - Fp::from_biguint(&x.to_biguint().unwrap()) + E1::ScalarField::from_biguint(&x.to_biguint().unwrap()) .unwrap() .inverse() .unwrap() .to_biguint() .into() } else { - Fq::from_biguint(&x.to_biguint().unwrap()) + E2::ScalarField::from_biguint(&x.to_biguint().unwrap()) .unwrap() .inverse() .unwrap() @@ -661,9 +704,9 @@ where y2: Self::Variable, ) -> Self::Variable { let modulus: BigInt = if self.current_iteration % 2 == 0 { - Fp::modulus_biguint().into() + E1::ScalarField::modulus_biguint().into() } else { - Fq::modulus_biguint().into() + E2::ScalarField::modulus_biguint().into() }; // If it is not the same point, we compute lambda as: // - λ = (Y1 - Y2) / (X1 - X2) @@ -685,9 +728,11 @@ where }; let num = { let a: BigInt = if self.current_iteration % 2 == 0 { - (E2::Params::COEFF_A).to_biguint().into() + let a: E2::BaseField = E2::get_curve_params().0; + a.to_biguint().into() } else { - (E1::Params::COEFF_A).to_biguint().into() + let a: E1::BaseField = E1::get_curve_params().0; + a.to_biguint().into() }; let x1_square = x1.clone() * x1.clone(); let two_x1_square = x1_square.clone() + x1_square.clone(); @@ -709,9 +754,9 @@ where y1: Self::Variable, ) -> (Self::Variable, Self::Variable) { let modulus: BigInt = if self.current_iteration % 2 == 0 { - Fp::modulus_biguint().into() + E1::ScalarField::modulus_biguint().into() } else { - Fq::modulus_biguint().into() + E2::ScalarField::modulus_biguint().into() }; // - λ = (3X1^2 + a) / (2Y1) // We compute λ and use an additional column as a temporary value @@ -725,9 +770,11 @@ where }; let num = { let a: BigInt = if self.current_iteration % 2 == 0 { - (E2::Params::COEFF_A).to_biguint().into() + let a: E2::BaseField = E2::get_curve_params().0; + a.to_biguint().into() } else { - (E1::Params::COEFF_A).to_biguint().into() + let a: E1::BaseField = E1::get_curve_params().0; + a.to_biguint().into() }; let x1_square = x1.clone() * x1.clone(); let two_x1_square = x1_square.clone() + x1_square.clone(); @@ -757,6 +804,11 @@ impl< E1: ArrabbiataCurve, E2: ArrabbiataCurve, > Env +where + E1::BaseField: PrimeField, + E2::BaseField: PrimeField, + <::Params as CurveConfig>::BaseField: PrimeField, + <::Params as CurveConfig>::BaseField: PrimeField, { pub fn new( srs_log2_size: usize, @@ -765,16 +817,16 @@ impl< sponge_e2: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH], ) -> Self { { - assert!(Fp::MODULUS_BIT_SIZE <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fp is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}"); + assert!(E1::ScalarField::MODULUS_BIT_SIZE <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fp is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}"); assert!(Fq::MODULUS_BIT_SIZE <= MAXIMUM_FIELD_SIZE_IN_BITS.try_into().unwrap(), "The size of the field Fq is too large, it should be less than {MAXIMUM_FIELD_SIZE_IN_BITS}"); - let modulus_fp = Fp::modulus_biguint(); + let modulus_fp = E1::ScalarField::modulus_biguint(); let alpha = PlonkSpongeConstants::PERM_SBOX; assert!( (modulus_fp - BigUint::from(1_u64)).gcd(&BigUint::from(alpha)) == BigUint::from(1_u64), "The modulus of Fp should be coprime with {alpha}" ); - let modulus_fq = Fq::modulus_biguint(); + let modulus_fq = E2::ScalarField::modulus_biguint(); let alpha = PlonkSpongeConstants::PERM_SBOX; assert!( (modulus_fq - BigUint::from(1_u64)).gcd(&BigUint::from(alpha)) @@ -783,8 +835,8 @@ impl< 
); } let srs_size = 1 << srs_log2_size; - let domain_fp = EvaluationDomains::::create(srs_size).unwrap(); - let domain_fq = EvaluationDomains::::create(srs_size).unwrap(); + let domain_fp = EvaluationDomains::::create(srs_size).unwrap(); + let domain_fq = EvaluationDomains::::create(srs_size).unwrap(); info!("Create an SRS of size {srs_log2_size} for the first curve"); let srs_e1: SRS = { @@ -822,10 +874,10 @@ impl< }; // Default set to the blinders. Using double to make the EC scaling happy. - let previous_commitments_e1: Vec> = (0..NUMBER_OF_COLUMNS) + let previous_committed_state_e1: Vec> = (0..NUMBER_OF_COLUMNS) .map(|_| PolyComm::new(vec![(srs_e1.h + srs_e1.h).into()])) .collect(); - let previous_commitments_e2: Vec> = (0..NUMBER_OF_COLUMNS) + let previous_committed_state_e2: Vec> = (0..NUMBER_OF_COLUMNS) .map(|_| PolyComm::new(vec![(srs_e2.h + srs_e2.h).into()])) .collect(); // FIXME: zero will not work. @@ -839,6 +891,10 @@ impl< // FIXME: challenges let challenges: Vec = vec![]; + let prover_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH] = + std::array::from_fn(|_| BigInt::from(0_u64)); + let verifier_sponge_state: [BigInt; PlonkSpongeConstants::SPONGE_WIDTH] = + std::array::from_fn(|_| BigInt::from(0_u64)); Self { // ------- // Setup @@ -851,8 +907,8 @@ impl< // IVC only ivc_accumulator_e1, ivc_accumulator_e2, - previous_commitments_e1, - previous_commitments_e2, + previous_committed_state_e1, + previous_committed_state_e2, // ------ // ------ idx_var: 0, @@ -867,8 +923,13 @@ impl< current_instruction: IVC_STARTING_INSTRUCTION, sponge_e1, sponge_e2, + prover_sponge_state, + verifier_sponge_state, current_iteration: 0, - previous_hash: [0; 2], + // FIXME: set a correct value + last_program_digest_before_execution: BigInt::from(0_u64), + // FIXME: set a correct value + last_program_digest_after_execution: BigInt::from(0_u64), r: BigInt::from(0_usize), // Initialize the temporary accumulators with 0 temporary_accumulators: ( @@ -909,40 +970,104 @@ impl< // TODO } - /// Compute the commitments to the current witness, and update the previous - /// instances. - // Might be worth renaming this function - pub fn compute_and_update_previous_commitments(&mut self) { + /// Commit to the program state and updating the environment with the + /// result. + /// + /// This method is supposed to be called after a new iteration of the + /// program has been executed. + pub fn commit_state(&mut self) { if self.current_iteration % 2 == 0 { + assert_eq!( + self.current_row as u64, + self.domain_fp.d1.size, + "The program has not been fully executed. Missing {} rows", + self.domain_fp.d1.size - self.current_row as u64, + ); let comms: Vec> = self .witness .par_iter() .map(|evals| { - let evals: Vec = evals + let evals: Vec = evals .par_iter() - .map(|x| Fp::from_biguint(&x.to_biguint().unwrap()).unwrap()) + .map(|x| E1::ScalarField::from_biguint(&x.to_biguint().unwrap()).unwrap()) .collect(); let evals = Evaluations::from_vec_and_domain(evals.to_vec(), self.domain_fp.d1); self.srs_e1 .commit_evaluations_non_hiding(self.domain_fp.d1, &evals) }) .collect(); - self.previous_commitments_e1 = comms + self.previous_committed_state_e1 = comms } else { + assert_eq!( + self.current_row as u64, + self.domain_fq.d1.size, + "The program has not been fully executed. 
Missing {} rows", + self.domain_fq.d1.size - self.current_row as u64, + ); let comms: Vec> = self .witness .iter() .map(|evals| { - let evals: Vec = evals + let evals: Vec = evals .par_iter() - .map(|x| Fq::from_biguint(&x.to_biguint().unwrap()).unwrap()) + .map(|x| E2::ScalarField::from_biguint(&x.to_biguint().unwrap()).unwrap()) .collect(); let evals = Evaluations::from_vec_and_domain(evals.to_vec(), self.domain_fq.d1); self.srs_e2 .commit_evaluations_non_hiding(self.domain_fq.d1, &evals) }) .collect(); - self.previous_commitments_e2 = comms + self.previous_committed_state_e2 = comms + } + } + + /// Absorb the last committed program state in the correct sponge. + /// + /// For a description of the messages to be given to the sponge, including + /// the expected instantiation, refer to the section "Message Passing" in + /// [crate::interpreter]. + pub fn absorb_state(&mut self) { + if self.current_iteration % 2 == 0 { + let mut sponge = E1::create_new_sponge(); + let previous_state: E1::BaseField = E1::BaseField::from_biguint( + &self + .last_program_digest_after_execution + .to_biguint() + .unwrap(), + ) + .unwrap(); + E1::absorb_fq(&mut sponge, previous_state); + self.previous_committed_state_e1 + .iter() + .for_each(|comm| E1::absorb_curve_points(&mut sponge, &comm.chunks)); + let state: Vec = sponge + .sponge + .state + .iter() + .map(|x| x.to_biguint().into()) + .collect(); + self.prover_sponge_state = state.try_into().unwrap() + } else { + let mut sponge = E2::create_new_sponge(); + let previous_state: E2::BaseField = E2::BaseField::from_biguint( + &self + .last_program_digest_after_execution + .to_biguint() + .unwrap(), + ) + .unwrap(); + E2::absorb_fq(&mut sponge, previous_state); + self.previous_committed_state_e2 + .iter() + .for_each(|comm| E2::absorb_curve_points(&mut sponge, &comm.chunks)); + + let state: Vec = sponge + .sponge + .state + .iter() + .map(|x| x.to_biguint().into()) + .collect(); + self.prover_sponge_state = state.try_into().unwrap() } } diff --git a/arrabbiata/tests/witness.rs b/arrabbiata/tests/witness.rs index 400937091b..dd9d9530a2 100644 --- a/arrabbiata/tests/witness.rs +++ b/arrabbiata/tests/witness.rs @@ -79,7 +79,7 @@ fn test_unit_witness_elliptic_curve_addition() { assert_eq!(env.current_iteration, 0); let (exp_x3, exp_y3) = { let res: Pallas = (env.ivc_accumulator_e2[0].get_first_chunk() - + env.previous_commitments_e2[0].get_first_chunk()) + + env.previous_committed_state_e2[0].get_first_chunk()) .into(); let (x3, y3) = res.to_coordinates().unwrap(); ( @@ -99,7 +99,7 @@ fn test_unit_witness_elliptic_curve_addition() { assert_eq!(env.current_iteration, 1); let (exp_x3, exp_y3) = { let res: Vesta = (env.ivc_accumulator_e1[0].get_first_chunk() - + env.previous_commitments_e1[0].get_first_chunk()) + + env.previous_committed_state_e1[0].get_first_chunk()) .into(); let (x3, y3) = res.to_coordinates().unwrap(); ( @@ -119,7 +119,7 @@ fn test_unit_witness_elliptic_curve_addition() { assert_eq!(env.current_iteration, 2); let (exp_x3, exp_y3) = { let res: Pallas = (env.ivc_accumulator_e2[0].get_first_chunk() - + env.previous_commitments_e2[0].get_first_chunk()) + + env.previous_committed_state_e2[0].get_first_chunk()) .into(); let (x3, y3) = res.to_coordinates().unwrap(); ( @@ -188,7 +188,7 @@ where let x = Fq::rand(rng); Pallas::generator().mul_bigint(x.into_bigint()).into() }; - env.previous_commitments_e2[0] = PolyComm::new(vec![p1]); + env.previous_committed_state_e2[0] = PolyComm::new(vec![p1]); // We only go up to the maximum bit field size. 
(0..MAXIMUM_FIELD_SIZE_IN_BITS).for_each(|bit_idx| { diff --git a/circuit-construction/src/constants.rs b/circuit-construction/src/constants.rs deleted file mode 100644 index 803f18a9c0..0000000000 --- a/circuit-construction/src/constants.rs +++ /dev/null @@ -1,44 +0,0 @@ -use ark_ec::AffineRepr; -use ark_ff::Field; -use kimchi::curve::KimchiCurve; -use mina_curves::pasta::{Fp, Fq, Pallas as PallasAffine, Vesta as VestaAffine}; -use mina_poseidon::poseidon::ArithmeticSpongeParams; -use poly_commitment::{commitment::CommitmentCurve, srs::endos}; - -/// The type of possible constants in the circuit -#[derive(Clone)] -pub struct Constants { - pub poseidon: &'static ArithmeticSpongeParams, - pub endo: F, - pub base: (F, F), -} - -/// Constants for the base field of Pallas -/// /// -/// # Panics -/// -/// Will panic if `PallasAffine::generator()` returns None. -pub fn fp_constants() -> Constants { - let (endo_q, _endo_r) = endos::(); - let base = PallasAffine::generator().to_coordinates().unwrap(); - Constants { - poseidon: VestaAffine::sponge_params(), - endo: endo_q, - base, - } -} - -/// Constants for the base field of Vesta -/// -/// # Panics -/// -/// Will panic if `VestaAffine::generator()` returns None. -pub fn fq_constants() -> Constants { - let (endo_q, _endo_r) = endos::(); - let base = VestaAffine::generator().to_coordinates().unwrap(); - Constants { - poseidon: PallasAffine::sponge_params(), - endo: endo_q, - base, - } -} diff --git a/circuit-construction/src/lib.rs b/circuit-construction/src/lib.rs deleted file mode 100644 index 27618ed679..0000000000 --- a/circuit-construction/src/lib.rs +++ /dev/null @@ -1,33 +0,0 @@ -#![doc = include_str!("../../README.md")] - -/// Definition of possible constants in circuits -pub mod constants; -/// This contains the prover functions, ranging from curves definitions to prover index and proof generation -pub mod prover; -/// This is the actual writer with all of the available functions to set up a circuit and its corresponding constraint system -pub mod writer; - -#[cfg(test)] -mod tests; - -/// This contains the Kimchi dependencies being used -pub mod prologue { - pub use super::constants::{fp_constants, fq_constants, Constants}; - pub use super::prover::{generate_prover_index, prove, CoordinateCurve}; - pub use super::writer::{Cs, Var}; - pub use ark_ec::{AffineRepr, CurveGroup}; - pub use ark_ff::{FftField, PrimeField, UniformRand}; - pub use ark_poly::{EvaluationDomain, Radix2EvaluationDomain}; - pub use groupmap::GroupMap; - pub use kimchi::verifier::verify; - pub use mina_curves::pasta::{ - Fp, Pallas as PallasAffine, Vesta as VestaAffine, VestaParameters, - }; - pub use mina_poseidon::{ - constants::*, - poseidon::{ArithmeticSponge, Sponge}, - sponge::{DefaultFqSponge, DefaultFrSponge}, - }; - pub use poly_commitment::{commitment::CommitmentCurve, srs::SRS}; - pub use std::sync::Arc; -} diff --git a/circuit-construction/src/prover.rs b/circuit-construction/src/prover.rs deleted file mode 100644 index 2841c8dfbd..0000000000 --- a/circuit-construction/src/prover.rs +++ /dev/null @@ -1,136 +0,0 @@ -use crate::writer::{Cs, GateSpec, System, Var, WitnessGenerator}; -use ark_ec::AffineRepr; -use ark_ff::{One, PrimeField, Zero}; -use kimchi::{ - circuits::{constraints::ConstraintSystem, gate::GateType, wires::COLUMNS}, - curve::KimchiCurve, - plonk_sponge::FrSponge, - proof::ProverProof, - prover_index::ProverIndex, -}; -use mina_poseidon::FqSponge; -use poly_commitment::{ - commitment::{CommitmentCurve, PolyComm}, - srs::{endos, SRS}, -}; -use 
std::array; - -/// Given an index, a group map, custom blinders for the witness, a public input vector, and a circuit `main`, it creates a proof. -/// -/// # Panics -/// -/// Will panic if recursive proof creation returns `ProverError`. -pub fn prove( - index: &ProverIndex, - group_map: &G::Map, - blinders: Option<[Option; COLUMNS]>, - public_input: &[G::ScalarField], - mut main: H, -) -> ProverProof -where - H: FnMut(&mut WitnessGenerator, Vec>), - G::BaseField: PrimeField, - G: KimchiCurve, - EFqSponge: Clone + FqSponge, - EFrSponge: FrSponge, -{ - // create the witness generator - let mut gen: WitnessGenerator = WitnessGenerator::new(public_input); - - // run the witness generation - let public_vars = public_input - .iter() - .map(|x| Var { - index: 0, - value: Some(*x), - }) - .collect(); - main(&mut gen, public_vars); - - // get the witness columns - gen.curr_gate_count(); - let columns = gen.columns(); - - // custom blinders for the witness commitment - let blinders: [Option>; COLUMNS] = match blinders { - None => array::from_fn(|_| None), - Some(bs) => array::from_fn(|i| { - bs[i].map(|b| PolyComm { - unshifted: vec![b], - shifted: None, - }) - }), - }; - - // create the proof - ProverProof::create_recursive::( - group_map, - columns, - &[], - index, - vec![], - Some(blinders), - ) - .unwrap() -} - -/// Creates the prover index on input an `srs`, used `constants`, parameters for Poseidon, number of public inputs, and a specific circuit -/// -/// # Panics -/// -/// Will panic if `constraint_system` is not built with `public` input. -pub fn generate_prover_index( - srs: std::sync::Arc>, - public: usize, - main: Circuit, -) -> ProverIndex -where - Circuit: FnOnce(&mut System, Vec>), - Curve: KimchiCurve, -{ - let mut system: System = System::default(); - let z = Curve::ScalarField::zero(); - - // create public input variables - let public_input_row = vec![Curve::ScalarField::one(), z, z, z, z, z, z, z, z, z]; - let public_input: Vec<_> = (0..public) - .map(|_| { - let v = system.var(|| panic!("fail")); - - system.gate(GateSpec { - typ: GateType::Generic, - row: vec![Some(v)], - coeffs: public_input_row.clone(), - }); - v - }) - .collect(); - - main(&mut system, public_input); - - let gates = system.gates(); - - // Other base field = self scalar field - let (endo_q, _endo_r) = endos::(); - //let (endo_q, _endo_r) = Curve::endos(); - - let constraint_system = ConstraintSystem::::create(gates) - .public(public) - .build() - // TODO: return a Result instead of panicking - .expect("couldn't construct constraint system"); - - ProverIndex::::create(constraint_system, endo_q, srs) -} - -/// Handling coordinates in an affine curve -pub trait CoordinateCurve: AffineRepr { - /// Returns the coordinates in the curve as two points of the base field - fn to_coords(&self) -> Option<(Self::BaseField, Self::BaseField)>; -} - -impl CoordinateCurve for G { - fn to_coords(&self) -> Option<(Self::BaseField, Self::BaseField)> { - CommitmentCurve::to_coordinates(self) - } -} diff --git a/circuit-construction/src/tests/example_proof.rs b/circuit-construction/src/tests/example_proof.rs deleted file mode 100644 index ffa08a3527..0000000000 --- a/circuit-construction/src/tests/example_proof.rs +++ /dev/null @@ -1,102 +0,0 @@ -use crate::prologue::*; -use kimchi::curve::KimchiCurve; -use std::ops::Mul; - -type SpongeQ = DefaultFqSponge; -type SpongeR = DefaultFrSponge; - -pub struct Witness { - pub s: G::ScalarField, - pub preimage: G::BaseField, -} - -// Prove knowledge of discrete log and poseidon preimage of a 
hash -pub fn circuit< - F: PrimeField + FftField, - G: AffineRepr + CoordinateCurve, - Sys: Cs, ->( - constants: &Constants, - // The witness - witness: Option<&Witness>, - sys: &mut Sys, - public_input: Vec>, -) { - let zero = sys.constant(F::zero()); - - let constant_curve_pt = |sys: &mut Sys, (x, y)| { - let x = sys.constant(x); - let y = sys.constant(y); - (x, y) - }; - - let base = constant_curve_pt(sys, G::generator().to_coords().unwrap()); - let scalar = sys.scalar(G::ScalarField::MODULUS_BIT_SIZE as usize, || { - witness.as_ref().unwrap().s - }); - let actual = sys.scalar_mul(zero, base, scalar); - - let preimage = sys.var(|| witness.as_ref().unwrap().preimage); - let actual_hash = sys.poseidon(constants, vec![preimage, zero, zero])[0]; - - sys.assert_eq(actual.0, public_input[0]); - sys.assert_eq(actual.1, public_input[1]); - sys.assert_eq(actual_hash, public_input[2]); -} - -const PUBLIC_INPUT_LENGTH: usize = 3; - -#[test] -fn test_example_circuit() { - use mina_curves::pasta::{Pallas, Vesta}; - // create SRS - let srs = { - let srs = SRS::::create(1 << 7); // 2^7 = 128 - srs.get_lagrange_basis(Radix2EvaluationDomain::new(srs.g.len()).unwrap()); - Arc::new(srs) - }; - - let proof_system_constants = fp_constants(); - - // generate circuit and index - let prover_index = generate_prover_index::<_, _>(srs, PUBLIC_INPUT_LENGTH, |sys, p| { - circuit::<_, Pallas, _>(&proof_system_constants, None, sys, p) - }); - - let group_map = ::Map::setup(); - - let mut rng = rand::thread_rng(); - - // create witness - let private_key = ::ScalarField::rand(&mut rng); - let preimage = ::BaseField::rand(&mut rng); - - let witness = Witness { - s: private_key, - preimage, - }; - - // create public input - let public_key = Pallas::generator().mul(private_key).into_affine(); - let hash = { - let mut s: ArithmeticSponge<_, PlonkSpongeConstantsKimchi> = - ArithmeticSponge::new(Vesta::sponge_params()); - s.absorb(&[preimage]); - s.squeeze() - }; - - // generate proof - let public_input = vec![public_key.x, public_key.y, hash]; - let proof = prove::( - &prover_index, - &group_map, - None, - &public_input, - |sys, p| circuit::(&proof_system_constants, Some(&witness), sys, p), - ); - - // verify proof - let verifier_index = prover_index.verifier_index(); - - verify::<_, SpongeQ, SpongeR>(&group_map, &verifier_index, &proof, &public_input).unwrap(); -} diff --git a/circuit-construction/src/writer.rs b/circuit-construction/src/writer.rs deleted file mode 100644 index 0caec23c1c..0000000000 --- a/circuit-construction/src/writer.rs +++ /dev/null @@ -1,1007 +0,0 @@ -use ark_ff::{BigInteger, FftField, PrimeField}; -use kimchi::circuits::{ - gate::{CircuitGate, GateType}, - polynomials::generic::{ - DOUBLE_GENERIC_COEFFS, DOUBLE_GENERIC_REGISTERS, GENERIC_COEFFS, GENERIC_REGISTERS, - }, - wires::{Wire, COLUMNS}, -}; -use mina_poseidon::{ - constants::{PlonkSpongeConstantsKimchi, SpongeConstants}, - permutation::full_round, -}; -use std::array; -use std::collections::HashMap; - -use crate::constants::Constants; - -/// A variable in our circuit. -/// Variables are assigned with an index to differentiate from each other. -/// Optionally, they can eventually take as value a field element. -#[derive(Hash, Eq, PartialEq, Debug, Clone, Copy)] -pub struct Var { - pub index: usize, - pub value: Option, -} - -impl Var { - /// Returns the value inside a variable [Var]. - /// - /// # Panics - /// - /// Will panic if it is `None`. 
- pub fn val(&self) -> F { - self.value.unwrap() - } -} - -/// A variable that corresponds to scalar that is shifted by a certain amount. -pub struct ShiftedScalar(Var); - -/// Specifies a gate within a circuit. -/// A gate will have a type, -/// will refer to a row of variables, -/// and will have associated vector of coefficients. -pub struct GateSpec { - pub typ: GateType, - pub row: Vec>>, - pub coeffs: Vec, -} - -impl GateSpec { - pub fn get_var_val_or(&self, col: usize, default: F) -> F { - match self.row.get(col) { - Some(Some(var)) => var.val(), - _ => default, - } - } - - pub fn get_var_idx(&self, col: usize) -> Option { - match self.row.get(col) { - Some(Some(var)) => Some(var.index), - _ => None, - } - } -} - -/// A set of gates within the circuit. -/// It carries the index for the next available variable, -/// and the vector of [`GateSpec`] created so far. -/// It also keeps track of the queue of generic gates and cached constants. -#[derive(Default)] -pub struct System { - pub next_variable: usize, - pub generic_gate_queue: Vec>, - // pub equivalence_classes: HashMap>, - pub gates: Vec>, - pub cached_constants: HashMap>, -} - -/// Carries a vector of rows corresponding to the witness, a queue of generic gates, and stores the cached constants -#[derive(Default)] -pub struct WitnessGenerator -where - F: PrimeField, -{ - pub generic_gate_queue: Vec>, - pub rows: Vec>, - pub cached_constants: HashMap>, -} - -impl WitnessGenerator -where - F: PrimeField, -{ - /// Given a list of public inputs, creates the witness generator. - pub fn new(public_inputs: &[F]) -> Self { - let mut gen = Self::default(); - - for input in public_inputs { - let row = array::from_fn(|i| if i == 0 { *input } else { F::zero() }); - gen.rows.push(row); - } - - gen - } -} - -/// A row is an array of [COLUMNS] elements -type Row = [V; COLUMNS]; - -/// This trait includes all the operations that can be executed -/// by the elements in the circuits. -/// It allows for different behaviours depending on the struct for -/// which it is implemented for. -/// In particular, the circuit mode and the witness generation mode. -pub trait Cs { - /// In cases where you want to create a free variable in the circuit, - /// as in the variable is not constrained _yet_ - /// and can be anything that the prover wants. - /// For example, division can be implemented as: - /// - /// ```ignore - /// let a = sys.constant(5u32.into()); - /// let b = sys.constant(10u32.into()); - /// let c = sys.var(|| { - /// b.value * a.value.inverse().unwrap() - /// }); - /// sys.assert_eq(a * c, b); - /// ``` - /// - fn var(&mut self, g: G) -> Var - where - G: FnOnce() -> F; - - /// Returns the number of gates that the current [Self] contains. - fn curr_gate_count(&self) -> usize; - - /// Returns a variable containing a field element as value that is - /// computed as the equivalent `BigInteger` number returned by - /// function `g`, only if the length is a multiple of 4. - fn endo_scalar(&mut self, length: usize, g: G) -> Var - where - G: FnOnce() -> N, - { - assert_eq!(length % 4, 0); - - self.var(|| { - let y = g(); - let bits = y.to_bits_le(); - F::from_bigint(F::BigInt::from_bits_le(&bits)).unwrap() - }) - } - - /// This function creates a [`ShiftedScalar`] variable from a field element that is - /// returned by function `g()`, and a length that should be a multiple of 5. 
- fn scalar(&mut self, length: usize, g: G) -> ShiftedScalar - where - G: FnOnce() -> Fr, - { - assert_eq!(length % 5, 0); - - let v = self.var(|| { - // TODO: No need to recompute this each time. - let two = Fr::from(2u64); - let shift = Fr::one() + two.pow([length as u64]); - - let x = g(); - // x = 2 y + shift - // y = (x - shift) / 2 - // TODO: Could cache value of 1/2 to avoid division - let y = (x - shift) / two; - let bits = y.into_bigint().to_bits_le(); - F::from_bigint(F::BigInt::from_bits_le(&bits)).unwrap() - }); - ShiftedScalar(v) - } - - /// In circuit mode, adds a gate to the circuit. - /// In witness generation mode, adds the corresponding row to the witness. - fn gate(&mut self, g: GateSpec); - - /// Creates a `Generic` gate that constrains if two variables are equal. - /// This is done by setting `x1` in the left wire and `x2` in the right wire - /// with left coefficient `1` and right coefficient `-1`, so that `x1 - x2 = 0`. - // TODO: Optimize to use permutation argument. - fn assert_eq(&mut self, x1: Var, x2: Var) { - // | 0 | 1 | 2 | ... - // | x1 | x2 | 0 | ... - let vars = [Some(x1), Some(x2), None]; - - // constrain `x1 - x2 = 0` - let mut coeffs = [F::zero(); GENERIC_COEFFS]; - coeffs[0] = F::one(); - coeffs[1] = -F::one(); - - self.generic(coeffs, vars); - } - - /// Checks if a constant `x` is already in the cached constants of `self` and returns it. - /// Otherwise, it creates a variable for it and caches it. - fn cached_constants(&mut self, x: F) -> Var; - - /// Creates a `Generic` gate to include a constant in the circuit, and returns the variable containing it. - /// It sets the left wire to be the variable containing the constant `x` and the rest to zero. - /// Then the left coefficient is set to one and the coefficient for constants is set to `-x`. - /// This way, the constraint `1 * x - x = 0` holds. - fn constant(&mut self, x: F) -> Var { - let v = self.cached_constants(x); - - let mut coeffs = [F::zero(); GENERIC_COEFFS]; - coeffs[0] = F::one(); - coeffs[GENERIC_REGISTERS + 1] = -x; - - let vars = [Some(v), None, None]; - - self.generic(coeffs, vars); - - v - } - - /// Stores a generic gate until it can combine two of them - /// into a double generic gate. - fn generic_queue(&mut self, gate: GateSpec) -> Option>; - - /// Adds a generic gate. - /// - /// Warning: this assumes that some finalization occurs to flush - /// any queued generic gate. - fn generic(&mut self, coeffs: [F; GENERIC_COEFFS], vars: [Option>; GENERIC_REGISTERS]) { - let gate = GateSpec { - typ: GateType::Generic, - row: vars.to_vec(), - coeffs: coeffs.to_vec(), - }; - // we queue the single generic gate until we have two of them - if let Some(double_generic_gate) = self.generic_queue(gate) { - self.gate(double_generic_gate); - } - } - - /// Creates a `Generic` gate to constrain that a variable `v` is scaled by an `x` amount and returns it. - /// First, it creates a new variable with a scaled value (meaning, the value in `v` times `x`). - /// Then, it creates a row that sets the left wire to be `v` and the right wire to be the scaled variable. - /// Finally, it sets the left coefficient to `x` and the right coefficient to `-1`. - /// That way, the constraint `x * v - 1 * xv = 0` is created. - fn scale(&mut self, x: F, v: Var) -> Var { - let xv = self.var(|| v.val() * x); - - let vars = [Some(v), Some(xv), None]; - - let mut coeffs = [F::zero(); GENERIC_COEFFS]; - coeffs[0] = x; - coeffs[1] = -F::one(); - - self.generic(coeffs, vars); - - xv - } - - /// Performs curve point addition. 
- /// It creates the corresponding `CompleteAdd` gate for the points `(x1, y1)` and `(x2,y2)` - /// and returns the third point resulting from the addition as a tuple of variables. - fn add_group( - &mut self, - zero: Var, - (x1, y1): (Var, Var), - (x2, y2): (Var, Var), - ) -> (Var, Var) { - let mut same_x_bool = false; - let same_x = self.var(|| { - let same_x = x1.val() == x2.val(); - same_x_bool = same_x; - F::from(u64::from(same_x)) - }); - - let inf = zero; - let x21_inv = self.var(|| { - if x1.val() == x2.val() { - F::zero() - } else { - (x2.val() - x1.val()).inverse().unwrap() - } - }); - - let s = self.var(|| { - if same_x_bool { - let x1_squared = x1.val().square(); - (x1_squared.double() + x1_squared).div(y1.val().double()) - } else { - (y2.val() - y1.val()) * x21_inv.val() - } - }); - - let inf_z = self.var(|| { - if y1.val() == y2.val() { - F::zero() - } else if same_x_bool { - (y2.val() - y1.val()).inverse().unwrap() - } else { - F::zero() - } - }); - - let x3 = self.var(|| s.val().square() - (x1.val() + x2.val())); - - let y3 = self.var(|| s.val() * (x1.val() - x3.val()) - y1.val()); - - self.gate(GateSpec { - typ: GateType::CompleteAdd, - row: vec![ - Some(x1), - Some(y1), - Some(x2), - Some(y2), - Some(x3), - Some(y3), - Some(inf), - Some(same_x), - Some(s), - Some(inf_z), - Some(x21_inv), - ], - coeffs: vec![], - }); - (x3, y3) - } - - /// Doubles one curve point `(x1, y1)`, using internally the `add_group()` function. - /// It creates a `CompleteAdd` gate for this point addition (with itself). - /// Returns a tuple of variables corresponding to the doubled point. - fn double(&mut self, zero: Var, (x1, y1): (Var, Var)) -> (Var, Var) { - self.add_group(zero, (x1, y1), (x1, y1)) - } - - /// Creates a `CompleteAdd` gate that checks whether a third point `(x3, y3)` is the addition - /// of the two first points `(x1, y1)` and `(x2, y2)`. - /// The difference between this function and `add_group()` is that in `assert_add_group` the - /// third point is given, whereas in the other one it is computed with the formula. - fn assert_add_group( - &mut self, - zero: Var, - (x1, y1): (Var, Var), - (x2, y2): (Var, Var), - (x3, y3): (Var, Var), - ) { - let mut same_x_bool = false; - let same_x = self.var(|| { - let same_x = x1.val() == x2.val(); - same_x_bool = same_x; - F::from(u64::from(same_x)) - }); - - let inf = zero; - let x21_inv = self.var(|| { - if x1.val() == x2.val() { - F::zero() - } else { - (x2.val() - x1.val()).inverse().unwrap() - } - }); - - let s = self.var(|| { - if same_x_bool { - let x1_squared = x1.val().square(); - (x1_squared.double() + x1_squared).div(y1.val().double()) - } else { - (y2.val() - y1.val()) * x21_inv.val() - } - }); - - let inf_z = self.var(|| { - if y1.val() == y2.val() { - F::zero() - } else if same_x_bool { - (y2.val() - y1.val()).inverse().unwrap() - } else { - F::zero() - } - }); - - self.gate(GateSpec { - typ: GateType::CompleteAdd, - row: vec![ - Some(x1), - Some(y1), - Some(x2), - Some(y2), - Some(x3), - Some(y3), - Some(inf), - Some(same_x), - Some(s), - Some(inf_z), - Some(x21_inv), - ], - coeffs: vec![], - }); - } - - /// This function is used to include conditionals in circuits. - /// It creates three `Generic` gates to simulate the logics of the conditional. 
-    /// It receives as input:
-    /// - `b`: the branch (a boolean condition)
-    /// - `t`: the value to return when the condition is true
-    /// - `f`: the value to return when the condition is false
-    /// And simulates the following equation: `res = b * ( t - f ) + f`
-    /// ( when the condition is false, `res = f` )
-    /// ( when the condition is true, `res = t` )
-    /// This is constrained using three `Generic` gates
-    /// 1. Constrain `delta = t - f`
-    /// 2. Constrain `res1 = b * delta`
-    /// 3. Constrain `res = res1 + f`
-    /// For (1):
-    /// - Creates a row with left wire `t`, right wire `f`, and output wire `delta`
-    /// - Assigns `1` to the left coefficient, `-1` to the right coefficient, and `-1` to the output coefficient.
-    /// - That way, it creates a first gate constraining: `1 * t - 1 * f - delta = 0`
-    /// For (2):
-    /// - Creates a row with left wire `b`, right wire `delta`, and output wire `res1`.
-    /// - Assigns `-1` to the output coefficient, and `1` to the multiplication coefficient.
-    /// - That way, it creates a second gate constraining: `-1 * res1 + 1 * b * delta = 0`
-    /// For (3):
-    /// - Creates a row with left wire `res1`, right wire `f`, and output wire `res`.
-    /// - Assigns `1` to the left coefficient, `1` to the right coefficient, and `-1` to the output coefficient.
-    /// - That way, it creates a third gate constraining: `1 * res1 + 1 * f - 1 * res = 0`
-    fn cond_select(&mut self, b: Var<F>, t: Var<F>, f: Var<F>) -> Var<F> {
-        // Could be more efficient. Currently uses three constraints :(
-        // delta = t - f
-        // res1 = b * delta
-        // res = res1 + f
-
-        let delta = self.var(|| t.val() - f.val());
-        let res1 = self.var(|| b.val() * delta.val());
-        let res = self.var(|| f.val() + res1.val());
-
-        let row1 = [Some(t), Some(f), Some(delta)];
-        let mut c1 = [F::zero(); GENERIC_COEFFS];
-        c1[0] = F::one();
-        c1[1] = -F::one();
-        c1[2] = -F::one();
-
-        self.generic(c1, row1);
-
-        let row2 = [Some(b), Some(delta), Some(res1)];
-
-        let mut c2 = [F::zero(); GENERIC_COEFFS];
-        c2[0] = F::zero();
-        c2[1] = F::zero();
-        c2[2] = -F::one();
-        c2[3] = F::one();
-
-        self.generic(c2, row2);
-
-        let row3 = [Some(res1), Some(f), Some(res)];
-        let mut c3 = [F::zero(); GENERIC_COEFFS];
-        c3[0] = F::one();
-        c3[1] = F::one();
-        c3[2] = -F::one();
-
-        self.generic(c3, row3);
-
-        res
-    }
-
-    /// Performs a scalar multiplication between a [`ShiftedScalar`] and a point `(xt, yt)`.
-    /// This function creates 51 pairs of rows.
-    fn scalar_mul(
-        &mut self,
-        zero: Var<F>,
-        (xt, yt): (Var<F>, Var<F>),
-        scalar: ShiftedScalar<F>,
-    ) -> (Var<F>, Var<F>) {
-        let num_bits = 255;
-        let num_row_pairs = num_bits / 5;
-        let mut witness: [Vec<F>; COLUMNS] = array::from_fn(|_| vec![]);
-
-        let acc0 = self.add_group(zero, (xt, yt), (xt, yt));
-
-        let _ = self.var(|| {
-            witness = array::from_fn(|_| vec![F::zero(); 2 * num_row_pairs]);
-            // Creates a vector of bits from the value inside the scalar, with the most significant bit upfront
-            let bits_msb: Vec<bool> = scalar
-                .0
-                .val()
-                .into_bigint()
-                .to_bits_le()
-                .iter()
-                .take(num_bits)
-                .copied()
-                .rev()
-                .collect();
-            // Creates a witness for the VarBaseMul gate.
-            kimchi::circuits::polynomials::varbasemul::witness(
-                &mut witness,
-                0,
-                (xt.val(), yt.val()),
-                &bits_msb,
-                (acc0.0.val(), acc0.1.val()),
-            );
-            F::zero()
-        });
-
-        // For each of the pairs, it generates a VarBaseMul and a Zero gate.
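// [Editor's sketch, not part of the diff] The identity behind `cond_select`
// above, res = b * (t - f) + f, checked on plain field elements for both
// branches (hypothetical values):
//
//     use mina_curves::pasta::Fp;
//
//     let (t, f) = (Fp::from(7u64), Fp::from(11u64));
//     let select = |b: Fp| b * (t - f) + f;
//     assert_eq!(select(Fp::from(1u64)), t); // b = 1 selects t
//     assert_eq!(select(Fp::from(0u64)), f); // b = 0 selects f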
- let mut res = None; - for i in 0..num_row_pairs { - let mut row1: [_; COLUMNS] = array::from_fn(|j| self.var(|| witness[j][2 * i])); - let row2: [_; COLUMNS] = array::from_fn(|j| self.var(|| witness[j][2 * i + 1])); - - row1[0] = xt; - row1[1] = yt; - if i == 0 { - row1[2] = acc0.0; - row1[3] = acc0.1; - row1[4] = zero; - } - if i == num_row_pairs - 1 { - row1[5] = scalar.0; - res = Some((row2[0], row2[1])); - } - - self.gate(GateSpec { - row: row1.into_iter().map(Some).collect(), - typ: GateType::VarBaseMul, - coeffs: vec![], - }); - - self.gate(GateSpec { - row: row2.into_iter().map(Some).collect(), - typ: GateType::Zero, - coeffs: vec![], - }); - } - - res.unwrap() - } - - /// Creates an endoscalar multiplication gadget with `length_in_bits/4 + 1` gates. - /// For each row, it adds one `EndoMul` gate. The gadget is finalized with a `Zero` gate. - /// - /// | row | `GateType` | - /// | --- | ---------- | - /// | i | `EndoMul` | - /// | i+1 | `EndoMul` | - /// | ... | ... | - /// | r | `EndoMul` | - /// | r+1 | `Zero` | - /// - fn endo( - &mut self, - zero: Var, - constants: &Constants, - (xt, yt): (Var, Var), - scalar: Var, - length_in_bits: usize, - ) -> (Var, Var) { - let bits_per_row = 4; - let rows = length_in_bits / 4; - assert_eq!(0, length_in_bits % 4); - - let mut bits_ = vec![]; - let bits: Vec<_> = (0..length_in_bits) - .map(|i| { - self.var(|| { - if bits_.is_empty() { - bits_ = scalar - .val() - .into_bigint() - .to_bits_le() - .iter() - .take(length_in_bits) - .copied() - .rev() - .collect(); - } - F::from(u64::from(bits_[i])) - }) - }) - .collect(); - - let one = F::one(); - - let endo = constants.endo; - let mut acc = { - let phip = (self.scale(endo, xt), yt); - let phip_p = self.add_group(zero, phip, (xt, yt)); - self.double(zero, phip_p) - }; - - let mut n_acc = zero; - - // TODO: Could be more efficient - for i in 0..rows { - let b1 = bits[i * bits_per_row]; - let b2 = bits[i * bits_per_row + 1]; - let b3 = bits[i * bits_per_row + 2]; - let b4 = bits[i * bits_per_row + 3]; - - let (xp, yp) = acc; - - let xq1 = self.var(|| (one + (endo - one) * b1.val()) * xt.val()); - let yq1 = self.var(|| (b2.val().double() - one) * yt.val()); - - let s1 = self.var(|| (yq1.val() - yp.val()) / (xq1.val() - xp.val())); - let s1_squared = self.var(|| s1.val().square()); - // (2*xp – s1^2 + xq) * ((xp – xr) * s1 + yr + yp) = (xp – xr) * 2*yp - // => 2 yp / (2*xp – s1^2 + xq) = s1 + (yr + yp) / (xp – xr) - // => 2 yp / (2*xp – s1^2 + xq) - s1 = (yr + yp) / (xp – xr) - // - // s2 := 2 yp / (2*xp – s1^2 + xq) - s1 - // - // (yr + yp)^2 = (xp – xr)^2 * (s1^2 – xq1 + xr) - // => (s1^2 – xq1 + xr) = (yr + yp)^2 / (xp – xr)^2 - // - // => xr = s2^2 - s1^2 + xq - // => yr = s2 * (xp - xr) - yp - let s2 = self.var(|| { - yp.val().double() / (xp.val().double() + xq1.val() - s1_squared.val()) - s1.val() - }); - - // (xr, yr) - let xr = self.var(|| xq1.val() + s2.val().square() - s1_squared.val()); - let yr = self.var(|| (xp.val() - xr.val()) * s2.val() - yp.val()); - - let xq2 = self.var(|| (one + (endo - one) * b3.val()) * xt.val()); - let yq2 = self.var(|| (b4.val().double() - one) * yt.val()); - let s3 = self.var(|| (yq2.val() - yr.val()) / (xq2.val() - xr.val())); - let s3_squared = self.var(|| s3.val().square()); - let s4 = self.var(|| { - yr.val().double() / (xr.val().double() + xq2.val() - s3_squared.val()) - s3.val() - }); - - let xs = self.var(|| xq2.val() + s4.val().square() - s3_squared.val()); - let ys = self.var(|| (xr.val() - xs.val()) * s4.val() - yr.val()); - - self.gate(GateSpec { 
- typ: GateType::EndoMul, - row: vec![ - Some(xt), - Some(yt), - None, - None, - Some(xp), - Some(yp), - Some(n_acc), - Some(xr), - Some(yr), - Some(s1), - Some(s3), - Some(b1), - Some(b2), - Some(b3), - Some(b4), - ], - coeffs: vec![], - }); - - acc = (xs, ys); - - n_acc = self.var(|| { - let mut n_acc = n_acc.val(); - n_acc.double_in_place(); - n_acc += b1.val(); - n_acc.double_in_place(); - n_acc += b2.val(); - n_acc.double_in_place(); - n_acc += b3.val(); - n_acc.double_in_place(); - n_acc += b4.val(); - n_acc - }); - } - - // TODO: use a generic gate with zero coeffs - self.gate(GateSpec { - typ: GateType::Zero, - row: vec![ - None, - None, - None, - None, - Some(acc.0), - Some(acc.1), - Some(scalar), - None, - None, - None, - None, - None, - None, - None, - None, - ], - coeffs: vec![], - }); - acc - } - - /// Checks that a string of bits (with LSB first) correspond to the value inside variable `x`. - /// It splits the bitstring across rows, where each row takes care of 8 crumbs of 2 bits each. - /// - fn assert_pack(&mut self, zero: Var, x: Var, bits_lsb: &[Var]) { - let crumbs_per_row = 8; - let bits_per_row = 2 * crumbs_per_row; - assert_eq!(bits_lsb.len() % bits_per_row, 0); - let num_rows = bits_lsb.len() / bits_per_row; - - // Reverse string of bits to have MSB first in the vector - let bits_msb: Vec<_> = bits_lsb.iter().rev().collect(); - - let mut a = self.var(|| F::from(2u64)); - let mut b = self.var(|| F::from(2u64)); - let mut n = zero; - - let one = F::one(); - let neg_one = -one; - - // For each of the chunks, get the corresponding bits - for (i, row_bits) in bits_msb[..].chunks(bits_per_row).enumerate() { - let mut row: [Var; COLUMNS] = array::from_fn(|_| self.var(|| F::zero())); - row[0] = n; - row[2] = a; - row[3] = b; - - // For this row, get crumbs of 2 bits each - for (j, crumb_bits) in row_bits.chunks(2).enumerate() { - // Remember the MSB of each crumb is in the 0 index - let b0 = crumb_bits[1]; // less valued - let b1 = crumb_bits[0]; // more valued - - // Value of the 2-bit crumb in MSB - let crumb = self.var(|| b0.val() + b1.val().double()); - // Stores the 8 of them in positions [6..13] of the row - row[6 + j] = crumb; - - a = self.var(|| { - let x = a.val().double(); - if b1.val().is_zero() { - x - } else { - x + if b0.val().is_one() { one } else { neg_one } - } - }); - - b = self.var(|| { - let x = b.val().double(); - if b1.val().is_zero() { - x + if b0.val().is_one() { one } else { neg_one } - } else { - x - } - }); - - // Accumulated chunk value - n = self.var(|| n.val().double().double() + crumb.val()); - } - - // In final row, this is the input value, otherwise the accumulated value - row[1] = if i == num_rows - 1 { x } else { n }; - row[4] = a; - row[5] = b; - - row[14] = self.var(|| F::zero()); - } - } - - /// Creates a Poseidon gadget for given constants and a given input. - /// It generates a number of `Poseidon` gates followed by a final `Zero` gate. 
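// [Editor's sketch, not part of the diff] The accumulator in `assert_pack`
// above: reading the bitstring MSB-first, each 2-bit crumb c updates the
// packed value as n <- 4*n + c. Plain-integer illustration (hypothetical
// values):
//
//     let crumbs = [0b11u64, 0b01, 0b10]; // MSB-first crumbs of 0b110110
//     let n = crumbs.iter().fold(0u64, |acc, c| 4 * acc + c);
//     assert_eq!(n, 0b110110);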
- fn poseidon(&mut self, constants: &Constants, input: Vec>) -> Vec> { - use kimchi::circuits::polynomials::poseidon::{POS_ROWS_PER_HASH, ROUNDS_PER_ROW}; - - let params = constants.poseidon; - let rc = ¶ms.round_constants; - let width = PlonkSpongeConstantsKimchi::SPONGE_WIDTH; - - let mut states = vec![input]; - - for row in 0..POS_ROWS_PER_HASH { - let offset = row * ROUNDS_PER_ROW; - - for i in 0..ROUNDS_PER_ROW { - let mut s: Option> = None; - states.push( - (0..3) - .map(|col| { - self.var(|| { - match &s { - Some(s) => s[col], - None => { - // Do one full round on the previous value - let mut acc = states[states.len() - 1] - .iter() - .map(|x| x.val()) - .collect(); - full_round::( - params, - &mut acc, - offset + i, - ); - let res = acc[col]; - s = Some(acc); - res - } - } - }) - }) - .collect(), - ); - } - - self.gate(GateSpec { - typ: kimchi::circuits::gate::GateType::Poseidon, - coeffs: (0..COLUMNS) - .map(|i| rc[offset + (i / width)][i % width]) - .collect(), - row: vec![ - Some(states[offset][0]), - Some(states[offset][1]), - Some(states[offset][2]), - Some(states[offset + 4][0]), - Some(states[offset + 4][1]), - Some(states[offset + 4][2]), - Some(states[offset + 1][0]), - Some(states[offset + 1][1]), - Some(states[offset + 1][2]), - Some(states[offset + 2][0]), - Some(states[offset + 2][1]), - Some(states[offset + 2][2]), - Some(states[offset + 3][0]), - Some(states[offset + 3][1]), - Some(states[offset + 3][2]), - ], - }); - } - - let final_state = &states[states.len() - 1]; - let final_row = vec![ - Some(final_state[0]), - Some(final_state[1]), - Some(final_state[2]), - ]; - self.gate(GateSpec { - typ: kimchi::circuits::gate::GateType::Zero, - coeffs: vec![], - row: final_row, - }); - - states.pop().unwrap() - } -} - -impl Cs for WitnessGenerator { - /// Creates a variable with value given by a function `g` with index `0` - fn var(&mut self, g: G) -> Var - where - G: FnOnce() -> F, - { - Var { - index: 0, - value: Some(g()), - } - } - - /// Returns the number of rows. - fn curr_gate_count(&self) -> usize { - self.rows.len() - } - - /// Pushes a new row corresponding to the values in the row of gate `g`. - fn gate(&mut self, g: GateSpec) { - assert!(g.row.len() <= COLUMNS); - - let row: [F; COLUMNS] = array::from_fn(|col| g.get_var_val_or(col, F::zero())); - self.rows.push(row); - } - - fn generic_queue(&mut self, gate: GateSpec) -> Option> { - if let Some(mut other) = self.generic_gate_queue.pop() { - other.row.extend(&gate.row); - assert_eq!(other.row.len(), DOUBLE_GENERIC_REGISTERS); - Some(other) - } else { - self.generic_gate_queue.push(gate); - None - } - } - - fn cached_constants(&mut self, x: F) -> Var { - match self.cached_constants.get(&x) { - Some(var) => *var, - None => { - let var = self.var(|| x); - self.cached_constants.insert(x, var); - var - } - } - } -} - -impl WitnessGenerator { - /// Returns the columns of the witness. 
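// [Editor's sketch, not part of the diff] Row accounting for `poseidon`
// above: each `Poseidon` row applies ROUNDS_PER_ROW full rounds, and
// POS_ROWS_PER_HASH rows cover the whole permutation before the final `Zero`
// row exposes the last state. Assuming kimchi's usual parameters (5 rounds
// per row, 11 rows per hash), that is 55 full rounds:
//
//     use kimchi::circuits::polynomials::poseidon::{POS_ROWS_PER_HASH, ROUNDS_PER_ROW};
//     assert_eq!(POS_ROWS_PER_HASH * ROUNDS_PER_ROW, 55);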
- pub fn columns(&mut self) -> [Vec; COLUMNS] { - // flush any queued generic gate - if let Some(gate) = self.generic_gate_queue.pop() { - self.gate(gate); - } - - // transpose - array::from_fn(|col| self.rows.iter().map(|row| row[col]).collect()) - } -} - -impl Cs for System { - fn var(&mut self, _: V) -> Var { - let v = self.next_variable; - self.next_variable += 1; - Var { - index: v, - value: None, - } - } - - /// Outputs the number of gates in the circuit - fn curr_gate_count(&self) -> usize { - self.gates.len() - } - - fn gate(&mut self, g: GateSpec) { - self.gates.push(g); - } - - fn generic_queue(&mut self, gate: GateSpec) -> Option> { - if let Some(mut other) = self.generic_gate_queue.pop() { - other.row.extend(&gate.row); - assert_eq!(other.row.len(), DOUBLE_GENERIC_REGISTERS); - other.coeffs.extend(&gate.coeffs); - assert_eq!(other.coeffs.len(), DOUBLE_GENERIC_COEFFS); - Some(other) - } else { - self.generic_gate_queue.push(gate); - None - } - } - - fn cached_constants(&mut self, x: F) -> Var { - match self.cached_constants.get(&x) { - Some(var) => *var, - None => { - let var = self.var(|| x); - self.cached_constants.insert(x, var); - var - } - } - } -} - -impl System { - /// Compiles our intermediate representation into a circuit. - /// - /// # Panics - /// - /// Will not panic ever since it is permutation inside gates - pub fn gates(&mut self) -> Vec> { - let mut first_cell: HashMap = HashMap::new(); - let mut most_recent_cell: HashMap = HashMap::new(); - let mut gates = vec![]; - - // flush any queued generic gate - if let Some(gate) = self.generic_gate_queue.pop() { - self.gate(gate); - } - - // convert GateSpec into CircuitGate - for (row, gate) in self.gates.iter().enumerate() { - // while tracking the wiring - let wires = array::from_fn(|col| { - let curr = Wire { row, col }; - - if let Some(index) = gate.get_var_idx(col) { - // wire this cell to the previous one - match most_recent_cell.insert(index, curr) { - Some(w) => w, - // unless it is the first cell, - // in which case we just save it for the very end - // (to complete the cycle) - None => { - first_cell.insert(index, curr); - curr - } - } - } else { - // if no var to be found, it's a cell wired to itself - curr - } - }); - - let g = CircuitGate::new(gate.typ, wires, gate.coeffs.clone()); - gates.push(g); - } - - // finish the permutation cycle - for (var, first) in &first_cell { - let last = *most_recent_cell.get(var).unwrap(); - gates[first.row].wires[first.col] = last; - } - - gates - } -} diff --git a/folding/src/instance_witness.rs b/folding/src/instance_witness.rs index 52626a84c0..28defedcd2 100644 --- a/folding/src/instance_witness.rs +++ b/folding/src/instance_witness.rs @@ -1,7 +1,7 @@ //! This module defines a list of traits and structures that are used by the //! folding scheme. //! The folding library is built over generic traits like [Instance] and -//! [Witness] that defines the the NP relation R. +//! [Witness] that defines the NP relation R. //! //! This module describes 3 different types of instance/witness pairs: //! - [Instance] and [Witness]: the original instance and witness. These are the diff --git a/ivc/src/lib.rs b/ivc/src/lib.rs index 1037832192..1827883c83 100644 --- a/ivc/src/lib.rs +++ b/ivc/src/lib.rs @@ -58,7 +58,7 @@ //! env.assert_eq(env.output_left, env.hash_state[OutputCommitment]) //! ``` //! -//! The order of the execution is encoded in the fact the the hash contains the +//! The order of the execution is encoded in the fact the hash contains the //! 
step `i` when we check the left input and `i + 1` when we compress the //! folded output. The fact that the prover encodes the computation on the same //! initial input is encoded by adding the initial value `z0` into the hash for diff --git a/kimchi/src/circuits/argument.rs b/kimchi/src/circuits/argument.rs index 92daa0f4df..3fa7e7a067 100644 --- a/kimchi/src/circuits/argument.rs +++ b/kimchi/src/circuits/argument.rs @@ -155,7 +155,7 @@ pub struct ArgumentData { pub challenges: BerkeleyChallenges, } -/// Witness data for a argument +/// Witness data for an argument pub struct ArgumentWitness { /// Witness for current row pub curr: [T; COLUMNS], diff --git a/kimchi/src/circuits/expr.rs b/kimchi/src/circuits/expr.rs index 8235109145..b299644ab2 100644 --- a/kimchi/src/circuits/expr.rs +++ b/kimchi/src/circuits/expr.rs @@ -3150,6 +3150,27 @@ where res.push(e); res } + + /// Converts the expression in OCaml code + pub fn ocaml_str(&self) -> String { + let mut env = HashMap::new(); + let e = self.ocaml(&mut env); + + let mut env: Vec<_> = env.into_iter().collect(); + // HashMap deliberately uses an unstable order; here we sort to ensure + // that the output is consistent when printing. + env.sort_by(|(x, _), (y, _)| x.cmp(y)); + + let mut res = String::new(); + for (k, v) in env { + let rhs = v.ocaml_str(); + let cached = format!("let {} = {rhs} in ", k.var_name()); + res.push_str(&cached); + } + + res.push_str(&e); + res + } } // diff --git a/kimchi/src/circuits/gate.rs b/kimchi/src/circuits/gate.rs index 69eb4c4894..d7308d723e 100644 --- a/kimchi/src/circuits/gate.rs +++ b/kimchi/src/circuits/gate.rs @@ -6,7 +6,7 @@ use crate::{ berkeley_columns::BerkeleyChallenges, constraints::ConstraintSystem, polynomials::{ - complete_add, endomul_scalar, endosclmul, foreign_field_add, foreign_field_mul, keccak, + complete_add, endomul_scalar, endosclmul, foreign_field_add, foreign_field_mul, poseidon, range_check, rot, turshi, varbasemul, xor, }, wires::*, @@ -108,8 +108,6 @@ pub enum GateType { // Gates for Keccak Xor16, Rot64, - KeccakRound, - KeccakSponge, } /// Gate error @@ -211,12 +209,6 @@ impl CircuitGate { Rot64 => self .verify_witness::(row, witness, &index.cs, public) .map_err(|e| e.to_string()), - KeccakRound => self - .verify_witness::(row, witness, &index.cs, public) - .map_err(|e| e.to_string()), - KeccakSponge => self - .verify_witness::(row, witness, &index.cs, public) - .map_err(|e| e.to_string()), } } @@ -319,12 +311,6 @@ impl CircuitGate { } GateType::Xor16 => xor::Xor16::constraint_checks(&env, &mut cache), GateType::Rot64 => rot::Rot64::constraint_checks(&env, &mut cache), - GateType::KeccakRound => { - keccak::circuitgates::KeccakRound::constraint_checks(&env, &mut cache) - } - GateType::KeccakSponge => { - keccak::circuitgates::KeccakSponge::constraint_checks(&env, &mut cache) - } }; // Check for failed constraints diff --git a/kimchi/src/circuits/polynomials/keccak/circuitgates.rs b/kimchi/src/circuits/polynomials/keccak/circuitgates.rs deleted file mode 100644 index b35b14333c..0000000000 --- a/kimchi/src/circuits/polynomials/keccak/circuitgates.rs +++ /dev/null @@ -1,324 +0,0 @@ -//! Keccak gadget -use crate::{ - auto_clone, auto_clone_array, - circuits::{ - argument::{Argument, ArgumentEnv, ArgumentType}, - berkeley_columns::BerkeleyChallengeTerm, - expr::{ - constraints::{boolean, ExprOps}, - Cache, - }, - gate::GateType, - polynomials::keccak::{constants::*, OFF}, - }, - grid, -}; -use ark_ff::PrimeField; -use std::marker::PhantomData; - -#[macro_export] -macro_rules! 
from_quarters { - ($quarters:ident, $x:ident) => { - $quarters($x, 0) - + T::two_pow(16) * $quarters($x, 1) - + T::two_pow(32) * $quarters($x, 2) - + T::two_pow(48) * $quarters($x, 3) - }; - ($quarters:ident, $y:ident, $x:ident) => { - $quarters($y, $x, 0) - + T::two_pow(16) * $quarters($y, $x, 1) - + T::two_pow(32) * $quarters($y, $x, 2) - + T::two_pow(48) * $quarters($y, $x, 3) - }; -} - -#[macro_export] -macro_rules! from_shifts { - ($shifts:ident, $i:ident) => { - $shifts($i) - + T::two_pow(1) * $shifts(100 + $i) - + T::two_pow(2) * $shifts(200 + $i) - + T::two_pow(3) * $shifts(300 + $i) - }; - ($shifts:ident, $x:ident, $q:ident) => { - $shifts(0, $x, $q) - + T::two_pow(1) * $shifts(1, $x, $q) - + T::two_pow(2) * $shifts(2, $x, $q) - + T::two_pow(3) * $shifts(3, $x, $q) - }; - ($shifts:ident, $y:ident, $x:ident, $q:ident) => { - $shifts(0, $y, $x, $q) - + T::two_pow(1) * $shifts(1, $y, $x, $q) - + T::two_pow(2) * $shifts(2, $y, $x, $q) - + T::two_pow(3) * $shifts(3, $y, $x, $q) - }; -} - -//~ -//~ | `KeccakRound` | [0...265) | [265...1165) | [1165...1965) | -//~ | ------------- | --------- | ------------ | ------------- | -//~ | Curr | theta | pirho | chi | -//~ -//~ | `KeccakRound` | [0...100) | -//~ | ------------- | --------- | -//~ | Next | iota | -//~ -//~ ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- -//~ -//~ | Columns | [0...100) | [100...180) | [180...200) | [200...205) | [205...225) | [225...245) | [245...265) | -//~ | -------- | --------- | ----------- | ----------- | ----------- | ------------ | ------------ | ------------ | -//~ | theta | state_a | shifts_c | dense_c | quotient_c | remainder_c | dense_rot_c | expand_rot_c | -//~ -//~ | Columns | [265...665) | [665...765) | [765...865) | [865...965) | [965...1065) | [1065...1165) | -//~ | -------- | ----------- | ----------- | ------------ | ----------- | ------------ | ------------- | -//~ | pirho | shifts_e | dense_e | quotient_e | remainder_e | dense_rot_e | expand_rot_e | -//~ -//~ | Columns | [1165...1565) | [1565...1965) | -//~ | -------- | ------------- | ------------- | -//~ | chi | shifts_b | shifts_sum | -//~ -//~ | Columns | [0...4) | [4...100) | -//~ | -------- | ------- | --------- | -//~ | iota | g00 | rest_g | -//~ -#[derive(Default)] -pub struct KeccakRound(PhantomData); - -impl Argument for KeccakRound -where - F: PrimeField, -{ - const ARGUMENT_TYPE: ArgumentType = ArgumentType::Gate(GateType::KeccakRound); - const CONSTRAINTS: u32 = 389; - - // Constraints for one round of the Keccak permutation function - fn constraint_checks>( - env: &ArgumentEnv, - _cache: &mut Cache, - ) -> Vec { - let mut constraints = vec![]; - - // DEFINE ROUND CONSTANT - let rc = [env.coeff(0), env.coeff(1), env.coeff(2), env.coeff(3)]; - - // LOAD STATES FROM WITNESS LAYOUT - // THETA - let state_a = grid!( - 100, - env.witness_curr_chunk(THETA_STATE_A_OFF, THETA_SHIFTS_C_OFF) - ); - let shifts_c = grid!( - 80, - env.witness_curr_chunk(THETA_SHIFTS_C_OFF, THETA_DENSE_C_OFF) - ); - let dense_c = grid!( - 20, - env.witness_curr_chunk(THETA_DENSE_C_OFF, THETA_QUOTIENT_C_OFF) - ); - let quotient_c = grid!( - 5, - env.witness_curr_chunk(THETA_QUOTIENT_C_OFF, THETA_REMAINDER_C_OFF) - ); - let remainder_c = grid!( - 20, - env.witness_curr_chunk(THETA_REMAINDER_C_OFF, THETA_DENSE_ROT_C_OFF) - ); - let dense_rot_c = grid!( - 20, - env.witness_curr_chunk(THETA_DENSE_ROT_C_OFF, THETA_EXPAND_ROT_C_OFF) - ); - let 
expand_rot_c = grid!(
-        20,
-        env.witness_curr_chunk(THETA_EXPAND_ROT_C_OFF, PIRHO_SHIFTS_E_OFF)
-    );
-    // PI-RHO
-    let shifts_e = grid!(
-        400,
-        env.witness_curr_chunk(PIRHO_SHIFTS_E_OFF, PIRHO_DENSE_E_OFF)
-    );
-    let dense_e = grid!(
-        100,
-        env.witness_curr_chunk(PIRHO_DENSE_E_OFF, PIRHO_QUOTIENT_E_OFF)
-    );
-    let quotient_e = grid!(
-        100,
-        env.witness_curr_chunk(PIRHO_QUOTIENT_E_OFF, PIRHO_REMAINDER_E_OFF)
-    );
-    let remainder_e = grid!(
-        100,
-        env.witness_curr_chunk(PIRHO_REMAINDER_E_OFF, PIRHO_DENSE_ROT_E_OFF)
-    );
-    let dense_rot_e = grid!(
-        100,
-        env.witness_curr_chunk(PIRHO_DENSE_ROT_E_OFF, PIRHO_EXPAND_ROT_E_OFF)
-    );
-    let expand_rot_e = grid!(
-        100,
-        env.witness_curr_chunk(PIRHO_EXPAND_ROT_E_OFF, CHI_SHIFTS_B_OFF)
-    );
-    // CHI
-    let shifts_b = grid!(
-        400,
-        env.witness_curr_chunk(CHI_SHIFTS_B_OFF, CHI_SHIFTS_SUM_OFF)
-    );
-    let shifts_sum = grid!(
-        400,
-        env.witness_curr_chunk(CHI_SHIFTS_SUM_OFF, IOTA_STATE_G_OFF)
-    );
-    // IOTA
-    let state_g = grid!(100, env.witness_next_chunk(0, IOTA_STATE_G_LEN));
-
-    // Define vectors containing witness expressions which are not in the layout for efficiency
-    let mut state_c: Vec<Vec<T>> = vec![vec![T::zero(); QUARTERS]; DIM];
-    let mut state_d: Vec<Vec<T>> = vec![vec![T::zero(); QUARTERS]; DIM];
-    let mut state_e: Vec<Vec<Vec<T>>> = vec![vec![vec![T::zero(); QUARTERS]; DIM]; DIM];
-    let mut state_b: Vec<Vec<Vec<T>>> = vec![vec![vec![T::zero(); QUARTERS]; DIM]; DIM];
-    let mut state_f: Vec<Vec<Vec<T>>> = vec![vec![vec![T::zero(); QUARTERS]; DIM]; DIM];
-
-    // STEP theta: 5 * ( 3 + 4 * 1 ) = 35 constraints
-    for x in 0..DIM {
-        let word_c = from_quarters!(dense_c, x);
-        let rem_c = from_quarters!(remainder_c, x);
-        let rot_c = from_quarters!(dense_rot_c, x);
-
-        constraints
-            .push(word_c * T::two_pow(1) - (quotient_c(x) * T::two_pow(64) + rem_c.clone()));
-        constraints.push(rot_c - (quotient_c(x) + rem_c));
-        constraints.push(boolean(&quotient_c(x)));
-
-        for q in 0..QUARTERS {
-            state_c[x][q] = state_a(0, x, q)
-                + state_a(1, x, q)
-                + state_a(2, x, q)
-                + state_a(3, x, q)
-                + state_a(4, x, q);
-            constraints.push(state_c[x][q].clone() - from_shifts!(shifts_c, x, q));
-
-            state_d[x][q] =
-                shifts_c(0, (x + DIM - 1) % DIM, q) + expand_rot_c((x + 1) % DIM, q);
-
-            for (y, column_e) in state_e.iter_mut().enumerate() {
-                column_e[x][q] = state_a(y, x, q) + state_d[x][q].clone();
-            }
-        }
-    } // END theta
-
-    // STEP pirho: 5 * 5 * (2 + 4 * 1) = 150 constraints
-    for (y, col) in OFF.iter().enumerate() {
-        for (x, off) in col.iter().enumerate() {
-            let word_e = from_quarters!(dense_e, y, x);
-            let quo_e = from_quarters!(quotient_e, y, x);
-            let rem_e = from_quarters!(remainder_e, y, x);
-            let rot_e = from_quarters!(dense_rot_e, y, x);
-
-            constraints.push(
-                word_e * T::two_pow(*off) - (quo_e.clone() * T::two_pow(64) + rem_e.clone()),
-            );
-            constraints.push(rot_e - (quo_e.clone() + rem_e));
-
-            for q in 0..QUARTERS {
-                constraints.push(state_e[y][x][q].clone() - from_shifts!(shifts_e, y, x, q));
-                state_b[(2 * x + 3 * y) % DIM][y][q] = expand_rot_e(y, x, q);
-            }
-        }
-    } // END pirho
-
-    // STEP chi: 4 * 5 * 5 * 2 = 200 constraints
-    for q in 0..QUARTERS {
-        for x in 0..DIM {
-            for y in 0..DIM {
-                let not = T::literal(F::from(0x1111111111111111u64))
-                    - shifts_b(0, y, (x + 1) % DIM, q);
-                let sum = not + shifts_b(0, y, (x + 2) % DIM, q);
-                let and = shifts_sum(1, y, x, q);
-
-                constraints.push(state_b[y][x][q].clone() - from_shifts!(shifts_b, y, x, q));
-                constraints.push(sum - from_shifts!(shifts_sum, y, x, q));
-                state_f[y][x][q] = shifts_b(0, y, x, q) + and;
-            }
-        }
-    } // END chi
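// [Editor's sketch, not part of the diff] What `from_quarters!` composes in
// the steps above: a 64-bit word from four 16-bit little-endian quarters,
// w = q0 + 2^16*q1 + 2^32*q2 + 2^48*q3. On plain integers (hypothetical
// values):
//
//     let q = [0x1111u64, 0x2222, 0x3333, 0x4444];
//     let w = q[0] + (q[1] << 16) + (q[2] << 32) + (q[3] << 48);
//     assert_eq!(w, 0x4444_3333_2222_1111);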
- - // STEP iota: 4 constraints - for (q, c) in rc.iter().enumerate() { - constraints.push(state_g(0, 0, q) - (state_f[0][0][q].clone() + c.clone())); - } // END iota - - constraints - } -} - -//~ -//~ | `KeccakSponge` | [0...100) | [100...168) | [168...200) | [200...400] | [400...800) | -//~ | -------------- | --------- | ----------- | ----------- | ----------- | ----------- | -//~ | Curr | old_state | new_block | zeros | bytes | shifts | -//~ | Next | xor_state | -//~ -#[derive(Default)] -pub struct KeccakSponge(PhantomData); - -impl Argument for KeccakSponge -where - F: PrimeField, -{ - const ARGUMENT_TYPE: ArgumentType = ArgumentType::Gate(GateType::KeccakSponge); - const CONSTRAINTS: u32 = 532; - - // Constraints for the Keccak sponge - fn constraint_checks>( - env: &ArgumentEnv, - _cache: &mut Cache, - ) -> Vec { - let mut constraints = vec![]; - - // LOAD WITNESS - let old_state = env.witness_curr_chunk(SPONGE_OLD_STATE_OFF, SPONGE_NEW_STATE_OFF); - let new_state = env.witness_curr_chunk(SPONGE_NEW_STATE_OFF, SPONGE_BYTES_OFF); - let zeros = env.witness_curr_chunk(SPONGE_ZEROS_OFF, SPONGE_BYTES_OFF); - let xor_state = env.witness_next_chunk(0, SPONGE_XOR_STATE_LEN); - let bytes = env.witness_curr_chunk(SPONGE_BYTES_OFF, SPONGE_SHIFTS_OFF); - let shifts = - env.witness_curr_chunk(SPONGE_SHIFTS_OFF, SPONGE_SHIFTS_OFF + SPONGE_SHIFTS_LEN); - auto_clone_array!(old_state); - auto_clone_array!(new_state); - auto_clone_array!(xor_state); - auto_clone_array!(bytes); - auto_clone_array!(shifts); - - // LOAD COEFFICIENTS - let absorb = env.coeff(0); - let squeeze = env.coeff(1); - let root = env.coeff(2); - let flags = env.coeff_chunk(4, 140); - let pad = env.coeff_chunk(200, 336); - auto_clone!(root); - auto_clone!(absorb); - auto_clone!(squeeze); - auto_clone_array!(flags); - auto_clone_array!(pad); - - // 32 + 100 * 3 + 64 + 136 = 532 - for z in zeros { - // Absorb phase pads with zeros the new state - constraints.push(absorb() * z); - } - for i in 0..STATE_LEN { - // In first absorb, root state is all zeros - constraints.push(root() * old_state(i)); - // Absorbs the new block by performing XOR with the old state - constraints.push(absorb() * (xor_state(i) - (old_state(i) + new_state(i)))); - // In absorb, Check shifts correspond to the decomposition of the new state - constraints.push(absorb() * (new_state(i) - from_shifts!(shifts, i))); - } - for i in 0..64 { - // In squeeze, Check shifts correspond to the 256-bit prefix digest of the old state (current) - constraints.push(squeeze() * (old_state(i) - from_shifts!(shifts, i))); - } - for i in 0..RATE_IN_BYTES { - // Check padding - constraints.push(flags(i) * (pad(i) - bytes(i))); - } - - constraints - } -} diff --git a/kimchi/src/circuits/polynomials/keccak/gadget.rs b/kimchi/src/circuits/polynomials/keccak/gadget.rs deleted file mode 100644 index f204771fe9..0000000000 --- a/kimchi/src/circuits/polynomials/keccak/gadget.rs +++ /dev/null @@ -1,93 +0,0 @@ -//! 
Keccak gadget -use crate::circuits::{ - gate::{CircuitGate, GateType}, - wires::Wire, -}; -use ark_ff::PrimeField; - -use super::{ - constants::{RATE_IN_BYTES, ROUNDS}, - Keccak, RC, -}; - -const SPONGE_COEFFS: usize = 336; - -impl CircuitGate { - /// Extends a Keccak circuit to hash one message - /// Note: - /// Requires at least one more row after the Keccak gadget so that - /// constraints can access the next row in the squeeze - pub fn extend_keccak(circuit: &mut Vec, bytelength: usize) -> usize { - let mut gates = Self::create_keccak(circuit.len(), bytelength); - circuit.append(&mut gates); - circuit.len() - } - - /// Creates a Keccak256 circuit, capacity 512 bits, rate 1088 bits, message of a given bytelength - fn create_keccak(new_row: usize, bytelength: usize) -> Vec { - let padded_len = Keccak::padded_length(bytelength); - let extra_bytes = padded_len - bytelength; - let num_blocks = padded_len / RATE_IN_BYTES; - let mut gates = vec![]; - for block in 0..num_blocks { - let root = block == 0; - let pad = block == num_blocks - 1; - gates.push(Self::create_keccak_absorb( - new_row + gates.len(), - root, - pad, - extra_bytes, - )); - for round in 0..ROUNDS { - gates.push(Self::create_keccak_round(new_row + gates.len(), round)); - } - } - gates.push(Self::create_keccak_squeeze(new_row + gates.len())); - gates - } - - fn create_keccak_squeeze(new_row: usize) -> Self { - CircuitGate { - typ: GateType::KeccakSponge, - wires: Wire::for_row(new_row), - coeffs: { - let mut c = vec![F::zero(); SPONGE_COEFFS]; - c[1] = F::one(); - c - }, - } - } - - fn create_keccak_absorb(new_row: usize, root: bool, pad: bool, pad_bytes: usize) -> Self { - let mut coeffs = vec![F::zero(); SPONGE_COEFFS]; - coeffs[0] = F::one(); // absorb - if root { - coeffs[2] = F::one(); // root - } - if pad { - // Check pad 0x01 (0x00 ... 0x00)* 0x80 or 0x81 if only one byte for padding - for i in 0..pad_bytes { - coeffs[140 - i] = F::one(); // flag for padding - if i == 0 { - coeffs[SPONGE_COEFFS - 1 - i] += F::from(0x80u8); // pad - } - if i == pad_bytes - 1 { - coeffs[SPONGE_COEFFS - 1 - i] += F::one(); // pad - } - } - } - CircuitGate { - typ: GateType::KeccakSponge, - wires: Wire::for_row(new_row), - coeffs, - } - } - - fn create_keccak_round(new_row: usize, round: usize) -> Self { - CircuitGate { - typ: GateType::KeccakRound, - wires: Wire::for_row(new_row), - coeffs: Keccak::expand_word(RC[round]), - } - } -} diff --git a/kimchi/src/circuits/polynomials/keccak/mod.rs b/kimchi/src/circuits/polynomials/keccak/mod.rs index ec7a094c25..d35a0492dd 100644 --- a/kimchi/src/circuits/polynomials/keccak/mod.rs +++ b/kimchi/src/circuits/polynomials/keccak/mod.rs @@ -1,7 +1,5 @@ //! Keccak hash module -pub mod circuitgates; pub mod constants; -pub mod gadget; pub mod witness; use crate::circuits::expr::constraints::ExprOps; diff --git a/kimchi/src/tests/framework.rs b/kimchi/src/tests/framework.rs index eec339da2a..fe1e4d2558 100644 --- a/kimchi/src/tests/framework.rs +++ b/kimchi/src/tests/framework.rs @@ -317,7 +317,7 @@ where OpeningProof::SRS: Clone, VerifierIndex: Clone, { - /// Regression test: Create a proof and check that is is equal to + /// Regression test: Create a proof and check that is equal to /// the given serialized implementation (and that deserializes /// correctly). 
pub(crate) fn prove_and_check_serialization_regression< diff --git a/kimchi/src/tests/keccak.rs b/kimchi/src/tests/keccak.rs index 80565f59d7..0cd25d6289 100644 --- a/kimchi/src/tests/keccak.rs +++ b/kimchi/src/tests/keccak.rs @@ -1,11 +1,8 @@ use std::array; use crate::{ - circuits::{ - constraints::ConstraintSystem, - gate::{CircuitGate, GateType}, - polynomials::keccak::{constants::KECCAK_COLS, witness::extend_keccak_witness, Keccak}, - wires::Wire, + circuits::polynomials::keccak::{ + constants::KECCAK_COLS, witness::extend_keccak_witness, Keccak, }, curve::KimchiCurve, }; @@ -14,24 +11,6 @@ use mina_curves::pasta::Pallas; use num_bigint::BigUint; use o1_utils::{BigUintHelpers, FieldHelpers}; -fn create_test_constraint_system( - bytelength: usize, -) -> ConstraintSystem -where - G::BaseField: PrimeField, -{ - let mut gates = vec![]; - let next_row = CircuitGate::extend_keccak(&mut gates, bytelength); - // Adding dummy row to avoid out of bounds in squeeze constraints accessing Next row - gates.push(CircuitGate { - typ: GateType::Zero, - wires: Wire::for_row(next_row), - coeffs: vec![], - }); - - ConstraintSystem::create(gates).build().unwrap() -} - fn create_keccak_witness(message: BigUint) -> [Vec; KECCAK_COLS] where G::BaseField: PrimeField, @@ -88,13 +67,6 @@ fn setup_keccak_test(message: BigUint) -> BigUint where G::BaseField: PrimeField, { - let bytelength = message.to_bytes_be().len(); - let padded_len = { - let mut sized = message.to_bytes_be(); - sized.resize(bytelength - sized.len(), 0); - Keccak::pad(&sized).len() - }; - let _index = create_test_constraint_system::(padded_len); let witness = create_keccak_witness::(message); for r in 1..=24 { diff --git a/kimchi/src/verifier.rs b/kimchi/src/verifier.rs index 09369e56ab..552e3ed373 100644 --- a/kimchi/src/verifier.rs +++ b/kimchi/src/verifier.rs @@ -87,8 +87,6 @@ impl<'a, G: KimchiCurve, OpeningProof: OpenProof> Context<'a, G, OpeningProof ForeignFieldMul => Some(self.verifier_index.foreign_field_mul_comm.as_ref()?), Xor16 => Some(self.verifier_index.xor_comm.as_ref()?), Rot64 => Some(self.verifier_index.rot_comm.as_ref()?), - KeccakRound => todo!(), - KeccakSponge => todo!(), } } } diff --git a/mvpoly/src/lib.rs b/mvpoly/src/lib.rs index 1b4683948a..789caaf57c 100644 --- a/mvpoly/src/lib.rs +++ b/mvpoly/src/lib.rs @@ -156,7 +156,7 @@ pub trait MVPoly: /// /// The parameter `offset_next_row` is an optional argument that is used to /// support the case where the "next row" is used. In this case, the type - /// parameter `N` must include this offset (i.e. if 4 variables are in ued, + /// parameter `N` must include this offset (i.e. if 4 variables are in used, /// N should be at least `8 = 2 * 4`). fn from_expr, ChallengeTerm: Clone>(expr: Expr, Column>, offset_next_row: Option) -> Self { use kimchi::circuits::expr::Operations::*; @@ -258,7 +258,7 @@ pub trait MVPoly: /// scalar. /// /// More explicitly, given a polynomial `P(X1, ..., Xn)` and a scalar α, the - /// method computes the the cross-terms of the polynomial `Q(X1, ..., Xn, α) + /// method computes the cross-terms of the polynomial `Q(X1, ..., Xn, α) /// = α * P(X1, ..., Xn)`. For this reason, the method takes as input the /// two different scalars `scalar1` and `scalar2` as we are considering the /// scaling factor as a variable. 
diff --git a/mvpoly/src/monomials.rs b/mvpoly/src/monomials.rs index 828d1b7ea1..1d161cf75e 100644 --- a/mvpoly/src/monomials.rs +++ b/mvpoly/src/monomials.rs @@ -342,6 +342,10 @@ impl MVPoly for Sparse() -> Vec { } /// Compute all the possible two factors decomposition of a number n. -/// It uses an cache where previous values have been computed. +/// It uses a cache where previous values have been computed. /// For instance, if n = 6, the function will return [(1, 6), (2, 3), (3, 2), (6, 1)]. /// The cache might be used to store the results of previous computations. /// The cache is a hashmap where the key is the number and the value is the diff --git a/o1vm/src/interpreters/mips/interpreter.rs b/o1vm/src/interpreters/mips/interpreter.rs index 8604af7986..326e2e10a4 100644 --- a/o1vm/src/interpreters/mips/interpreter.rs +++ b/o1vm/src/interpreters/mips/interpreter.rs @@ -31,6 +31,8 @@ pub enum Instruction { RType(RTypeInstruction), JType(JTypeInstruction), IType(ITypeInstruction), + // A no-op operation that should only be used for testing. The semantic is + // not clearly defined. NoOp, } @@ -976,6 +978,9 @@ pub fn interpret_instruction(env: &mut Env, instr: Instruct } } +// FIXME: the noop should not be used in production. The interpreter semantic +// should be refined. The padding is only for testing purposes when padding is +// required to reach the size of the domain. pub fn interpret_noop(env: &mut Env) { let instruction_pointer = env.get_instruction_pointer(); let instruction = { @@ -993,7 +998,6 @@ pub fn interpret_noop(env: &mut Env) { unsafe { env.bitmask(&instruction, 32, 26, pos) } }; - env.range_check8(&opcode, 6); env.assert_is_zero(opcode); let next_instruction_pointer = env.get_next_instruction_pointer(); env.set_instruction_pointer(next_instruction_pointer.clone()); diff --git a/saffron/.gitignore b/saffron/.gitignore new file mode 100644 index 0000000000..402fd5d873 --- /dev/null +++ b/saffron/.gitignore @@ -0,0 +1 @@ +proptest-regressions diff --git a/circuit-construction/Cargo.toml b/saffron/Cargo.toml similarity index 52% rename from circuit-construction/Cargo.toml rename to saffron/Cargo.toml index fdd047984b..79703b4361 100644 --- a/circuit-construction/Cargo.toml +++ b/saffron/Cargo.toml @@ -1,47 +1,46 @@ [package] -name = "circuit-construction" +name = "saffron" version = "0.1.0" -description = "A simple circuit writer for kimchi" +description = "A mutable state layer" repository = "https://github.com/o1-labs/proof-systems" -edition = "2021" -license = "Apache-2.0" homepage = "https://o1-labs.github.io/proof-systems/" documentation = "https://o1-labs.github.io/proof-systems/rustdoc/" -readme = "../README.md" +readme = "README.md" +edition = "2021" +license = "Apache-2.0" + +# [lib] +# path = "src/lib.rs" -[lib] -path = "src/lib.rs" -bench = false # needed for criterion (https://bheisler.github.io/criterion.rs/book/faq.html#cargo-bench-gives-unrecognized-option-errors-for-valid-command-line-options) +[[bin]] +name = "saffron" +path = "src/main.rs" [dependencies] -ark-ff.workspace = true +anyhow = "1.0" ark-ec.workspace = true +ark-ff.workspace = true ark-poly.workspace = true -ark-serialize.workspace = true -blake2.workspace = true -num-derive.workspace = true -num-traits.workspace = true -itertools.workspace = true +ark-serialize = { workspace = true, features = ["derive"]} +clap = { workspace = true, features = ["derive"] } +hex.workspace = true +kimchi.workspace = true +mina-curves.workspace = true +mina-poseidon.workspace = true +o1-utils.workspace = true 
+poly-commitment.workspace = true rand.workspace = true -rand_core.workspace = true rayon.workspace = true rmp-serde.workspace = true serde.workspace = true serde_with.workspace = true thiserror.workspace = true - -poly-commitment.workspace = true -groupmap.workspace = true -mina-curves.workspace = true -o1-utils.workspace = true -mina-poseidon.workspace = true -kimchi.workspace = true +time = { version = "0.3", features = ["macros"] } +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = [ "ansi", "env-filter", "fmt", "time" ] } [dev-dependencies] +ark-std.workspace = true +ctor = "0.2" proptest.workspace = true -proptest-derive.workspace = true -colored.workspace = true - -# benchmarks -criterion.workspace = true -iai.workspace = true +once_cell.workspace = true diff --git a/saffron/fixtures/lorem.txt b/saffron/fixtures/lorem.txt new file mode 100644 index 0000000000..1b376877f4 --- /dev/null +++ b/saffron/fixtures/lorem.txt @@ -0,0 +1 @@ +Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. diff --git a/saffron/src/blob.rs b/saffron/src/blob.rs new file mode 100644 index 0000000000..f051cc2bb1 --- /dev/null +++ b/saffron/src/blob.rs @@ -0,0 +1,164 @@ +use crate::{ + commitment::fold_commitments, + utils::{decode_into, encode_for_domain}, +}; +use ark_ff::PrimeField; +use ark_poly::{univariate::DensePolynomial, EvaluationDomain, Evaluations}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; +use kimchi::curve::KimchiCurve; +use mina_poseidon::FqSponge; +use o1_utils::FieldHelpers; +use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, PolyComm, SRS as _}; +use rayon::prelude::*; +use serde::{Deserialize, Serialize}; +use serde_with::serde_as; +use tracing::{debug, debug_span, instrument}; + +// A FieldBlob represents the encoding of a Vec as a list of polynomials over F, +// where F is a prime field. The polyonomials are represented in the monomial basis. 
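// [Editor's sketch, not part of the diff] Why each field element carries 31
// bytes here: `decode` below keeps MODULUS_BIT_SIZE / 8 bytes per scalar,
// which is 31 for the 255-bit Pasta fields. A hypothetical standalone check:
//
//     use ark_ff::PrimeField;
//     use mina_curves::pasta::Fp;
//
//     let n = (Fp::MODULUS_BIT_SIZE / 8) as usize;
//     assert_eq!(n, 31);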
+#[serde_as]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
+#[serde(bound = "G::ScalarField : CanonicalDeserialize + CanonicalSerialize")]
+pub struct FieldBlob<G: CommitmentCurve> {
+    pub n_bytes: usize,
+    pub domain_size: usize,
+    pub commitments: Vec<PolyComm<G>>,
+    pub folded_commitment: PolyComm<G>,
+    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
+    pub alpha: G::ScalarField,
+    #[serde_as(as = "Vec<o1_utils::serialization::SerdeAs>")]
+    pub data: Vec<DensePolynomial<G::ScalarField>>,
+}
+
+#[instrument(skip_all, level = "debug")]
+fn commit_to_blob_data<G: CommitmentCurve>(
+    srs: &SRS<G>,
+    data: &[DensePolynomial<G::ScalarField>],
+) -> Vec<PolyComm<G>> {
+    let num_chunks = 1;
+    data.par_iter()
+        .map(|p| srs.commit_non_hiding(p, num_chunks))
+        .collect()
+}
+
+impl<G: KimchiCurve> FieldBlob<G> {
+    #[instrument(skip_all, level = "debug")]
+    pub fn encode<
+        D: EvaluationDomain<G::ScalarField>,
+        EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
+    >(
+        srs: &SRS<G>,
+        domain: D,
+        bytes: &[u8],
+    ) -> FieldBlob<G> {
+        let field_elements = encode_for_domain(&domain, bytes);
+        let domain_size = domain.size();
+
+        let data: Vec<DensePolynomial<G::ScalarField>> = debug_span!("fft").in_scope(|| {
+            field_elements
+                .par_iter()
+                .map(|chunk| Evaluations::from_vec_and_domain(chunk.to_vec(), domain).interpolate())
+                .collect()
+        });
+
+        let commitments = commit_to_blob_data(srs, &data);
+
+        let (folded_commitment, alpha) = {
+            let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
+            fold_commitments(&mut sponge, &commitments)
+        };
+
+        debug!(
+            "Encoded {:.2} MB into {} polynomials",
+            bytes.len() as f32 / 1_000_000.0,
+            data.len()
+        );
+
+        FieldBlob {
+            n_bytes: bytes.len(),
+            domain_size,
+            commitments,
+            folded_commitment,
+            alpha,
+            data,
+        }
+    }
+
+    #[instrument(skip_all, level = "debug")]
+    pub fn decode<D: EvaluationDomain<G::ScalarField>>(domain: D, blob: FieldBlob<G>) -> Vec<u8> {
+        // TODO: find an Error type and use Result
+        if domain.size() != blob.domain_size {
+            panic!(
+                "Domain size mismatch, got {}, expected {}",
+                blob.domain_size,
+                domain.size()
+            );
+        }
+        let n = (G::ScalarField::MODULUS_BIT_SIZE / 8) as usize;
+        let m = G::ScalarField::size_in_bytes();
+        let mut bytes = Vec::with_capacity(blob.n_bytes);
+        let mut buffer = vec![0u8; m];
+
+        for p in blob.data {
+            let evals = p.evaluate_over_domain(domain).evals;
+            for x in evals {
+                decode_into(&mut buffer, x);
+                bytes.extend_from_slice(&buffer[(m - n)..m]);
+            }
+        }
+
+        bytes.truncate(blob.n_bytes);
+        bytes
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{commitment::commit_to_field_elems, env};
+
+    use super::*;
+    use crate::utils::test_utils::*;
+    use ark_poly::Radix2EvaluationDomain;
+    use mina_curves::pasta::{Fp, Vesta, VestaParameters};
+    use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
+    use once_cell::sync::Lazy;
+    use proptest::prelude::*;
+
+    static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| {
+        if let Ok(srs) = std::env::var("SRS_FILEPATH") {
+            env::get_srs_from_cache(srs)
+        } else {
+            SRS::create(1 << 16)
+        }
+    });
+
+    static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
+        Lazy::new(|| Radix2EvaluationDomain::new(SRS.size()).unwrap());
+
+    // check that Vec<u8> -> FieldBlob -> Vec<u8> is the identity function
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(20))]
+        #[test]
+        fn test_round_trip_blob_encoding(UserData(xs) in UserData::arbitrary())
+        { let blob = FieldBlob::<Vesta>::encode::<_, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(&*SRS, *DOMAIN, &xs);
+          let bytes = rmp_serde::to_vec(&blob).unwrap();
+          let a = rmp_serde::from_slice(&bytes).unwrap();
+          // check that ark-serialize is behaving as expected
+          prop_assert_eq!(blob.clone(), a);
+          let ys = FieldBlob::<Vesta>::decode(*DOMAIN, blob);
+          // check that we get the byte blob back again
+          prop_assert_eq!(xs,ys);
+        }
+    }
+
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(10))]
+        #[test]
+        fn test_user_and_storage_provider_commitments_equal(UserData(xs) in UserData::arbitrary())
+        { let elems = encode_for_domain(&*DOMAIN, &xs);
+          let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, elems);
+          let blob = FieldBlob::<Vesta>::encode::<_, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(&*SRS, *DOMAIN, &xs);
+          prop_assert_eq!(user_commitments, blob.commitments);
+        }
+    }
+}
diff --git a/saffron/src/cli.rs b/saffron/src/cli.rs
new file mode 100644
index 0000000000..8cae3445ef
--- /dev/null
+++ b/saffron/src/cli.rs
@@ -0,0 +1,134 @@
+use clap::{arg, Parser};
+use std::{fmt::Display, str::FromStr};
+
+#[derive(Debug, Clone)]
+pub struct HexString(pub Vec<u8>);
+
+impl FromStr for HexString {
+    type Err = hex::FromHexError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let stripped = s.strip_prefix("0x").unwrap_or(s);
+        Ok(HexString(hex::decode(stripped)?))
+    }
+}
+
+impl Display for HexString {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "0x{}", hex::encode(&self.0))
+    }
+}
+
+#[derive(Parser)]
+pub struct EncodeFileArgs {
+    #[arg(long, short = 'i', value_name = "FILE", help = "input file")]
+    pub input: String,
+
+    #[arg(
+        long,
+        short = 'o',
+        value_name = "FILE",
+        help = "output file (encoded as field elements)"
+    )]
+    pub output: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+
+    #[arg(
+        long = "assert-commitment",
+        value_name = "COMMITMENT",
+        help = "hash of commitments (hex encoded)"
+    )]
+    pub assert_commitment: Option<HexString>,
+}
+
+#[derive(Parser)]
+pub struct DecodeFileArgs {
+    #[arg(
+        long,
+        short = 'i',
+        value_name = "FILE",
+        help = "input file (encoded as field elements)"
+    )]
+    pub input: String,
+
+    #[arg(long, short = 'o', value_name = "FILE", help = "output file")]
+    pub output: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+}
+
+#[derive(Parser)]
+pub struct ComputeCommitmentArgs {
+    #[arg(long, short = 'i', value_name = "FILE", help = "input file")]
+    pub input: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+}
+
+#[derive(Parser)]
+pub struct StorageProofArgs {
+    #[arg(
+        long,
+        short = 'i',
+        value_name = "FILE",
+        help = "input file (encoded as field elements)"
+    )]
+    pub input: String,
+
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+
+    #[arg(
+        long = "challenge",
+        value_name = "CHALLENGE",
+        help = "challenge (hex encoded)"
+    )]
+    pub challenge: HexString,
+}
+
+#[derive(Parser)]
+pub struct VerifyStorageProofArgs {
+    #[arg(long = "srs-filepath", value_name = "SRS_FILEPATH")]
+    pub srs_cache: Option<String>,
+
+    #[arg(
+        long,
+        short = 'c',
+        value_name = "COMMITMENT",
+        help = "commitment (hex encoded)"
+    )]
+    pub commitment: HexString,
+
+    #[arg(
+        long = "challenge",
+        value_name = "CHALLENGE",
+        help = "challenge (hex encoded)"
+    )]
+    pub challenge: HexString,
+
+    #[arg(long, short = 'p', value_name = "PROOF", help = "proof (hex encoded)")]
+    pub proof: HexString,
+}
+
+#[derive(Parser)]
+#[command(
+    name = "saffron",
+    version = "0.1",
+    about = "saffron - a mutable state layer"
+)]
+pub enum Commands {
+    #[command(name = "encode")]
+    Encode(EncodeFileArgs),
+    #[command(name = "decode")]
+    Decode(DecodeFileArgs),
+    #[command(name = "compute-commitment")]
+    ComputeCommitment(ComputeCommitmentArgs),
+    #[command(name = "storage-proof")]
+    StorageProof(StorageProofArgs),
+    #[command(name =
"verify-storage-proof")] + VerifyStorageProof(VerifyStorageProofArgs), +} diff --git a/saffron/src/commitment.rs b/saffron/src/commitment.rs new file mode 100644 index 0000000000..471103c767 --- /dev/null +++ b/saffron/src/commitment.rs @@ -0,0 +1,69 @@ +use ark_ec::AffineRepr; +use ark_ff::One; +use ark_poly::{Evaluations, Radix2EvaluationDomain as D}; +use kimchi::curve::KimchiCurve; +use mina_poseidon::FqSponge; +use poly_commitment::{ + commitment::{absorb_commitment, CommitmentCurve}, + ipa::SRS, + PolyComm, SRS as _, +}; +use rayon::prelude::*; +use tracing::instrument; + +#[instrument(skip_all, level = "debug")] +pub fn commit_to_field_elems( + srs: &SRS, + domain: D, + field_elems: Vec>, +) -> Vec> { + field_elems + .par_iter() + .map(|chunk| { + let evals = Evaluations::from_vec_and_domain(chunk.to_vec(), domain); + srs.commit_evaluations_non_hiding(domain, &evals) + }) + .collect() +} + +#[instrument(skip_all, level = "debug")] +pub fn fold_commitments< + G: AffineRepr, + EFqSponge: Clone + FqSponge, +>( + sponge: &mut EFqSponge, + commitments: &[PolyComm], +) -> (PolyComm, G::ScalarField) { + for commitment in commitments { + absorb_commitment(sponge, commitment) + } + let alpha = sponge.challenge(); + let powers: Vec = commitments + .iter() + .scan(G::ScalarField::one(), |acc, _| { + let res = *acc; + *acc *= alpha; + Some(res) + }) + .collect::>(); + ( + PolyComm::multi_scalar_mul(&commitments.iter().collect::>(), &powers), + alpha, + ) +} + +pub fn user_commitment< + G: KimchiCurve, + EFqSponge: Clone + FqSponge, +>( + srs: &SRS, + domain: D, + field_elems: Vec>, +) -> PolyComm { + let commitments = commit_to_field_elems(srs, domain, field_elems); + let (commitment, _) = { + let mut sponge = EFqSponge::new(G::other_curve_sponge_params()); + fold_commitments(&mut sponge, &commitments) + }; + commitment +} diff --git a/saffron/src/env.rs b/saffron/src/env.rs new file mode 100644 index 0000000000..c151c6cb73 --- /dev/null +++ b/saffron/src/env.rs @@ -0,0 +1,58 @@ +use std::{fs::File, path::Path}; + +use kimchi::precomputed_srs::TestSRS; +use poly_commitment::{commitment::CommitmentCurve, ipa::SRS}; +use time::macros::format_description; +use tracing::debug; +use tracing_subscriber::{ + fmt::{format::FmtSpan, time::UtcTime}, + EnvFilter, +}; + +pub fn get_srs_from_cache(cache: String) -> SRS { + debug!("Loading SRS from cache {}", cache); + let file_path = Path::new(&cache); + let file = File::open(file_path).expect("Error opening SRS cache file"); + let srs: SRS = { + // By convention, proof systems serializes a TestSRS with filename 'test_.srs'. + // The benefit of using this is you don't waste time verifying the SRS. 
+ if file_path + .file_name() + .unwrap() + .to_str() + .unwrap() + .starts_with("test_") + { + let test_srs: TestSRS = rmp_serde::from_read(&file).unwrap(); + From::from(test_srs) + } else { + rmp_serde::from_read(&file).unwrap() + } + }; + debug!("SRS loaded successfully from cache"); + srs +} + +pub fn init_console_subscriber() { + let timer = UtcTime::new(format_description!( + "[year]-[month]-[day]T[hour repr:24]:[minute]:[second].[subsecond digits:3]Z" + )); + tracing_subscriber::fmt() + .with_env_filter(EnvFilter::from_default_env()) + .with_span_events(FmtSpan::CLOSE) + .with_timer(timer) + .with_target(true) + .with_thread_ids(false) + .with_line_number(false) + .with_file(false) + .with_level(true) + .with_ansi(true) + .with_writer(std::io::stdout) + .init(); +} + +#[cfg(test)] +#[ctor::ctor] +fn init_test_logging() { + init_console_subscriber(); +} diff --git a/saffron/src/lib.rs b/saffron/src/lib.rs new file mode 100644 index 0000000000..dbfe03d318 --- /dev/null +++ b/saffron/src/lib.rs @@ -0,0 +1,6 @@ +pub mod blob; +pub mod cli; +pub mod commitment; +pub mod env; +pub mod proof; +pub mod utils; diff --git a/saffron/src/main.rs b/saffron/src/main.rs new file mode 100644 index 0000000000..9273f3c7df --- /dev/null +++ b/saffron/src/main.rs @@ -0,0 +1,161 @@ +use anyhow::Result; +use ark_poly::{EvaluationDomain, Radix2EvaluationDomain}; +use clap::Parser; +use kimchi::groupmap::GroupMap; +use mina_curves::pasta::{Fp, Vesta, VestaParameters}; +use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge}; +use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, SRS as _}; +use rand::rngs::OsRng; +use saffron::{ + blob::FieldBlob, + cli::{self, HexString}, + commitment::user_commitment, + env, + proof::{self, StorageProof}, + utils, +}; +use std::{ + fs::File, + io::{Read, Write}, +}; +use tracing::{debug, debug_span}; + +pub const DEFAULT_SRS_SIZE: usize = 1 << 16; + +type FqSponge = DefaultFqSponge; + +fn get_srs(cache: Option) -> (SRS, Radix2EvaluationDomain) { + let res = match cache { + Some(cache) => { + let srs = env::get_srs_from_cache(cache); + let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap(); + (srs, domain_fp) + } + None => { + debug!( + "No SRS cache provided. 
+                "No SRS cache provided. Creating SRS from scratch with domain size {}",
+                DEFAULT_SRS_SIZE
+            );
+            let domain_size = DEFAULT_SRS_SIZE;
+            let srs = SRS::create(domain_size);
+            let domain_fp = Radix2EvaluationDomain::new(srs.size()).unwrap();
+            debug!("SRS created successfully");
+            (srs, domain_fp)
+        }
+    };
+
+    debug_span!("get_lagrange_basis", basis_size = res.0.size()).in_scope(|| {
+        res.0.get_lagrange_basis(res.1);
+    });
+
+    res
+}
+
+fn decode_file(args: cli::DecodeFileArgs) -> Result<()> {
+    let (_, domain) = get_srs(args.srs_cache);
+    debug!(
+        domain_size = domain.size(),
+        input_file = args.input,
+        "Decoding file"
+    );
+    let file = File::open(args.input)?;
+    let blob: FieldBlob<Vesta> = rmp_serde::decode::from_read(file)?;
+    let data = FieldBlob::<Vesta>::decode(domain, blob);
+    debug!(output_file = args.output, "Writing decoded blob to file");
+    let mut writer = File::create(args.output)?;
+    writer.write_all(&data)?;
+    Ok(())
+}
+
+fn encode_file(args: cli::EncodeFileArgs) -> Result<()> {
+    let (srs, domain) = get_srs(args.srs_cache);
+    debug!(
+        domain_size = domain.size(),
+        input_file = args.input,
+        "Encoding file"
+    );
+    let mut file = File::open(args.input)?;
+    let mut buf = Vec::new();
+    file.read_to_end(&mut buf)?;
+    let blob = FieldBlob::<Vesta>::encode::<_, FqSponge>(&srs, domain, &buf);
+    args.assert_commitment
+        .into_iter()
+        .for_each(|asserted_commitment| {
+            let c = rmp_serde::from_slice(&asserted_commitment.0).unwrap();
+            if blob.folded_commitment != c {
+                panic!(
+                    "commitment hash mismatch: asserted {}, computed {}",
+                    asserted_commitment,
+                    HexString(rmp_serde::encode::to_vec(&c).unwrap())
+                );
+            }
+        });
+    debug!(output_file = args.output, "Writing encoded blob to file");
+    let mut writer = File::create(args.output)?;
+    rmp_serde::encode::write(&mut writer, &blob)?;
+    Ok(())
+}
+
+pub fn compute_commitment(args: cli::ComputeCommitmentArgs) -> Result<HexString> {
+    let (srs, domain_fp) = get_srs(args.srs_cache);
+    let mut file = File::open(args.input)?;
+    let mut buf = Vec::new();
+    file.read_to_end(&mut buf)?;
+    let field_elems = utils::encode_for_domain(&domain_fp, &buf);
+    let commitment = user_commitment::<_, FqSponge>(&srs, domain_fp, field_elems);
+    let res = rmp_serde::to_vec(&commitment)?;
+    Ok(HexString(res))
+}
+
+pub fn storage_proof(args: cli::StorageProofArgs) -> Result<HexString> {
+    let file = File::open(args.input)?;
+    let blob: FieldBlob<Vesta> = rmp_serde::decode::from_read(file)?;
+    let proof = {
+        let (srs, _) = get_srs(args.srs_cache);
+        let group_map = <Vesta as CommitmentCurve>::Map::setup();
+        let mut rng = OsRng;
+        let evaluation_point = utils::encode(&args.challenge.0);
+        proof::storage_proof::<Vesta, FqSponge>(&srs, &group_map, blob, evaluation_point, &mut rng)
+    };
+    let res = rmp_serde::to_vec(&proof)?;
+    Ok(HexString(res))
+}
+
+pub fn verify_storage_proof(args: cli::VerifyStorageProofArgs) -> Result<()> {
+    let (srs, _) = get_srs(args.srs_cache);
+    let group_map = <Vesta as CommitmentCurve>::Map::setup();
+    let commitment = rmp_serde::from_slice(&args.commitment.0)?;
+    let evaluation_point = utils::encode(&args.challenge.0);
+    let proof: StorageProof<Vesta> = rmp_serde::from_slice(&args.proof.0)?;
+    let mut rng = OsRng;
+    let res = proof::verify_storage_proof::<Vesta, FqSponge>(
+        &srs,
+        &group_map,
+        commitment,
+        evaluation_point,
+        &proof,
+        &mut rng,
+    );
+    assert!(res);
+    Ok(())
+}
+
+pub fn main() -> Result<()> {
+    env::init_console_subscriber();
+    let args = cli::Commands::parse();
+    match args {
+        cli::Commands::Encode(args) => encode_file(args),
+        cli::Commands::Decode(args) => decode_file(args),
+        cli::Commands::ComputeCommitment(args) => {
+            let commitment = compute_commitment(args)?;
+            println!("{}", commitment);
+            Ok(())
+        }
+        cli::Commands::StorageProof(args) => {
+            let proof = storage_proof(args)?;
+            println!("{}", proof);
+            Ok(())
+        }
+        cli::Commands::VerifyStorageProof(args) => verify_storage_proof(args),
+    }
+}
diff --git a/saffron/src/proof.rs b/saffron/src/proof.rs
new file mode 100644
index 0000000000..33d6f1b779
--- /dev/null
+++ b/saffron/src/proof.rs
@@ -0,0 +1,179 @@
+use crate::blob::FieldBlob;
+use ark_ec::AffineRepr;
+use ark_ff::{One, PrimeField, Zero};
+use ark_poly::{univariate::DensePolynomial, Polynomial, Radix2EvaluationDomain as D};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+use kimchi::curve::KimchiCurve;
+use mina_poseidon::FqSponge;
+use o1_utils::ExtendedDensePolynomial;
+use poly_commitment::{
+    commitment::{BatchEvaluationProof, CommitmentCurve, Evaluation},
+    ipa::{OpeningProof, SRS},
+    utils::DensePolynomialOrEvaluations,
+    PolyComm,
+};
+use rand::rngs::OsRng;
+use serde::{Deserialize, Serialize};
+use serde_with::serde_as;
+use tracing::instrument;
+
+#[serde_as]
+#[derive(Debug, Serialize, Deserialize)]
+#[serde(bound = "G::ScalarField : CanonicalDeserialize + CanonicalSerialize")]
+pub struct StorageProof<G: KimchiCurve> {
+    #[serde_as(as = "o1_utils::serialization::SerdeAs")]
+    pub evaluation: G::ScalarField,
+    pub opening_proof: OpeningProof<G>,
+}
+
+#[instrument(skip_all, level = "debug")]
+pub fn storage_proof<
+    G: KimchiCurve,
+    EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
+>(
+    srs: &SRS<G>,
+    group_map: &G::Map,
+    blob: FieldBlob<G>,
+    evaluation_point: G::ScalarField,
+    rng: &mut OsRng,
+) -> StorageProof<G>
+where
+    G::BaseField: PrimeField,
+{
+    let p = {
+        let init = (DensePolynomial::zero(), G::ScalarField::one());
+        blob.data
+            .into_iter()
+            .fold(init, |(acc_poly, curr_power), curr_poly| {
+                (
+                    acc_poly + curr_poly.scale(curr_power),
+                    curr_power * blob.alpha,
+                )
+            })
+            .0
+    };
+    let evaluation = p.evaluate(&evaluation_point);
+    let opening_proof_sponge = {
+        let mut sponge = EFqSponge::new(G::other_curve_sponge_params());
+        // TODO: check and see if we need to also absorb the poly cm
+        // see https://github.com/o1-labs/proof-systems/blob/feature/test-data-storage-commitments/data-storage/src/main.rs#L265-L269
+        sponge.absorb_fr(&[evaluation]);
+        sponge
+    };
+    let opening_proof = srs.open(
+        group_map,
+        &[(
+            DensePolynomialOrEvaluations::<<G as AffineRepr>::ScalarField, D<G::ScalarField>>::DensePolynomial(
+                &p,
+            ),
+            PolyComm {
+                chunks: vec![G::ScalarField::zero()],
+            },
+        )],
+        &[evaluation_point],
+        G::ScalarField::one(), // Single evaluation, so we don't care
+        G::ScalarField::one(), // Single evaluation, so we don't care
+        opening_proof_sponge,
+        rng,
+    );
+    StorageProof {
+        evaluation,
+        opening_proof,
+    }
+}
+
+#[instrument(skip_all, level = "debug")]
+pub fn verify_storage_proof<
+    G: KimchiCurve,
+    EFqSponge: Clone + FqSponge<G::BaseField, G, G::ScalarField>,
+>(
+    srs: &SRS<G>,
+    group_map: &G::Map,
+    commitment: PolyComm<G>,
+    evaluation_point: G::ScalarField,
+    proof: &StorageProof<G>,
+    rng: &mut OsRng,
+) -> bool
+where
+    G::BaseField: PrimeField,
+{
+    let mut opening_proof_sponge = EFqSponge::new(G::other_curve_sponge_params());
+    opening_proof_sponge.absorb_fr(&[proof.evaluation]);
+
+    srs.verify(
+        group_map,
+        &mut [BatchEvaluationProof {
+            sponge: opening_proof_sponge.clone(),
+            evaluation_points: vec![evaluation_point],
+            polyscale: G::ScalarField::one(),
+            evalscale: G::ScalarField::one(),
+            evaluations: vec![Evaluation {
+                commitment,
+                evaluations: vec![vec![proof.evaluation]],
+            }],
+            opening: &proof.opening_proof,
+            combined_inner_product: proof.evaluation,
+        }],
+        rng,
+    )
+}
+
+#[cfg(test)]
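+// Round-trip property test: fold commitments to random user data, encode the
+// data as a blob, open at a random evaluation point, and check that the
+// verifier accepts. Uses the SRS from SRS_FILEPATH when set, otherwise a fresh one.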
+mod tests {
+    use super::*;
+    use crate::{
+        commitment::{commit_to_field_elems, fold_commitments},
+        env,
+        utils::{encode_for_domain, test_utils::UserData},
+    };
+    use ark_poly::{EvaluationDomain, Radix2EvaluationDomain};
+    use ark_std::UniformRand;
+    use kimchi::groupmap::GroupMap;
+    use mina_curves::pasta::{Fp, Vesta, VestaParameters};
+    use mina_poseidon::{constants::PlonkSpongeConstantsKimchi, sponge::DefaultFqSponge};
+    use once_cell::sync::Lazy;
+    use poly_commitment::{commitment::CommitmentCurve, ipa::SRS, SRS as _};
+    use proptest::prelude::*;
+
+    static SRS: Lazy<SRS<Vesta>> = Lazy::new(|| {
+        if let Ok(srs) = std::env::var("SRS_FILEPATH") {
+            env::get_srs_from_cache(srs)
+        } else {
+            SRS::create(1 << 16)
+        }
+    });
+
+    static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> =
+        Lazy::new(|| Radix2EvaluationDomain::new(SRS.size()).unwrap());
+
+    static GROUP_MAP: Lazy<<Vesta as CommitmentCurve>::Map> =
+        Lazy::new(<Vesta as CommitmentCurve>::Map::setup);
+
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(5))]
+        #[test]
+        fn test_storage_prove_verify(UserData(data) in UserData::arbitrary()) {
+            let mut rng = OsRng;
+            let (commitment, _) = {
+                let field_elems = encode_for_domain(&*DOMAIN, &data);
+                let user_commitments = commit_to_field_elems(&*SRS, *DOMAIN, field_elems);
+                let mut fq_sponge = DefaultFqSponge::<VestaParameters, PlonkSpongeConstantsKimchi>::new(
+                    mina_poseidon::pasta::fq_kimchi::static_params(),
+                );
+                fold_commitments(&mut fq_sponge, &user_commitments)
+            };
+            let blob = FieldBlob::<Vesta>::encode::<_, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(&*SRS, *DOMAIN, &data);
+            let evaluation_point = Fp::rand(&mut rng);
+            let proof = storage_proof::<
+                Vesta,
+                DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>,
+            >(&*SRS, &*GROUP_MAP, blob, evaluation_point, &mut rng);
+            let res = verify_storage_proof::<Vesta, DefaultFqSponge<VestaParameters, PlonkSpongeConstantsKimchi>>(
+                &*SRS,
+                &*GROUP_MAP,
+                commitment,
+                evaluation_point,
+                &proof,
+                &mut rng,
+            );
+            prop_assert!(res);
+        }
+    }
+}
diff --git a/saffron/src/utils.rs b/saffron/src/utils.rs
new file mode 100644
index 0000000000..c0a48d4ba2
--- /dev/null
+++ b/saffron/src/utils.rs
@@ -0,0 +1,438 @@
+use std::marker::PhantomData;
+
+use ark_ff::{BigInteger, PrimeField};
+use ark_poly::EvaluationDomain;
+use o1_utils::FieldHelpers;
+use thiserror::Error;
+use tracing::instrument;
+
+// For injectivity, you can only use this on inputs of length at most
+// 'F::MODULUS_BIT_SIZE / 8', e.g. for Vesta this is 31.
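+// (A Pasta field modulus is 255 bits, so floor(255 / 8) = 31: every 31-byte
+// big-endian value lies below the modulus, while 32-byte inputs could be
+// reduced mod p and collide.)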
+pub fn encode<Fp: PrimeField>(bytes: &[u8]) -> Fp {
+    Fp::from_be_bytes_mod_order(bytes)
+}
+
+pub fn decode_into<Fp: PrimeField>(buffer: &mut [u8], x: Fp) {
+    let bytes = x.into_bigint().to_bytes_be();
+    buffer.copy_from_slice(&bytes);
+}
+
+pub fn encode_as_field_elements<F: PrimeField>(bytes: &[u8]) -> Vec<F> {
+    let n = (F::MODULUS_BIT_SIZE / 8) as usize;
+    bytes
+        .chunks(n)
+        .map(|chunk| {
+            let mut bytes = vec![0u8; n];
+            bytes[..chunk.len()].copy_from_slice(chunk);
+            encode(&bytes)
+        })
+        .collect::<Vec<F>>()
+}
+
+pub fn encode_for_domain<F: PrimeField, D: EvaluationDomain<F>>(
+    domain: &D,
+    bytes: &[u8],
+) -> Vec<Vec<F>> {
+    let domain_size = domain.size();
+    let xs = encode_as_field_elements(bytes);
+    xs.chunks(domain_size)
+        .map(|chunk| {
+            if chunk.len() < domain.size() {
+                let mut padded_chunk = Vec::with_capacity(domain.size());
+                padded_chunk.extend_from_slice(chunk);
+                padded_chunk.resize(domain.size(), F::zero());
+                padded_chunk
+            } else {
+                chunk.to_vec()
+            }
+        })
+        .collect()
+}
+
+#[derive(Clone, Debug)]
+/// Represents the byte range of a user query
+pub struct QueryBytes {
+    pub start: usize,
+    pub len: usize,
+}
+
+#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Debug)]
+/// We store the data as a vector of vectors of field elements;
+/// each inner vector represents a polynomial
+struct FieldElt {
+    /// the index of the polynomial the data point is attached to
+    poly_index: usize,
+    /// the index of the root of unity the data point is attached to
+    eval_index: usize,
+    domain_size: usize,
+    n_polys: usize,
+}
+/// Represents a query in terms of field elements
+#[derive(Debug)]
+pub struct QueryField<F> {
+    start: FieldElt,
+    /// how many bytes we need to trim from the first chunk
+    /// we get from the first field element we decode
+    leftover_start: usize,
+    end: FieldElt,
+    /// how many bytes we need to trim from the last chunk
+    /// we get from the last field element we decode
+    leftover_end: usize,
+    tag: PhantomData<F>,
+}
+
+impl<F: PrimeField> QueryField<F> {
+    #[instrument(skip_all, level = "debug")]
+    pub fn apply(self, data: &[Vec<F>]) -> Vec<u8> {
+        let n = (F::MODULUS_BIT_SIZE / 8) as usize;
+        let m = F::size_in_bytes();
+        let mut buffer = vec![0u8; m];
+        let mut answer = Vec::new();
+        self.start
+            .into_iter()
+            .take_while(|x| x <= &self.end)
+            .for_each(|x| {
+                let value = data[x.poly_index][x.eval_index];
+                decode_into(&mut buffer, value);
+                answer.extend_from_slice(&buffer[(m - n)..m]);
+            });
+
+        answer[(self.leftover_start)..(answer.len() - self.leftover_end)].to_vec()
+    }
+}
+
+impl Iterator for FieldElt {
+    type Item = FieldElt;
+    fn next(&mut self) -> Option<Self::Item> {
+        let current = *self;
+
+        if (self.eval_index + 1) < self.domain_size {
+            self.eval_index += 1;
+        } else if (self.poly_index + 1) < self.n_polys {
+            self.poly_index += 1;
+            self.eval_index = 0;
+        } else {
+            return None;
+        }
+
+        Some(current)
+    }
+}
+
+#[derive(Debug, Error, Clone, PartialEq)]
+pub enum QueryError {
+    #[error("Query out of bounds: poly_index {poly_index} eval_index {eval_index} n_polys {n_polys} domain_size {domain_size}")]
+    QueryOutOfBounds {
+        poly_index: usize,
+        eval_index: usize,
+        n_polys: usize,
+        domain_size: usize,
+    },
+}
+
+impl QueryBytes {
+    pub fn into_query_field<F: PrimeField>(
+        &self,
+        domain_size: usize,
+        n_polys: usize,
+    ) -> Result<QueryField<F>, QueryError> {
+        let n = (F::MODULUS_BIT_SIZE / 8) as usize;
+        let start = {
+            let start_field_nb = self.start / n;
+            FieldElt {
+                poly_index: start_field_nb / domain_size,
+                eval_index: start_field_nb % domain_size,
+                domain_size,
+                n_polys,
+            }
+        };
+        let byte_end = self.start + self.len;
+        let end = {
+            let end_field_nb = byte_end / n;
+            FieldElt {
+                poly_index: end_field_nb / domain_size,
+                eval_index: end_field_nb % domain_size,
+                domain_size,
+                n_polys,
+            }
+        };
+
+        if start.poly_index >= n_polys || end.poly_index >= n_polys {
+            return Err(QueryError::QueryOutOfBounds {
+                poly_index: end.poly_index,
+                eval_index: end.eval_index,
+                n_polys,
+                domain_size,
+            });
+        };
+
+        let leftover_start = self.start % n;
+        let leftover_end = n - byte_end % n;
+
+        Ok(QueryField {
+            start,
+            leftover_start,
+            end,
+            leftover_end,
+            tag: std::marker::PhantomData,
+        })
+    }
+}
+
+#[cfg(test)]
+pub mod test_utils {
+    use proptest::prelude::*;
+
+    #[derive(Debug, Clone)]
+    pub struct UserData(pub Vec<u8>);
+
+    impl UserData {
+        pub fn len(&self) -> usize {
+            self.0.len()
+        }
+
+        pub fn is_empty(&self) -> bool {
+            self.0.is_empty()
+        }
+    }
+
+    #[derive(Clone, Debug)]
+    pub enum DataSize {
+        Small,
+        Medium,
+        Large,
+    }
+
+    impl DataSize {
+        const KB: usize = 1_000;
+        const MB: usize = 1_000_000;
+
+        fn size_range_bytes(&self) -> (usize, usize) {
+            match self {
+                // Small: 1KB - 1MB
+                Self::Small => (Self::KB, Self::MB),
+                // Medium: 1MB - 10MB
+                Self::Medium => (Self::MB, 10 * Self::MB),
+                // Large: 10MB - 100MB
+                Self::Large => (10 * Self::MB, 100 * Self::MB),
+            }
+        }
+    }
+
+    impl Arbitrary for DataSize {
+        type Parameters = ();
+        type Strategy = BoxedStrategy<Self>;
+
+        fn arbitrary_with(_: ()) -> Self::Strategy {
+            prop_oneof![
+                6 => Just(DataSize::Small), // 60% chance
+                3 => Just(DataSize::Medium),
+                1 => Just(DataSize::Large)
+            ]
+            .boxed()
+        }
+    }
+
+    impl Default for DataSize {
+        fn default() -> Self {
+            Self::Small
+        }
+    }
+
+    impl Arbitrary for UserData {
+        type Parameters = DataSize;
+        type Strategy = BoxedStrategy<Self>;
+
+        fn arbitrary() -> Self::Strategy {
+            DataSize::arbitrary()
+                .prop_flat_map(|size| {
+                    let (min, max) = size.size_range_bytes();
+                    prop::collection::vec(any::<u8>(), min..max)
+                })
+                .prop_map(UserData)
+                .boxed()
+        }
+
+        fn arbitrary_with(size: Self::Parameters) -> Self::Strategy {
+            let (min, max) = size.size_range_bytes();
+            prop::collection::vec(any::<u8>(), min..max)
+                .prop_map(UserData)
+                .boxed()
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use ark_poly::Radix2EvaluationDomain;
+    use ark_std::UniformRand;
+    use mina_curves::pasta::Fp;
+    use once_cell::sync::Lazy;
+    use proptest::prelude::*;
+    use test_utils::{DataSize, UserData};
+    use tracing::debug;
+
+    fn decode(x: Fp) -> Vec<u8> {
+        let mut buffer = vec![0u8; Fp::size_in_bytes()];
+        decode_into(&mut buffer, x);
+        buffer
+    }
+
+    fn decode_from_field_elements<F: PrimeField>(xs: Vec<F>) -> Vec<u8> {
+        let n = (F::MODULUS_BIT_SIZE / 8) as usize;
+        let m = F::size_in_bytes();
+        let mut buffer = vec![0u8; F::size_in_bytes()];
+        xs.iter()
+            .flat_map(|x| {
+                decode_into(&mut buffer, *x);
+                buffer[(m - n)..m].to_vec()
+            })
+            .collect()
+    }
+
+    // Check that [u8] -> Fp -> [u8] is the identity function.
+    proptest! {
+        #[test]
+        fn test_round_trip_from_bytes(xs in any::<[u8; 31]>()) {
+            let n: Fp = encode(&xs);
+            let ys: [u8; 31] = decode(n).as_slice()[1..32].try_into().unwrap();
+            prop_assert_eq!(xs, ys);
+        }
+    }
+
+    // Check that Fp -> [u8] -> Fp is the identity function.
+    proptest! {
+        #[test]
+        fn test_round_trip_from_fp(
+            x in prop::strategy::Just(Fp::rand(&mut ark_std::rand::thread_rng()))
+        ) {
+            let bytes = decode(x);
+            let y = encode(&bytes);
+            prop_assert_eq!(x, y);
+        }
+    }
+
+    static DOMAIN: Lazy<Radix2EvaluationDomain<Fp>> = Lazy::new(|| {
+        const SRS_SIZE: usize = 1 << 16;
+        Radix2EvaluationDomain::new(SRS_SIZE).unwrap()
+    });
+
+    // Check that Vec<u8> -> Vec<Vec<Fp>> -> Vec<u8> is the identity function
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(20))]
+        #[test]
+        fn test_round_trip_encoding_to_field_elems(UserData(xs) in UserData::arbitrary()) {
+            let chunked = encode_for_domain(&*DOMAIN, &xs);
+            let elems = chunked
+                .into_iter()
+                .flatten()
+                .collect();
+            let ys = decode_from_field_elements(elems)
+                .into_iter()
+                .take(xs.len())
+                .collect::<Vec<u8>>();
+            prop_assert_eq!(xs, ys);
+        }
+    }
+
+    fn padded_field_length(xs: &[u8]) -> usize {
+        let m = Fp::MODULUS_BIT_SIZE as usize / 8;
+        let n = xs.len();
+        let num_field_elems = (n + m - 1) / m;
+        let num_polys = (num_field_elems + DOMAIN.size() - 1) / DOMAIN.size();
+        DOMAIN.size() * num_polys
+    }
+
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(20))]
+        #[test]
+        fn test_padded_byte_length(UserData(xs) in UserData::arbitrary()) {
+            let chunked = encode_for_domain(&*DOMAIN, &xs);
+            let n = chunked.into_iter().flatten().count();
+            prop_assert_eq!(n, padded_field_length(&xs));
+        }
+    }
+
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(20))]
+        #[test]
+        fn test_query(
+            (UserData(xs), queries) in UserData::arbitrary()
+                .prop_flat_map(|xs| {
+                    let n = xs.len();
+                    let query_strategy = (0..(n - 1)).prop_flat_map(move |start| {
+                        ((start + 1)..n).prop_map(move |end| QueryBytes { start, len: end - start })
+                    });
+                    let queries_strategy = prop::collection::vec(query_strategy, 10);
+                    (Just(xs), queries_strategy)
+                })
+        ) {
+            let chunked = encode_for_domain(&*DOMAIN, &xs);
+            for query in queries {
+                let expected = &xs[query.start..(query.start + query.len)];
+                let field_query: QueryField<Fp> = query.into_query_field(DOMAIN.size(), chunked.len()).unwrap();
+                let got_answer = field_query.apply(&chunked);
+                prop_assert_eq!(expected, got_answer);
+            }
+        }
+    }
+
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(20))]
+        #[test]
+        fn test_for_invalid_query_length(
+            (UserData(xs), mut query) in UserData::arbitrary()
+                .prop_flat_map(|UserData(xs)| {
+                    let padded_len = {
+                        let m = Fp::MODULUS_BIT_SIZE as usize / 8;
+                        padded_field_length(&xs) * m
+                    };
+                    let query_strategy = (0..xs.len()).prop_map(move |start| {
+                        // this is the last valid end point
+                        let end = padded_len - 1;
+                        QueryBytes { start, len: end - start }
+                    });
+                    (Just(UserData(xs)), query_strategy)
+                })
+        ) {
+            debug!("check that first query is valid");
+            let chunked = encode_for_domain(&*DOMAIN, &xs);
+            let n_polys = chunked.len();
+            let query_field = query.into_query_field::<Fp>(DOMAIN.size(), n_polys);
+            prop_assert!(query_field.is_ok());
+            debug!("check that extending query length by 1 is invalid");
+            query.len += 1;
+            let query_field = query.into_query_field::<Fp>(DOMAIN.size(), n_polys);
+            prop_assert!(query_field.is_err());
+        }
+    }
+    proptest! {
+        #![proptest_config(ProptestConfig::with_cases(20))]
+        #[test]
+        fn test_nil_query(
+            (UserData(xs), query) in UserData::arbitrary_with(DataSize::Small)
+                .prop_flat_map(|xs| {
+                    let padded_len = {
+                        let m = Fp::MODULUS_BIT_SIZE as usize / 8;
+                        padded_field_length(&xs.0) * m
+                    };
+                    let query_strategy = (0..padded_len).prop_map(move |start| {
+                        QueryBytes { start, len: 0 }
+                    });
+                    (Just(xs), query_strategy)
+                })
+        ) {
+            let chunked = encode_for_domain(&*DOMAIN, &xs);
+            let n_polys = chunked.len();
+            let field_query: QueryField<Fp> = query.into_query_field(DOMAIN.size(), n_polys).unwrap();
+            let got_answer = field_query.apply(&chunked);
+            prop_assert!(got_answer.is_empty());
+        }
+    }
+}
diff --git a/saffron/test-encoding.sh b/saffron/test-encoding.sh
new file mode 100755
index 0000000000..0402c71d0d
--- /dev/null
+++ b/saffron/test-encoding.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+# Check if input file is provided
+if [ $# -lt 1 ]; then
+    echo "Usage: $0 <input-file> [srs-filepath]"
+    exit 1
+fi
+
+INPUT_FILE="$1"
+SRS_ARG=""
+if [ $# -eq 2 ]; then
+    SRS_ARG="--srs-filepath $2"
+fi
+ENCODED_FILE="${INPUT_FILE%.*}.bin"
+DECODED_FILE="${INPUT_FILE%.*}-decoded.${INPUT_FILE##*.}"
+
+# Ensure input file exists
+if [ ! -f "$INPUT_FILE" ]; then
+    echo "Error: Input file '$INPUT_FILE' does not exist"
+    exit 1
+fi
+
+# Compute commitment and capture last line
+COMMITMENT=$(cargo run --release --bin saffron compute-commitment -i "$INPUT_FILE" $SRS_ARG | tee /dev/stderr | tail -n 1)
+
+# Run encode with captured commitment
+echo "Encoding $INPUT_FILE to $ENCODED_FILE"
+if ! cargo run --release --bin saffron encode -i "$INPUT_FILE" -o "$ENCODED_FILE" --assert-commitment "$COMMITMENT" $SRS_ARG; then
+    echo "Encoding failed"
+    exit 1
+fi
+
+# Generate 32-byte random challenge as hex string
+echo "Generating random challenge..."
+CHALLENGE=$(head -c 32 /dev/urandom | xxd -p -c 32)
+echo "Challenge: $CHALLENGE"
+
+# Generate storage proof and capture proof output
+echo "Generating storage proof..."
+PROOF=$(cargo run --release --bin saffron storage-proof -i "$ENCODED_FILE" --challenge "$CHALLENGE" $SRS_ARG | tee /dev/stderr | tail -n 1)
+if [ $? -ne 0 ]; then
+    echo "Storage proof generation failed"
+    exit 1
+fi
+
+# Verify the storage proof
+echo "Verifying proof..."
+if ! cargo run --release --bin saffron verify-storage-proof --commitment "$COMMITMENT" --challenge "$CHALLENGE" --proof "$PROOF" $SRS_ARG; then
+    echo "Proof verification failed"
+    exit 1
+fi
+echo "✓ Proof verification successful"
+
+# Run decode
+echo "Decoding $ENCODED_FILE to $DECODED_FILE"
+if ! cargo run --release --bin saffron decode -i "$ENCODED_FILE" -o "$DECODED_FILE" $SRS_ARG; then
+    echo "Decoding failed"
+    exit 1
+fi
+
+# Compare files
+echo "Comparing original and decoded files..."
+if cmp -s "$INPUT_FILE" "$DECODED_FILE"; then
+    echo "✓ Success: Files are identical"
+    echo "Cleaning up temporary files..."
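+    # Artifacts are only removed on success; the failure branch below keeps them for inspection.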
+    rm -f "$ENCODED_FILE" "$DECODED_FILE"
+    exit 0
+else
+    echo "✗ Error: Files differ"
+    echo "Keeping temporary files for inspection"
+    diff "$INPUT_FILE" "$DECODED_FILE"
+    exit 1
+fi
diff --git a/utils/Cargo.toml b/utils/Cargo.toml
index 73252da05f..361d48a206 100644
--- a/utils/Cargo.toml
+++ b/utils/Cargo.toml
@@ -15,20 +15,20 @@ ark-ff.workspace = true
 ark-poly.workspace = true
 ark-serialize.workspace = true
 bcs.workspace = true
-rayon.workspace = true
-serde.workspace = true
-serde_with.workspace = true
 hex.workspace = true
 num-bigint.workspace = true
 num-integer.workspace = true
 num-traits.workspace = true
+rand.workspace = true
+rand_core.workspace = true
+rayon.workspace = true
 rmp-serde.workspace = true
-secp256k1.workspace = true
+serde.workspace = true
+serde_with.workspace = true
 sha2.workspace = true
 thiserror.workspace = true
-rand.workspace = true
-rand_core.workspace = true
 
 [dev-dependencies]
 ark-ec.workspace = true
 mina-curves.workspace = true
+secp256k1.workspace = true