From 3578f692940fb49cc3c245d7a8371017d52e8f8f Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Tue, 31 Oct 2023 17:14:55 +0900 Subject: [PATCH 01/27] Implement Lasso inside Hyperplonk --- benchmark/benches/proof_system.rs | 2 +- plonkish_backend/src/backend.rs | 11 +- plonkish_backend/src/backend/hyperplonk.rs | 278 ++++++++++++---- .../src/backend/hyperplonk/preprocessor.rs | 40 +-- .../src/backend/hyperplonk/prover.rs | 202 ------------ .../src/backend/hyperplonk/util.rs | 121 +++++-- plonkish_backend/src/backend/lookup/lasso.rs | 158 +++++++++ .../lookup/lasso/memory_checking/mod.rs | 142 ++++++++ .../lookup/lasso/memory_checking/prover.rs | 234 +++++++++++++ .../lookup/lasso/memory_checking/verifier.rs | 135 ++++++++ .../src/backend/lookup/lasso/prover/mod.rs | 271 +++++++++++++++ .../src/backend/lookup/lasso/prover/surge.rs | 223 +++++++++++++ .../src/backend/lookup/lasso/test/mod.rs | 73 ++++ .../src/backend/lookup/lasso/verifier/mod.rs | 119 +++++++ plonkish_backend/src/backend/lookup/logup.rs | 292 ++++++++++++++++ plonkish_backend/src/backend/lookup/mod.rs | 50 +++ plonkish_backend/src/frontend/halo2.rs | 4 + plonkish_backend/src/frontend/halo2/lookup.rs | 47 +++ plonkish_backend/src/frontend/halo2/test.rs | 3 +- plonkish_backend/src/lib.rs | 3 + plonkish_backend/src/piop/gkr.rs | 23 ++ .../src/piop/gkr/fractional_sum_check.rs | 23 +- .../src/piop/gkr/grand_product.rs | 312 ++++++++++++++++++ plonkish_backend/src/poly/multilinear.rs | 13 +- plonkish_backend/src/util/arithmetic.rs | 26 ++ rust-toolchain | 2 +- 26 files changed, 2483 insertions(+), 324 deletions(-) create mode 100644 plonkish_backend/src/backend/lookup/lasso.rs create mode 100644 plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs create mode 100644 plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs create mode 100644 plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs create mode 100644 plonkish_backend/src/backend/lookup/lasso/prover/mod.rs 
create mode 100644 plonkish_backend/src/backend/lookup/lasso/prover/surge.rs create mode 100644 plonkish_backend/src/backend/lookup/lasso/test/mod.rs create mode 100644 plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs create mode 100644 plonkish_backend/src/backend/lookup/logup.rs create mode 100644 plonkish_backend/src/backend/lookup/mod.rs create mode 100644 plonkish_backend/src/frontend/halo2/lookup.rs create mode 100644 plonkish_backend/src/piop/gkr/grand_product.rs diff --git a/benchmark/benches/proof_system.rs b/benchmark/benches/proof_system.rs index 99000df..8c5c384 100644 --- a/benchmark/benches/proof_system.rs +++ b/benchmark/benches/proof_system.rs @@ -15,7 +15,7 @@ use halo2_proofs::{ }; use itertools::Itertools; use plonkish_backend::{ - backend::{self, PlonkishBackend, PlonkishCircuit}, + backend::{self, lookup::logup, PlonkishBackend, PlonkishCircuit}, frontend::halo2::{circuit::VanillaPlonk, CircuitExt, Halo2Circuit}, halo2_curves::bn256::{Bn256, Fr}, pcs::multilinear, diff --git a/plonkish_backend/src/backend.rs b/plonkish_backend/src/backend.rs index db879ee..7e40b76 100644 --- a/plonkish_backend/src/backend.rs +++ b/plonkish_backend/src/backend.rs @@ -11,12 +11,15 @@ use crate::{ use rand::RngCore; use std::{collections::BTreeSet, fmt::Debug, iter}; +use self::lookup::lasso::DecomposableTable; + pub mod hyperplonk; +pub mod lookup; pub trait PlonkishBackend: Clone + Debug { type Pcs: PolynomialCommitmentScheme; - type ProverParam: Clone + Debug + Serialize + DeserializeOwned; - type VerifierParam: Clone + Debug + Serialize + DeserializeOwned; + type ProverParam: Clone + Debug; + type VerifierParam: Clone + Debug; fn setup( circuit_info: &PlonkishCircuitInfo, @@ -43,7 +46,7 @@ pub trait PlonkishBackend: Clone + Debug { ) -> Result<(), Error>; } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug)] pub struct PlonkishCircuitInfo { /// 2^k is the size of the circuit pub k: usize, @@ -64,6 +67,8 @@ pub struct 
PlonkishCircuitInfo { /// which contains vector of tuples representing the input and table /// respectively. pub lookups: Vec, Expression)>>, + /// Represents Lasso lookup argument, which contains input, indices, and table + pub lasso_lookups: Vec<(Expression, Expression, Box>)>, /// Each item inside outer vector repesents an closed permutation cycle, /// which contains vetor of tuples representing the polynomial index and /// row respectively. diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index e94d29f..5b9aab9 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -2,29 +2,32 @@ use crate::{ backend::{ hyperplonk::{ preprocessor::{batch_size, compose, permutation_polys}, - prover::{ - instance_polys, lookup_compressed_polys, lookup_h_polys, lookup_m_polys, - permutation_z_polys, prove_zero_check, - }, - verifier::verify_zero_check, + prover::{instance_polys, permutation_z_polys, prove_zero_check}, + verifier::{pcs_query, points, verify_zero_check}, }, + lookup::lasso::verifier::LassoVerifier, PlonkishBackend, PlonkishCircuit, PlonkishCircuitInfo, WitnessEncoding, }, - pcs::PolynomialCommitmentScheme, + pcs::{PolynomialCommitmentScheme, Evaluation}, poly::multilinear::MultilinearPolynomial, util::{ arithmetic::{powers, BooleanHypercube, PrimeField}, end_timer, - expression::Expression, + expression::{Expression, Query}, start_timer, transcript::{TranscriptRead, TranscriptWrite}, - Deserialize, DeserializeOwned, Itertools, Serialize, + DeserializeOwned, Itertools, Serialize, }, Error, }; use rand::RngCore; use std::{fmt::Debug, hash::Hash, iter, marker::PhantomData}; +use super::lookup::lasso::{ + prover::{LassoProver, Surge}, + DecomposableTable, Lasso, +}; + pub(crate) mod preprocessor; pub(crate) mod prover; pub(crate) mod verifier; @@ -35,7 +38,7 @@ pub mod util; #[derive(Clone, Debug)] pub struct HyperPlonk(PhantomData); -#[derive(Clone, Debug, 
Serialize, Deserialize)] +#[derive(Clone, Debug)] pub struct HyperPlonkProverParam where F: PrimeField, @@ -46,6 +49,8 @@ where pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, pub(crate) lookups: Vec, Expression)>>, + pub(crate) lasso_lookups: Vec<(Expression, Expression, Box>)>, + pub(crate) lookup_polys_offset: usize, pub(crate) num_permutation_z_polys: usize, pub(crate) num_vars: usize, pub(crate) expression: Expression, @@ -55,7 +60,7 @@ where pub(crate) permutation_comms: Vec, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug)] pub struct HyperPlonkVerifierParam where F: PrimeField, @@ -65,7 +70,8 @@ where pub(crate) num_instances: Vec, pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, - pub(crate) num_lookups: usize, + pub(crate) lasso_tables: Vec>>, + pub(crate) lookup_polys_offset: usize, pub(crate) num_permutation_z_polys: usize, pub(crate) num_vars: usize, pub(crate) expression: Expression, @@ -124,12 +130,23 @@ where // Compose `VirtualPolynomialInfo` let (num_permutation_z_polys, expression) = compose(circuit_info); + let lookup_polys_offset = circuit_info.num_instances.len() + + preprocess_polys.len() + + circuit_info.num_witness_polys.iter().sum::() + + permutation_polys.len() + + num_permutation_z_polys; + let vp = HyperPlonkVerifierParam { pcs: pcs_vp, num_instances: circuit_info.num_instances.clone(), num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), - num_lookups: circuit_info.lookups.len(), + lasso_tables: circuit_info + .lasso_lookups + .iter() + .map(|(_, _, table)| table.clone()) + .collect_vec(), + lookup_polys_offset, num_permutation_z_polys, num_vars, expression: expression.clone(), @@ -146,6 +163,8 @@ where num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), lookups: circuit_info.lookups.clone(), + lasso_lookups: circuit_info.lasso_lookups.clone(), + 
lookup_polys_offset, num_permutation_z_polys, num_vars, expression, @@ -208,31 +227,104 @@ where .chain(witness_polys.iter()) .collect_vec(); - // Round n + let (lookups, tables) = pp + .lasso_lookups + .iter() + .map(|(input, index, table)| ((input, index), table)) + .unzip::<_, _, Vec<_>, Vec<_>>(); + let lookup_polys = Lasso::::lookup_polys(&polys, &lookups); + let (lookup_input_polys, lookup_nz_polys) = + lookup_polys.into_iter().unzip::<_, _, Vec<_>, Vec<_>>(); - let beta = transcript.squeeze_challenge(); + let lookup_input_poly = &lookup_input_polys[0]; + let lookup_nz_poly = &lookup_nz_polys[0]; + let table = tables[0]; + let num_vars = lookup_input_poly.num_vars(); + // why this is 3?? + let lookup_points_offset = 3; - let timer = start_timer(|| format!("lookup_compressed_polys-{}", pp.lookups.len())); - let lookup_compressed_polys = { - let max_lookup_width = pp.lookups.iter().map(Vec::len).max().unwrap_or_default(); - let betas = powers(beta).take(max_lookup_width).collect_vec(); - lookup_compressed_polys(&pp.lookups, &polys, &challenges, &betas) - }; - end_timer(timer); + // commit to input_poly + let lookup_input_comm = Pcs::commit_and_write(&pp.pcs, lookup_input_poly, transcript)?; - let timer = start_timer(|| format!("lookup_m_polys-{}", pp.lookups.len())); - let lookup_m_polys = lookup_m_polys(&lookup_compressed_polys)?; - end_timer(timer); + // get surge and dims + let mut surge = Surge::::new(); + + // commit to dims + let dims = surge.commit(&table, lookup_nz_poly); + let dim_comms = Pcs::batch_commit_and_write(&pp.pcs, &dims, transcript)?; - let lookup_m_comms = Pcs::batch_commit_and_write(&pp.pcs, &lookup_m_polys, transcript)?; + // Round n + // squeeze `r` + let r = transcript.squeeze_challenges(num_vars); + + // get subtable_polys + let subtable_polys = table.subtable_polys(); + let subtable_polys = subtable_polys.iter().collect_vec(); + let subtable_polys = subtable_polys.as_slice(); + + // get e_polys & read_ts_polys & final_cts_polys + let 
e_polys = { + let nz = surge.nz(); + LassoProver::::e_polys(subtable_polys, &table, &nz) + }; + let (read_ts_polys, final_cts_polys) = surge.counter_polys(&table); + + // commit to read_ts_polys & final_cts_polys & e_polys + let read_ts_comms = Pcs::batch_commit_and_write(&pp.pcs, &read_ts_polys, transcript)?; + let final_cts_comms = Pcs::batch_commit_and_write(&pp.pcs, &final_cts_polys, transcript)?; + let e_comms = Pcs::batch_commit_and_write(&pp.pcs, e_polys.as_slice(), transcript)?; + + // Lasso Sumcheck + let (lookup_points, lookup_evals) = Surge::::prove_sum_check( + &table, + lookup_input_poly, + e_polys.as_slice(), + &r, + num_vars, + pp.lookup_polys_offset, + lookup_points_offset, + transcript, + )?; + // squeeze memory checking challenges -> we will reuse beta, gamma for memory checking of Lasso // Round n+1 + let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); + + // memory_checking + let mut memory_checking = LassoProver::::prepare_memory_checking( + &table, + &subtable_polys, + &e_polys, + &dims, + &read_ts_polys, + &final_cts_polys, + &beta, + &gamma, + ); - let gamma = transcript.squeeze_challenge(); + memory_checking + .iter_mut() + .map(|memory_checking| memory_checking.prove_grand_product(transcript)) + .collect::, Error>>()?; - let timer = start_timer(|| format!("lookup_h_polys-{}", pp.lookups.len())); - let lookup_h_polys = lookup_h_polys(&lookup_compressed_polys, &lookup_m_polys, &gamma); - end_timer(timer); + // for each memory_checking, prepare dims, e_polys, read_ts_polys and `x` + // for each memory_checking, prepare final_cts_polys and `y` + let mem_check_opening_points = memory_checking + .iter() + .flat_map(|memory_checking| memory_checking.opening_points()) + .collect_vec(); + + let mem_check_opening_evals = memory_checking + .iter() + .enumerate() + .flat_map(|(index, memory_checking)| { + memory_checking.opening_evals( + table.num_chunks(), + pp.lookup_polys_offset, + lookup_points_offset + 1 + 2 * index, + ) + 
}) + .collect_vec(); let timer = start_timer(|| format!("permutation_z_polys-{}", pp.permutation_polys.len())); let permutation_z_polys = permutation_z_polys( @@ -244,12 +336,8 @@ where ); end_timer(timer); - let lookup_h_permutation_z_polys = iter::empty() - .chain(lookup_h_polys.iter()) - .chain(permutation_z_polys.iter()) - .collect_vec(); - let lookup_h_permutation_z_comms = - Pcs::batch_commit_and_write(&pp.pcs, lookup_h_permutation_z_polys.clone(), transcript)?; + let permutation_z_comms = + Pcs::batch_commit_and_write(&pp.pcs, permutation_z_polys.iter(), transcript)?; // Round n+2 @@ -259,8 +347,7 @@ where let polys = iter::empty() .chain(polys) .chain(pp.permutation_polys.iter().map(|(_, poly)| poly)) - .chain(lookup_m_polys.iter()) - .chain(lookup_h_permutation_z_polys) + .chain(permutation_z_polys.iter()) .collect_vec(); challenges.extend([beta, gamma, alpha]); let (points, evals) = prove_zero_check( @@ -273,20 +360,39 @@ where )?; // PCS open - + let polys = iter::empty() + .chain(polys) + .chain([lookup_input_poly]) + .chain(dims.iter()) + .chain(read_ts_polys.iter()) + .chain(final_cts_polys.iter()) + .chain(e_polys.iter()); let dummy_comm = Pcs::Commitment::default(); let comms = iter::empty() .chain(iter::repeat(&dummy_comm).take(pp.num_instances.len())) .chain(&pp.preprocess_comms) .chain(&witness_comms) .chain(&pp.permutation_comms) - .chain(&lookup_m_comms) - .chain(&lookup_h_permutation_z_comms) + .chain(&permutation_z_comms) + .chain([&lookup_input_comm]) + .chain(dim_comms.iter()) + .chain(read_ts_comms.iter()) + .chain(final_cts_comms.iter()) + .chain(e_comms.iter()) + .collect_vec(); + let points = iter::empty() + .chain(points) + .chain(lookup_points) + .chain(mem_check_opening_points) + .collect_vec(); + let evals = iter::empty() + .chain(evals) + .chain(lookup_evals) + .chain(mem_check_opening_evals) .collect_vec(); let timer = start_timer(|| format!("pcs_batch_open-{}", evals.len())); Pcs::batch_open(&pp.pcs, polys, comms, &points, 
&evals, transcript)?; end_timer(timer); - Ok(()) } @@ -305,7 +411,8 @@ where // Round 0..n - let mut witness_comms = Vec::with_capacity(vp.num_witness_polys.iter().sum()); + let num_witness_polys = vp.num_witness_polys.iter().sum(); + let mut witness_comms = Vec::with_capacity(num_witness_polys); let mut challenges = Vec::with_capacity(vp.num_challenges.iter().sum::() + 4); for (num_polys, num_challenges) in vp.num_witness_polys.iter().zip_eq(vp.num_challenges.iter()) @@ -314,22 +421,58 @@ where challenges.extend(transcript.squeeze_challenges(*num_challenges)); } + let lookup_points_offset = 3; + // read input_comm, dim_comms + let input_comm = Pcs::read_commitment(&vp.pcs, transcript)?; + let lasso_lookup_tables = &vp.lasso_tables; + let lookup_table = &lasso_lookup_tables[0]; + let num_chunks = lookup_table.num_chunks(); + let num_memories = lookup_table.num_memories(); + let dim_comms = Pcs::read_commitments(&vp.pcs, num_chunks, transcript)?; + // Round n + let r = transcript.squeeze_challenges(vp.num_vars); - let beta = transcript.squeeze_challenge(); + // read read_ts_comms & final_cts_comms & e_comms + let read_ts_comms = Pcs::read_commitments(&vp.pcs, num_chunks, transcript)?; + let final_cts_comms = Pcs::read_commitments(&vp.pcs, num_chunks, transcript)?; + let e_comms = Pcs::read_commitments(&vp.pcs, num_memories, transcript)?; - let lookup_m_comms = Pcs::read_commitments(&vp.pcs, vp.num_lookups, transcript)?; + let (lookup_points, lookup_evals) = LassoVerifier::::verify_sum_check( + lookup_table, + vp.num_vars, + vp.lookup_polys_offset, + lookup_points_offset, + transcript, + )?; // Round n+1 - let gamma = transcript.squeeze_challenge(); - - let lookup_h_permutation_z_comms = Pcs::read_commitments( - &vp.pcs, - vp.num_lookups + vp.num_permutation_z_polys, - transcript, - )?; + let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); + + // memory checking + let memory_checking = LassoVerifier::::prepare_memory_checking(lookup_table); + 
let (mem_check_opening_points, mem_check_opening_evals) = + memory_checking + .iter() + .enumerate() + .map(|(index, memory_checking)| { + memory_checking.verify_grand_product( + lookup_table.num_chunks(), + vp.num_vars, + vp.lookup_polys_offset, + lookup_points_offset + 1 + 2 * index, + &beta, + &gamma, + transcript + ) + }) + .collect::>, Vec>)>, Error>>()? + .into_iter() + .unzip::<_, _, Vec<_>, Vec<_>>(); + let permutation_z_comms = + Pcs::read_commitments(&vp.pcs, vp.num_permutation_z_polys, transcript)?; // Round n+2 let alpha = transcript.squeeze_challenge(); @@ -346,15 +489,28 @@ where )?; // PCS verify - let dummy_comm = Pcs::Commitment::default(); let comms = iter::empty() .chain(iter::repeat(&dummy_comm).take(vp.num_instances.len())) .chain(&vp.preprocess_comms) .chain(&witness_comms) .chain(vp.permutation_comms.iter().map(|(_, comm)| comm)) - .chain(&lookup_m_comms) - .chain(&lookup_h_permutation_z_comms) + .chain(&permutation_z_comms) + .chain([&input_comm]) + .chain(dim_comms.iter()) + .chain(read_ts_comms.iter()) + .chain(final_cts_comms.iter()) + .chain(e_comms.iter()) + .collect_vec(); + let points = iter::empty() + .chain(points) + .chain(lookup_points) + .chain(mem_check_opening_points.concat()) + .collect_vec(); + let evals = iter::empty() + .chain(evals) + .chain(lookup_evals) + .chain(mem_check_opening_evals.concat()) .collect_vec(); Pcs::batch_verify(&vp.pcs, comms, &points, &evals, transcript)?; @@ -373,7 +529,10 @@ mod test { use crate::{ backend::{ hyperplonk::{ - util::{rand_vanilla_plonk_circuit, rand_vanilla_plonk_with_lookup_circuit}, + util::{ + rand_vanilla_plonk_circuit, rand_vanilla_plonk_with_lasso_lookup_circuit, + rand_vanilla_plonk_with_lookup_circuit, + }, HyperPlonk, }, test::run_plonkish_backend, @@ -411,10 +570,17 @@ mod test { rand_vanilla_plonk_with_lookup_circuit(num_vars, seeded_std_rng(), seeded_std_rng()) }); } + + #[test] + fn [<$name _hyperplonk_vanilla_plonk_with_lasso_lookup>]() { + run_plonkish_backend::<_, 
HyperPlonk<$pcs>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| { + rand_vanilla_plonk_with_lasso_lookup_circuit(num_vars, seeded_std_rng(), seeded_std_rng()) + }); + } } }; ($name:ident, $pcs:ty) => { - tests!($name, $pcs, 2..16); + tests!($name, $pcs, 15..16); }; } diff --git a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs index c23c9b0..eebd89f 100644 --- a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs +++ b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs @@ -11,13 +11,14 @@ use crate::{ use std::{array, borrow::Cow, iter, mem}; pub(super) fn batch_size(circuit_info: &PlonkishCircuitInfo) -> usize { - let num_lookups = circuit_info.lookups.len(); let num_permutation_polys = circuit_info.permutation_polys().len(); chain![ [circuit_info.preprocess_polys.len() + circuit_info.permutation_polys().len()], circuit_info.num_witness_polys.clone(), - [num_lookups], - [num_lookups + div_ceil(num_permutation_polys, max_degree(circuit_info, None) - 1)], + [div_ceil( + num_permutation_polys, + max_degree(circuit_info, None) - 1 + )], ] .sum() } @@ -29,31 +30,22 @@ pub(super) fn compose( let [beta, gamma, alpha] = &array::from_fn(|idx| Expression::::Challenge(challenge_offset + idx)); - let (lookup_constraints, lookup_zero_checks) = lookup_constraints(circuit_info, beta, gamma); + // To use Lasso as lookup argument, we will run Sumcheck for Lasso seperately + // Will generalize this function later + // let (lookup_constraints, lookup_zero_checks) = lookup_constraints(circuit_info, beta, gamma); - let max_degree = max_degree(circuit_info, Some(&lookup_constraints)); - let (num_permutation_z_polys, permutation_constraints) = permutation_constraints( - circuit_info, - max_degree, - beta, - gamma, - 2 * circuit_info.lookups.len(), - ); + let max_degree = max_degree(circuit_info, None); + let (num_permutation_z_polys, permutation_constraints) = + permutation_constraints(circuit_info, 
max_degree, beta, gamma, 0); let expression = { let constraints = iter::empty() .chain(circuit_info.constraints.iter()) - .chain(lookup_constraints.iter()) .chain(permutation_constraints.iter()) .collect_vec(); let eq = Expression::eq_xy(0); let zero_check_on_every_row = Expression::distribute_powers(constraints, alpha) * eq; - Expression::distribute_powers( - iter::empty() - .chain(lookup_zero_checks.iter()) - .chain(Some(&zero_check_on_every_row)), - alpha, - ) + Expression::distribute_powers(iter::empty().chain(Some(&zero_check_on_every_row)), alpha) }; (num_permutation_z_polys, expression) @@ -63,19 +55,19 @@ pub(super) fn max_degree( circuit_info: &PlonkishCircuitInfo, lookup_constraints: Option<&[Expression]>, ) -> usize { - let lookup_constraints = lookup_constraints.map(Cow::Borrowed).unwrap_or_else(|| { - let dummy_challenge = Expression::zero(); - Cow::Owned(self::lookup_constraints(circuit_info, &dummy_challenge, &dummy_challenge).0) - }); + // let lookup_constraints = lookup_constraints.map(Cow::Borrowed).unwrap_or_else(|| { + // let dummy_challenge = Expression::zero(); + // Cow::Owned(self::lookup_constraints(circuit_info, &dummy_challenge, &dummy_challenge).0) + // }); iter::empty() .chain(circuit_info.constraints.iter().map(Expression::degree)) - .chain(lookup_constraints.iter().map(Expression::degree)) .chain(circuit_info.max_degree) .chain(Some(2)) .max() .unwrap() } +// LogUp lookup_constraints pub(super) fn lookup_constraints( circuit_info: &PlonkishCircuitInfo, beta: &Expression, diff --git a/plonkish_backend/src/backend/hyperplonk/prover.rs b/plonkish_backend/src/backend/hyperplonk/prover.rs index 19ef148..12bdf65 100644 --- a/plonkish_backend/src/backend/hyperplonk/prover.rs +++ b/plonkish_backend/src/backend/hyperplonk/prover.rs @@ -47,208 +47,6 @@ pub(crate) fn instance_polys<'a, F: PrimeField>( .collect() } -pub(crate) fn lookup_compressed_polys( - lookups: &[Vec<(Expression, Expression)>], - polys: &[&MultilinearPolynomial], - challenges: 
&[F], - betas: &[F], -) -> Vec<[MultilinearPolynomial; 2]> { - if lookups.is_empty() { - return Default::default(); - } - - let num_vars = polys[0].num_vars(); - let expression = lookups - .iter() - .flat_map(|lookup| lookup.iter().map(|(input, table)| (input + table))) - .sum::>(); - let lagranges = { - let bh = BooleanHypercube::new(num_vars).iter().collect_vec(); - expression - .used_langrange() - .into_iter() - .map(|i| (i, bh[i.rem_euclid(1 << num_vars) as usize])) - .collect::>() - }; - lookups - .iter() - .map(|lookup| lookup_compressed_poly(lookup, &lagranges, polys, challenges, betas)) - .collect() -} - -pub(super) fn lookup_compressed_poly( - lookup: &[(Expression, Expression)], - lagranges: &HashSet<(i32, usize)>, - polys: &[&MultilinearPolynomial], - challenges: &[F], - betas: &[F], -) -> [MultilinearPolynomial; 2] { - let num_vars = polys[0].num_vars(); - let bh = BooleanHypercube::new(num_vars); - let compress = |expressions: &[&Expression]| { - betas - .iter() - .copied() - .zip(expressions.iter().map(|expression| { - let mut compressed = vec![F::ZERO; 1 << num_vars]; - parallelize(&mut compressed, |(compressed, start)| { - for (b, compressed) in (start..).zip(compressed) { - *compressed = expression.evaluate( - &|constant| constant, - &|common_poly| match common_poly { - CommonPolynomial::Identity => F::from(b as u64), - CommonPolynomial::Lagrange(i) => { - if lagranges.contains(&(i, b)) { - F::ONE - } else { - F::ZERO - } - } - CommonPolynomial::EqXY(_) => unreachable!(), - }, - &|query| polys[query.poly()][bh.rotate(b, query.rotation())], - &|challenge| challenges[challenge], - &|value| -value, - &|lhs, rhs| lhs + &rhs, - &|lhs, rhs| lhs * &rhs, - &|value, scalar| value * &scalar, - ); - } - }); - MultilinearPolynomial::new(compressed) - })) - .sum::>() - }; - - let (inputs, tables) = lookup - .iter() - .map(|(input, table)| (input, table)) - .unzip::<_, _, Vec<_>, Vec<_>>(); - - let timer = start_timer(|| "compressed_input_poly"); - let 
compressed_input_poly = compress(&inputs); - end_timer(timer); - - let timer = start_timer(|| "compressed_table_poly"); - let compressed_table_poly = compress(&tables); - end_timer(timer); - - [compressed_input_poly, compressed_table_poly] -} - -pub(crate) fn lookup_m_polys( - compressed_polys: &[[MultilinearPolynomial; 2]], -) -> Result>, Error> { - compressed_polys.iter().map(lookup_m_poly).try_collect() -} - -pub(super) fn lookup_m_poly( - compressed_polys: &[MultilinearPolynomial; 2], -) -> Result, Error> { - let [input, table] = compressed_polys; - - let counts = { - let indice_map = table.iter().zip(0..).collect::>(); - - let chunk_size = div_ceil(input.evals().len(), num_threads()); - let num_chunks = div_ceil(input.evals().len(), chunk_size); - let mut counts = vec![HashMap::new(); num_chunks]; - let mut valids = vec![true; num_chunks]; - parallelize_iter( - counts - .iter_mut() - .zip(valids.iter_mut()) - .zip((0..).step_by(chunk_size)), - |((count, valid), start)| { - for input in input[start..].iter().take(chunk_size) { - if let Some(idx) = indice_map.get(input) { - count - .entry(*idx) - .and_modify(|count| *count += 1) - .or_insert(1); - } else { - *valid = false; - break; - } - } - }, - ); - if valids.iter().any(|valid| !valid) { - return Err(Error::InvalidSnark("Invalid lookup input".to_string())); - } - counts - }; - - let mut m = vec![0; 1 << input.num_vars()]; - for (idx, count) in counts.into_iter().flatten() { - m[idx] += count; - } - let m = par_map_collect(m, |count| match count { - 0 => F::ZERO, - 1 => F::ONE, - count => F::from(count), - }); - Ok(MultilinearPolynomial::new(m)) -} - -pub(super) fn lookup_h_polys( - compressed_polys: &[[MultilinearPolynomial; 2]], - m_polys: &[MultilinearPolynomial], - gamma: &F, -) -> Vec> { - compressed_polys - .iter() - .zip(m_polys.iter()) - .map(|(compressed_polys, m_poly)| lookup_h_poly(compressed_polys, m_poly, gamma)) - .collect() -} - -pub(super) fn lookup_h_poly( - compressed_polys: 
&[MultilinearPolynomial; 2], - m_poly: &MultilinearPolynomial, - gamma: &F, -) -> MultilinearPolynomial { - let [input, table] = compressed_polys; - let mut h_input = vec![F::ZERO; 1 << input.num_vars()]; - let mut h_table = vec![F::ZERO; 1 << input.num_vars()]; - - parallelize(&mut h_input, |(h_input, start)| { - for (h_input, input) in h_input.iter_mut().zip(input[start..].iter()) { - *h_input = *gamma + input; - } - }); - parallelize(&mut h_table, |(h_table, start)| { - for (h_table, table) in h_table.iter_mut().zip(table[start..].iter()) { - *h_table = *gamma + table; - } - }); - - let chunk_size = div_ceil(2 * h_input.len(), num_threads()); - parallelize_iter( - iter::empty() - .chain(h_input.chunks_mut(chunk_size)) - .chain(h_table.chunks_mut(chunk_size)), - |h| { - h.iter_mut().batch_invert(); - }, - ); - - parallelize(&mut h_input, |(h_input, start)| { - for (h_input, (h_table, m)) in h_input - .iter_mut() - .zip(h_table[start..].iter().zip(m_poly[start..].iter())) - { - *h_input -= *h_table * m; - } - }); - - if cfg!(feature = "sanity-check") { - assert_eq!(sum::(&h_input), F::ZERO); - } - - MultilinearPolynomial::new(h_input) -} - pub(crate) fn permutation_z_polys( num_chunks: usize, permutation_polys: &[(usize, MultilinearPolynomial)], diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index 30965e5..b01834b 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -2,17 +2,18 @@ use crate::{ backend::{ hyperplonk::{ preprocessor::{compose, permutation_polys}, - prover::{ - instance_polys, lookup_compressed_polys, lookup_h_polys, lookup_m_polys, - permutation_z_polys, - }, + prover::{instance_polys, permutation_z_polys}, }, + lookup::lasso::{test::AndTable, DecomposableTable}, mock::MockCircuit, PlonkishCircuit, PlonkishCircuitInfo, }, poly::{multilinear::MultilinearPolynomial, Polynomial}, util::{ - arithmetic::{powers, 
BooleanHypercube, PrimeField}, + arithmetic::{ + fe_from_le_bytes, fe_to_bits_le, inner_product, powers, usize_from_bits_le, + BooleanHypercube, PrimeField, + }, expression::{Expression, Query, Rotation}, test::{rand_array, rand_idx, rand_vec}, Itertools, @@ -42,7 +43,8 @@ pub fn vanilla_plonk_circuit_info( num_witness_polys: vec![3], num_challenges: vec![0], constraints: vec![q_l * w_l + q_r * w_r + q_m * w_l * w_r + q_o * w_o + q_c + pi], - lookups: Vec::new(), + lookups: vec![], + lasso_lookups: vec![], permutations, max_degree: Some(4), } @@ -80,6 +82,34 @@ pub fn vanilla_plonk_with_lookup_circuit_info( (q_lookup * w_r, t_r.clone()), (q_lookup * w_o, t_o.clone()), ]], + lasso_lookups: vec![], + permutations, + max_degree: Some(4), + } +} + +pub fn vanilla_plonk_with_lasso_lookup_circuit_info( + num_vars: usize, + num_instances: usize, + preprocess_polys: [Vec; 9], + permutations: Vec>, +) -> PlonkishCircuitInfo { + let [pi, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = + &array::from_fn(|poly| Query::new(poly, Rotation::cur())).map(Expression::::Polynomial); + let lasso_lookup_input = w_l.clone(); + let lasso_lookup_indices = w_r.clone(); + let lasso_table = Box::new(AndTable::::new()); + let chunk_bits = lasso_table.chunk_bits(); + let num_vars = chunk_bits.iter().chain([&num_vars]).max().unwrap(); + PlonkishCircuitInfo { + k: *num_vars, + num_instances: vec![num_instances], + preprocess_polys: preprocess_polys.to_vec(), + num_witness_polys: vec![3], + num_challenges: vec![0], + constraints: vec![], + lookups: vec![vec![]], + lasso_lookups: vec![(lasso_lookup_input, lasso_lookup_indices, lasso_table)], permutations, max_degree: Some(4), } @@ -315,6 +345,72 @@ pub fn rand_vanilla_plonk_with_lookup_circuit( ) } +pub fn rand_vanilla_plonk_with_lasso_lookup_circuit( + num_vars: usize, + mut preprocess_rng: impl RngCore, + mut witness_rng: impl RngCore, +) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { + let num_vars = 16; + let size = 1 << 
num_vars; + let mut polys = [(); 13].map(|_| vec![F::ZERO; size]); + + let [t_l, t_r, t_o] = [(); 3].map(|_| { + iter::empty() + .chain([F::ZERO, F::ZERO]) + .chain(iter::repeat_with(|| F::random(&mut preprocess_rng))) + .take(size) + .collect_vec() + }); + polys[7] = t_l; + polys[8] = t_r; + polys[9] = t_o; + + let instances = rand_vec(num_vars, &mut witness_rng); + polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); + let instance_rows = BooleanHypercube::new(num_vars) + .iter() + .take(num_vars + 1) + .collect::>(); + + let mut permutation = Permutation::default(); + for poly in [10, 11, 12] { + permutation.copy((poly, 1), (poly, 1)); + } + let and_table = AndTable::::new(); + let subtable_poly = &and_table.subtable_polys()[0]; + for idx in 0..size - 1 { + let (w_l, w_r) = { + let index = witness_rng.next_u64(); + let index_bits = fe_to_bits_le(F::from(index)); + assert_eq!(usize_from_bits_le(&index_bits) as u64, index); + let operands = index_bits[..64] + .chunks(16) + .map(|chunked_index_bits| { + let chunked_index = usize_from_bits_le(chunked_index_bits); + subtable_poly[chunked_index] + }) + .collect_vec(); + let value = and_table.combine_lookups(&operands); + (value, F::from(index)) + }; + let values = vec![(10, w_l), (11, w_r)]; + for (poly, value) in values { + polys[poly][idx] = value; + } + } + let [_, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = polys; + let circuit_info = vanilla_plonk_with_lasso_lookup_circuit_info( + num_vars, + instances.len(), + [q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o], + permutation.into_cycles(), + ); + ( + circuit_info, + MockCircuit::new(vec![instances], vec![w_l, w_r, w_o]), + ) +} + pub fn rand_vanilla_plonk_with_lookup_assignment( num_vars: usize, mut preprocess_rng: impl RngCore, @@ -338,17 +434,6 @@ pub fn rand_vanilla_plonk_with_lookup_assignment( let challenges: [_; 3] = rand_array(&mut witness_rng); let [beta, gamma, _] = challenges; - let (lookup_compressed_polys, 
lookup_m_polys) = { - let PlonkishCircuitInfo { lookups, .. } = - vanilla_plonk_with_lookup_circuit_info(0, 0, Default::default(), Vec::new()); - let betas = powers(beta).take(3).collect_vec(); - let lookup_compressed_polys = - lookup_compressed_polys(&lookups, &polys.iter().collect_vec(), &[], &betas); - let lookup_m_polys = lookup_m_polys(&lookup_compressed_polys).unwrap(); - (lookup_compressed_polys, lookup_m_polys) - }; - let lookup_h_polys = lookup_h_polys(&lookup_compressed_polys, &lookup_m_polys, &gamma); - let permutation_polys = permutation_polys(num_vars, &[10, 11, 12], &permutations); let permutation_z_polys = permutation_z_polys( 1, @@ -365,8 +450,6 @@ pub fn rand_vanilla_plonk_with_lookup_assignment( iter::empty() .chain(polys) .chain(permutation_polys) - .chain(lookup_m_polys) - .chain(lookup_h_polys) .chain(permutation_z_polys) .collect_vec(), challenges.to_vec(), diff --git a/plonkish_backend/src/backend/lookup/lasso.rs b/plonkish_backend/src/backend/lookup/lasso.rs new file mode 100644 index 0000000..57aa1c7 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso.rs @@ -0,0 +1,158 @@ +use std::{collections::HashSet, fmt::Debug, iter, marker::PhantomData}; + +use halo2_curves::ff::{Field, PrimeField}; +use itertools::Itertools; + +use crate::{ + backend::lookup::lasso::prover::Surge, + pcs::{CommitmentChunk, Evaluation, PolynomialCommitmentScheme}, + piop::sum_check::{ + classic::{ClassicSumCheck, EvaluationsProver}, + SumCheck, + }, + poly::multilinear::MultilinearPolynomial, + util::{ + arithmetic::BooleanHypercube, + expression::{CommonPolynomial, Expression}, + parallel::parallelize, + transcript::{FieldTranscriptRead, TranscriptWrite}, + }, + Error, +}; + +pub mod memory_checking; +pub mod prover; +pub mod test; +pub mod verifier; + +pub trait Subtable { + fn evaluate(point: &[F]) -> F; +} + +/// This is a trait that contains information about decomposable table to which +/// backend prover and verifier can ask +pub trait 
DecomposableTable: Debug + Sync + DecomposableTableClone { + fn num_chunks(&self) -> usize; + + fn num_memories(&self) -> usize; + + /// Returns multilinear extension polynomials of each subtable + fn subtable_polys(&self) -> Vec>; + + fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression; + + /// The `g` function that computes T[r] = g(T_1[r_1], ..., T_k[r_1], T_{k+1}[r_2], ..., T_{\alpha}[r_c]) + fn combine_lookups(&self, operands: &[F]) -> F; + + /// Returns the size of bits for each chunk. + /// Each chunk can have different bits. + fn chunk_bits(&self) -> Vec; + + fn memory_to_subtable_index(&self, memory_index: usize) -> usize; + + fn memory_to_chunk_index(&self, memory_index: usize) -> usize; +} + +pub trait DecomposableTableClone { + fn clone_box(&self) -> Box>; +} + +impl DecomposableTableClone for T +where + T: DecomposableTable + Clone + 'static, +{ + fn clone_box(&self) -> Box> { + Box::new(self.clone()) + } +} + +impl Clone for Box> { + fn clone(&self) -> Self { + self.clone_box() + } +} + +#[derive(Clone, Debug)] +pub struct Lasso< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, +> { + _marker1: PhantomData, + _marker2: PhantomData, +} + +impl< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, + > Lasso +{ + pub fn lookup_polys( + polys: &[&MultilinearPolynomial], + lookups: &Vec<(&Expression, &Expression)>, + ) -> Vec<(MultilinearPolynomial, MultilinearPolynomial)> { + let num_vars = polys[0].num_vars(); + let expression = lookups + .iter() + .map(|(input, index)| *input + *index) + .sum::>(); + let lagranges = { + let bh = BooleanHypercube::new(num_vars).iter().collect_vec(); + expression + .used_langrange() + .into_iter() + .map(|i| (i, bh[i.rem_euclid(1 << num_vars) as usize])) + .collect::>() + }; + lookups + .iter() + .map(|lookup| Self::lookup_poly(lookup, &lagranges, polys)) + .collect() + } + + fn lookup_poly( + lookup: &(&Expression, &Expression), + lagranges: &HashSet<(i32, usize)>, + polys: 
&[&MultilinearPolynomial], + ) -> (MultilinearPolynomial, MultilinearPolynomial) { + let num_vars = polys[0].num_vars(); + let bh = BooleanHypercube::new(num_vars); + + let evaluate = |expression: &Expression| { + let mut evals = vec![F::ZERO; 1 << num_vars]; + parallelize(&mut evals, |(evals, start)| { + for (b, eval) in (start..).zip(evals) { + *eval = expression.evaluate( + &|constant| constant, + &|common_poly| match common_poly { + CommonPolynomial::Identity => F::from(b as u64), + CommonPolynomial::Lagrange(i) => { + if lagranges.contains(&(i, b)) { + F::ONE + } else { + F::ZERO + } + } + CommonPolynomial::EqXY(_) => unreachable!(), + }, + &|query| polys[query.poly()][bh.rotate(b, query.rotation())], + &|_| unreachable!(), + &|value| -value, + &|lhs, rhs| lhs + &rhs, + &|lhs, rhs| lhs * &rhs, + &|value, scalar| value * &scalar, + ); + } + }); + MultilinearPolynomial::new(evals) + }; + + let (input, index) = lookup; + (evaluate(input), evaluate(index)) + } +} + +#[derive(Clone, Debug)] +pub struct GeneralizedLasso>( + PhantomData, + PhantomData, +); diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs new file mode 100644 index 0000000..3196f8a --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs @@ -0,0 +1,142 @@ +pub mod prover; +pub mod verifier; + +use std::iter; + +use halo2_curves::ff::PrimeField; +use itertools::Itertools; +pub use prover::MemoryCheckingProver; +use rayon::prelude::{IntoParallelRefIterator, IndexedParallelIterator, ParallelIterator}; + +use crate::{ + poly::multilinear::MultilinearPolynomial, + util::{arithmetic::inner_product, transcript::FieldTranscriptRead}, + Error, +}; + +#[derive(Clone, Debug)] +pub struct Chunk { + chunk_index: usize, + chunk_bits: usize, + memory: Vec>, +} + +impl Chunk { + pub fn chunk_polys_index(&self, offset: usize, num_chunks: usize) -> Vec { + let dim_poly_index = offset + 1 + 
impl<F: PrimeField> Chunk<F> {
    /// Indices of this chunk's `dim`, `read_ts` and `final_cts` polynomials
    /// inside the committed-polynomial list; `offset` points at the lookup
    /// section and the `+ 1` skips the committed lookup input polynomial.
    pub fn chunk_polys_index(&self, offset: usize, num_chunks: usize) -> Vec<usize> {
        let dim_poly_index = offset + 1 + self.chunk_index;
        let read_ts_poly_index = offset + 1 + num_chunks + self.chunk_index;
        let final_cts_poly_index = offset + 1 + 2 * num_chunks + self.chunk_index;
        vec![dim_poly_index, read_ts_poly_index, final_cts_poly_index]
    }

    pub fn new(chunk_index: usize, chunk_bits: usize, memory: Memory<F>) -> Self {
        Self {
            chunk_index,
            chunk_bits,
            memory: vec![memory],
        }
    }

    /// Number of memories attached to this chunk.
    pub fn num_memories(&self) -> usize {
        self.memory.len()
    }

    /// log2 of this chunk's memory size.
    pub fn chunk_bits(&self) -> usize {
        self.chunk_bits
    }

    pub fn add_memory(&mut self, memory: Memory<F>) {
        self.memory.push(memory);
    }

    pub fn memory_indices(&self) -> Vec<usize> {
        self.memory
            .iter()
            .map(|memory| memory.memory_index)
            .collect_vec()
    }

    /// check the following relations:
    /// - $read(x) == hash(dim(x), E(x), read_ts(x))$
    /// - $write(x) == hash(dim(x), E(x), read_ts(x) + 1)$
    /// - $init(y) == hash(y, T(y), 0)$
    /// - $final_read(y) == hash(y, T(y), final_cts(y))$
    ///
    /// Reads the claimed openings from the transcript and returns
    /// `(dim(x), read_ts(x), final_cts(y), [E_i(x)])`.
    ///
    /// NOTE(review): mismatches panic via `assert_eq!` instead of returning an
    /// `Err` — consider surfacing a verification error for invalid proofs.
    pub fn verify_memories(
        &self,
        read_xs: &[F],
        write_xs: &[F],
        init_ys: &[F],
        final_read_ys: &[F],
        y: &[F],
        gamma: &F,
        tau: &F,
        transcript: &mut impl FieldTranscriptRead<F>,
    ) -> Result<(F, F, F, Vec<F>), Error> {
        // Fingerprint of an (address, value, timestamp) tuple, shifted by tau.
        let hash = |a: &F, v: &F, t: &F| -> F { *a + *v * gamma + *t * gamma.square() - tau };
        let [dim_x, read_ts_poly_x, final_cts_poly_y] =
            transcript.read_field_elements(3)?.try_into().unwrap();
        let e_poly_xs = transcript.read_field_elements(self.num_memories())?;
        self.memory.iter().enumerate().for_each(|(i, memory)| {
            assert_eq!(read_xs[i], hash(&dim_x, &e_poly_xs[i], &read_ts_poly_x));

            assert_eq!(
                write_xs[i],
                hash(&dim_x, &e_poly_xs[i], &(read_ts_poly_x + F::ONE))
            );

            // MLE of the identity map at y, i.e. \sum_i 2^i * y_i.
            // NOTE(review): independent of `memory`; could be hoisted out of
            // the loop.
            let id_poly_y = inner_product(
                iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double()))
                    .take(y.len())
                    .collect_vec()
                    .iter(),
                y,
            );

            let subtable_poly_y = memory.subtable_poly.evaluate(y);

            assert_eq!(init_ys[i], hash(&id_poly_y, &subtable_poly_y, &F::ZERO));

            assert_eq!(
                final_read_ys[i],
                hash(&id_poly_y, &subtable_poly_y, &final_cts_poly_y)
            );
        });
        Ok((dim_x, read_ts_poly_x, final_cts_poly_y, e_poly_xs))
    }
}

/// One offline-memory-checking memory: a subtable addressed by one chunk.
#[derive(Clone, Debug)]
pub struct Memory<F: PrimeField> {
    memory_index: usize,
    subtable_poly: MultilinearPolynomial<F>,
}

impl<F: PrimeField> Memory<F> {
    pub fn new(memory_index: usize, subtable_poly: MultilinearPolynomial<F>) -> Self {
        Self {
            memory_index,
            subtable_poly,
        }
    }
}

/// The four grand-product input polynomials of one memory.
#[derive(Clone, Debug)]
struct MemoryGKR<F: PrimeField> {
    init: MultilinearPolynomial<F>,
    read: MultilinearPolynomial<F>,
    write: MultilinearPolynomial<F>,
    final_read: MultilinearPolynomial<F>,
}

impl<F: PrimeField> MemoryGKR<F> {
    pub fn new(
        init: MultilinearPolynomial<F>,
        read: MultilinearPolynomial<F>,
        write: MultilinearPolynomial<F>,
        final_read: MultilinearPolynomial<F>,
    ) -> Self {
        Self {
            init,
            read,
            write,
            final_read,
        }
    }
}

// ---- plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs ----

use std::iter;

use halo2_curves::ff::PrimeField;
use itertools::{chain, Itertools};
use rayon::prelude::{IntoParallelIterator, ParallelIterator};

use crate::{
    backend::lookup::lasso::prover::Chunk,
    pcs::Evaluation,
    piop::gkr::prove_grand_product,
    poly::{multilinear::MultilinearPolynomial, Polynomial},
    util::transcript::FieldTranscriptWrite,
    Error,
};

use super::MemoryGKR;

#[derive(Clone)]
pub struct MemoryCheckingProver<'a, F: PrimeField> {
    /// chunks with the same bit width
    chunks: Vec<Chunk<'a, F>>,
    /// GKR initial polynomials for each memory
    memories: Vec<MemoryGKR<F>>,
    /// random point at which `read_write` polynomials opened
    x: Vec<F>,
    /// random point at which `init_final_read` polynomials opened
    y: Vec<F>,
}

// e_polys -> x (Lasso Sumcheck)
// dims, e_polys, read_ts_polys -> x (for each MemoryChecking)
// final_cts_polys -> y (for each MemoryChecking)
y (for each MemoryChecking) + +impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { + // T_1[dim_1(x)], ..., T_k[dim_1(x)], + // ... + // T_{\alpha-k+1}[dim_c(x)], ..., T_{\alpha}[dim_c(x)] + pub fn new(chunks: Vec>, tau: &F, gamma: &F) -> Self { + let num_reads = chunks[0].num_reads(); + let memory_size = 1 << chunks[0].chunk_bits(); + + let hash = |a: &F, v: &F, t: &F| -> F { *a + *v * gamma + *t * gamma.square() - tau }; + + let memories_gkr: Vec> = (0..chunks.len()) + .into_par_iter() + .flat_map(|i| { + let chunk = &chunks[i]; + let chunk_polys = chunk.chunk_polys().collect_vec(); + let (dim, read_ts_poly, final_cts_poly) = + (chunk_polys[0], chunk_polys[1], chunk_polys[2]); + chunk + .memories() + .map(|memory| { + let memory_polys = memory.polys().collect_vec(); + let (subtable_poly, e_poly) = (memory_polys[0], memory_polys[1]); + let mut init = vec![]; + let mut read = vec![]; + let mut write = vec![]; + let mut final_read = vec![]; + (0..memory_size).for_each(|i| { + init.push(hash(&F::from(i as u64), &subtable_poly[i], &F::ZERO)); + final_read.push(hash( + &F::from(i as u64), + &subtable_poly[i], + &final_cts_poly[i], + )); + }); + (0..num_reads).for_each(|i| { + read.push(hash(&dim[i], &e_poly[i], &read_ts_poly[i])); + write.push(hash(&dim[i], &e_poly[i], &(read_ts_poly[i] + F::ONE))); + }); + MemoryGKR::new( + MultilinearPolynomial::new(init), + MultilinearPolynomial::new(read), + MultilinearPolynomial::new(write), + MultilinearPolynomial::new(final_read), + ) + }) + .collect_vec() + }) + .collect(); + + Self { + chunks, + memories: memories_gkr, + x: vec![], + y: vec![], + } + } + + fn inits(&self) -> impl Iterator> { + self.memories.iter().map(|memory| &memory.init) + } + + fn reads(&self) -> impl Iterator> { + self.memories.iter().map(|memory| &memory.read) + } + + fn writes(&self) -> impl Iterator> { + self.memories.iter().map(|memory| &memory.write) + } + + fn final_reads(&self) -> impl Iterator> { + self.memories.iter().map(|memory| 
&memory.final_read) + } + + fn iter( + &self, + ) -> impl Iterator< + Item = ( + &MultilinearPolynomial, + &MultilinearPolynomial, + &MultilinearPolynomial, + &MultilinearPolynomial, + ), + > { + self.memories.iter().map(|memory| { + ( + &memory.init, + &memory.read, + &memory.write, + &memory.final_read, + ) + }) + } + + pub fn claimed_v_0s(&self) -> impl IntoIterator>> { + let (claimed_read_0s, claimed_write_0s, claimed_init_0s, claimed_final_read_0s) = self + .iter() + .map(|(init, read, write, final_read)| { + let claimed_init_0 = init.iter().product(); + let claimed_read_0 = read.iter().product(); + let claimed_write_0 = write.iter().product(); + let claimed_final_read_0 = final_read.iter().product(); + + // sanity check + assert_eq!( + claimed_init_0 * claimed_write_0, + claimed_read_0 * claimed_final_read_0 + ); + ( + Some(claimed_read_0), + Some(claimed_write_0), + Some(claimed_init_0), + Some(claimed_final_read_0), + ) + }) + .multiunzip::<(Vec<_>, Vec<_>, Vec<_>, Vec<_>)>(); + chain!([ + claimed_read_0s, + claimed_write_0s, + claimed_init_0s, + claimed_final_read_0s + ]) + } + + pub fn prove_grand_product( + &mut self, + transcript: &mut impl FieldTranscriptWrite, + ) -> Result<(), Error> { + let (_, x) = prove_grand_product( + iter::repeat(None).take(self.memories.len() * 2), + chain!(self.reads(), self.writes()), + transcript, + )?; + + let (_, y) = prove_grand_product( + iter::repeat(None).take(self.memories.len() * 2), + chain!(self.inits(), self.final_reads()), + transcript, + )?; + + self.chunks.iter().for_each(|chunk| { + let chunk_poly_evals = chunk.chunk_poly_evals(&x, &y); + let e_poly_xs = chunk.e_poly_evals(&x); + transcript.write_field_elements(&chunk_poly_evals).unwrap(); + transcript.write_field_elements(&e_poly_xs).unwrap(); + }); + + self.x = x; + self.y = y; + + Ok(()) + } + + pub fn opening_points(&self) -> impl Iterator> { + chain!([self.x.clone(), self.y.clone()]) + } + + pub fn opening_evals( + &self, + num_chunks: usize, + 
polys_offset: usize, + points_offset: usize, + ) -> impl Iterator> { + let (dim_xs, read_ts_poly_xs, final_cts_poly_xs, e_poly_xs) = self + .chunks + .iter() + .map(|chunk| { + let chunk_poly_evals = chunk.chunk_poly_evals(&self.x, &self.y); + let chunk_polys_index = chunk.chunk_polys_index(polys_offset, num_chunks); + let e_poly_xs = chunk.e_poly_evals(&self.x); + let e_polys_offset = polys_offset + 1 + 3 * num_chunks; + ( + Evaluation::new(chunk_polys_index[0], points_offset, chunk_poly_evals[0]), + Evaluation::new(chunk_polys_index[1], points_offset, chunk_poly_evals[1]), + Evaluation::new(chunk_polys_index[2], points_offset + 1, chunk_poly_evals[2]), + chunk + .memories() + .enumerate() + .map(|(i, memory)| { + Evaluation::new( + e_polys_offset + memory.memory_index(), + points_offset, + e_poly_xs[i], + ) + }) + .collect_vec(), + ) + }) + .multiunzip::<( + Vec>, + Vec>, + Vec>, + Vec>>, + )>(); + chain!( + dim_xs, + read_ts_poly_xs, + final_cts_poly_xs, + e_poly_xs.concat() + ) + } +} diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs new file mode 100644 index 0000000..a9dae53 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -0,0 +1,135 @@ +use std::{iter, marker::PhantomData}; + +use halo2_curves::ff::PrimeField; +use itertools::{Itertools, chain}; + +use crate::{piop::gkr::verify_grand_product, util::transcript::FieldTranscriptRead, Error, pcs::Evaluation}; + +use super::Chunk; + +#[derive(Clone, Debug)] +pub struct MemoryCheckingVerifier { + chunks: Vec>, + _marker: PhantomData, +} + +impl<'a, F: PrimeField> MemoryCheckingVerifier { + pub fn new(chunks: Vec>) -> Self { + Self { + chunks, + _marker: PhantomData, + } + } + + pub fn verify_grand_product( + &self, + num_chunks: usize, + num_reads: usize, + polys_offset: usize, + points_offset: usize, + gamma: &F, + tau: &F, + transcript: &mut impl 
FieldTranscriptRead, + ) -> Result<(Vec>, Vec>), Error> { + let num_memories: usize = self.chunks.iter().map(|chunk| chunk.num_memories()).sum(); + let memory_size = self.chunks[0].chunk_bits(); + let (read_write_xs, x) = verify_grand_product( + num_reads, + iter::repeat(None).take(2 * num_memories), + transcript, + )?; + let (read_xs, write_xs) = read_write_xs.split_at(num_memories); + + let (init_final_read_ys, y) = verify_grand_product( + memory_size, + iter::repeat(None).take(2 * num_memories), + transcript, + )?; + let (init_ys, final_read_ys) = init_final_read_ys.split_at(num_memories); + + let mut offset = 0; + let (dim_xs, read_ts_poly_xs, final_cts_poly_ys, e_poly_xs) = self + .chunks + .iter() + .map(|chunk| { + let num_memories = chunk.num_memories(); + let result = chunk.verify_memories( + &read_xs[offset..offset + num_memories], + &write_xs[offset..offset + num_memories], + &init_ys[offset..offset + num_memories], + &final_read_ys[offset..offset + num_memories], + &y, + gamma, + tau, + transcript, + ); + offset += num_memories; + result + }) + .collect::)>, Error>>()? 
+ .into_iter() + .multiunzip::<(Vec<_>, Vec<_>, Vec<_>, Vec>)>(); + + let opening_evals = self.opening_evals( + num_chunks, + polys_offset, + points_offset, + &dim_xs, + &read_ts_poly_xs, + &final_cts_poly_ys, + &e_poly_xs.concat() + ).collect_vec(); + + Ok((vec![x, y], opening_evals)) + } + + fn opening_evals( + &self, + num_chunks: usize, + polys_offset: usize, + points_offset: usize, + dim_xs: &[F], + read_ts_poly_xs: &[F], + final_cts_poly_ys: &[F], + e_poly_xs: &[F], + ) -> impl Iterator> { + let (dim_xs, read_ts_poly_xs, final_cts_poly_xs) = self + .chunks + .iter() + .enumerate() + .map(|(i, chunk)| { + let chunk_polys_index = chunk.chunk_polys_index(polys_offset, num_chunks); + ( + Evaluation::new(chunk_polys_index[0], points_offset, dim_xs[i]), + Evaluation::new(chunk_polys_index[1], points_offset, read_ts_poly_xs[i]), + Evaluation::new(chunk_polys_index[2], points_offset + 1, final_cts_poly_ys[i]), + ) + }) + .multiunzip::<( + Vec>, + Vec>, + Vec>, + )>(); + + let e_poly_offset = polys_offset + 1 + 3 * num_chunks; + let e_poly_xs = self + .chunks + .iter() + .flat_map(|chunk| chunk.memory_indices()) + .zip(e_poly_xs) + .map(|(memory_index, &e_poly_x)| { + Evaluation::new( + e_poly_offset + memory_index, + points_offset, + e_poly_x, + ) + }) + .collect_vec(); + chain!( + dim_xs, + read_ts_poly_xs, + final_cts_poly_xs, + e_poly_xs + ) + } +} diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs new file mode 100644 index 0000000..773d7cb --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs @@ -0,0 +1,271 @@ +use std::{ + collections::HashMap, + iter::{self, repeat}, +}; + +use halo2_curves::ff::{Field, PrimeField}; +use itertools::{chain, izip, Itertools}; + +use crate::{ + pcs::{CommitmentChunk, Evaluation, PolynomialCommitmentScheme}, + piop::sum_check::{ + classic::{ClassicSumCheck, EvaluationsProver}, + SumCheck as _, VirtualPolynomial, + }, + 
poly::multilinear::MultilinearPolynomial, + util::transcript::TranscriptWrite, + Error, +}; + +use super::{memory_checking::MemoryCheckingProver, DecomposableTable, Lasso}; + +mod surge; + +pub use surge::Surge; + +type SumCheck = ClassicSumCheck>; + +pub struct Point { + offset: usize, + point: Vec, +} + +#[derive(Clone)] +pub struct Poly<'a, F: PrimeField> { + offset: usize, + poly: &'a MultilinearPolynomial, +} + +#[derive(Clone, Debug)] +pub struct Chunk<'a, F: PrimeField> { + chunk_index: usize, + dim: &'a MultilinearPolynomial, + read_ts_poly: &'a MultilinearPolynomial, + final_cts_poly: &'a MultilinearPolynomial, + memories: Vec>, +} + +impl<'a, F: PrimeField> Chunk<'a, F> { + fn new( + chunk_index: usize, + dim: &'a MultilinearPolynomial, + read_ts_poly: &'a MultilinearPolynomial, + final_cts_poly: &'a MultilinearPolynomial, + memory: Memory<'a, F>, + ) -> Self { + // sanity check + assert_eq!(dim.num_vars(), read_ts_poly.num_vars()); + + Self { + chunk_index, + dim, + read_ts_poly, + final_cts_poly, + memories: vec![memory], + } + } + + pub fn chunk_polys_index(&self, offset: usize, num_chunks: usize) -> Vec { + let dim_poly_index = offset + 1 + self.chunk_index; + let read_ts_poly_index = offset + 1 + num_chunks + self.chunk_index; + let final_cts_poly_index = offset + 1 + 2 * num_chunks + self.chunk_index; + vec![dim_poly_index, read_ts_poly_index, final_cts_poly_index] + } + + pub fn chunk_index(&self) -> usize { + self.chunk_index + } + + pub fn chunk_bits(&self) -> usize { + self.final_cts_poly.num_vars() + } + + pub fn num_reads(&self) -> usize { + 1 << self.dim.num_vars() + } + + pub fn chunk_polys(&self) -> impl Iterator> { + chain!([self.dim, self.read_ts_poly, self.final_cts_poly]) + } + + pub fn chunk_poly_evals(&self, x: &[F], y: &[F]) -> Vec { + vec![ + self.dim.evaluate(x), + self.read_ts_poly.evaluate(x), + self.final_cts_poly.evaluate(y), + ] + } + + pub fn e_poly_evals(&self, x: &[F]) -> Vec { + self.memories + .iter() + .map(|memory| 
memory.e_poly.evaluate(x)) + .collect_vec() + } + + pub(super) fn memories(&self) -> impl Iterator> { + self.memories.iter() + } + + pub(super) fn add_memory(&mut self, memory: Memory<'a, F>) { + // sanity check + let chunk_bits = self.chunk_bits(); + let num_reads = self.num_reads(); + assert_eq!(chunk_bits, memory.subtable_poly.num_vars()); + assert_eq!(num_reads, memory.e_poly.num_vars()); + + self.memories.push(memory); + } +} + +#[derive(Clone, Debug)] +pub(super) struct Memory<'a, F: PrimeField> { + memory_index: usize, + subtable_poly: &'a MultilinearPolynomial, + e_poly: &'a MultilinearPolynomial, +} + +impl<'a, F: PrimeField> Memory<'a, F> { + fn new( + memory_index: usize, + subtable_poly: &'a MultilinearPolynomial, + e_poly: &'a MultilinearPolynomial, + ) -> Self { + Self { + memory_index, + subtable_poly, + e_poly, + } + } + + pub fn memory_index(&self) -> usize { + self.memory_index + } + + pub fn e_poly(&self) -> &'a MultilinearPolynomial { + self.e_poly + } + + pub fn polys(&self) -> impl Iterator> { + chain!([self.subtable_poly, self.e_poly]) + } +} + +pub struct LassoProver< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, +> { + // Remove this + scheme: Lasso, +} + +impl< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, + > LassoProver +{ + pub fn e_polys( + subtable_polys: &[&MultilinearPolynomial], + table: &Box>, + nz: &Vec<&[usize]>, + ) -> Vec> { + let num_chunks = table.num_chunks(); + let num_memories = table.num_memories(); + assert_eq!(nz.len(), num_chunks); + let num_reads = nz[0].len(); + (0..num_memories) + .map(|i| { + let mut e_poly = Vec::with_capacity(num_reads); + let subtable_poly = subtable_polys[table.memory_to_subtable_index(i)]; + let nz = nz[table.memory_to_chunk_index(i)]; + (0..num_reads).for_each(|j| { + e_poly.push(subtable_poly[nz[j]]); + }); + MultilinearPolynomial::new(e_poly) + }) + .collect_vec() + } + + pub fn chunks<'a>( + table: &Box>, + subtable_polys: &'a [&MultilinearPolynomial], + 
e_polys: &'a [MultilinearPolynomial], + dims: &'a [MultilinearPolynomial], + read_ts_polys: &'a [MultilinearPolynomial], + final_cts_polys: &'a [MultilinearPolynomial], + ) -> Vec> { + // key: chunk index, value: chunk + let mut chunk_map: HashMap> = HashMap::new(); + + let num_memories = table.num_memories(); + let memories = (0..num_memories).map(|memory_index| { + let subtable_poly = subtable_polys[table.memory_to_subtable_index(memory_index)]; + Memory::new(memory_index, subtable_poly, &e_polys[memory_index]) + }); + memories.enumerate().for_each(|(memory_index, memory)| { + let chunk_index = table.memory_to_chunk_index(memory_index); + if let Some(_) = chunk_map.get(&chunk_index) { + chunk_map.entry(chunk_index).and_modify(|chunk| { + chunk.add_memory(memory); + }); + } else { + let dim = &dims[chunk_index]; + let read_ts_poly = &read_ts_polys[chunk_index]; + let final_cts_poly = &final_cts_polys[chunk_index]; + chunk_map.insert( + chunk_index, + Chunk::new(chunk_index, dim, read_ts_poly, final_cts_poly, memory), + ); + } + }); + + // sanity check + { + let num_chunks = table.num_chunks(); + assert_eq!(chunk_map.len(), num_chunks); + } + + let mut chunks = chunk_map.into_iter().collect_vec(); + chunks.sort_by_key(|(chunk_index, _)| *chunk_index); + chunks.into_iter().map(|(_, chunk)| chunk).collect_vec() + } + + pub fn prepare_memory_checking<'a>( + table: &Box>, + subtable_polys: &'a [&MultilinearPolynomial], + e_polys: &'a [MultilinearPolynomial], + dims: &'a [MultilinearPolynomial], + read_ts_polys: &'a [MultilinearPolynomial], + final_cts_polys: &'a [MultilinearPolynomial], + gamma: &F, + tau: &F, + ) -> Vec> { + let chunks = Self::chunks( + table, + subtable_polys, + e_polys, + dims, + read_ts_polys, + final_cts_polys, + ); + let chunk_bits = table.chunk_bits(); + // key: chunk bits, value: chunks + let mut chunk_map: HashMap>> = HashMap::new(); + + chunks.iter().enumerate().for_each(|(chunk_index, chunk)| { + let chunk_bits = chunk_bits[chunk_index]; + 
if let Some(_) = chunk_map.get(&chunk_bits) { + chunk_map.entry(chunk_bits).and_modify(|chunks| { + chunks.push(chunk.clone()); + }); + } else { + chunk_map.insert(chunk_bits, vec![chunk.clone()]); + } + }); + + chunk_map + .into_iter() + .map(|(_, chunks)| MemoryCheckingProver::new(chunks, tau, gamma)) + .collect_vec() + } +} diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs new file mode 100644 index 0000000..0ad5563 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -0,0 +1,223 @@ +use std::{collections::BTreeSet, iter::repeat, marker::PhantomData}; + +use halo2_curves::ff::{Field, PrimeField}; +use itertools::Itertools; +use rayon::prelude::{IntoParallelIterator, ParallelIterator}; + +use crate::{ + backend::lookup::lasso::DecomposableTable, + pcs::{CommitmentChunk, Evaluation, PolynomialCommitmentScheme}, + piop::sum_check::{ + classic::{ClassicSumCheck, EvaluationsProver}, + SumCheck as _, VirtualPolynomial, + }, + poly::{multilinear::MultilinearPolynomial, Polynomial}, + util::{ + arithmetic::{fe_to_bits_le, usize_from_bits_le}, + expression::{Expression, Query, Rotation}, + transcript::{FieldTranscriptRead, TranscriptWrite}, + }, + Error, +}; + +type SumCheck = ClassicSumCheck>; + +pub struct Surge< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, +> { + lookup_indices: Vec>, + _marker: PhantomData, + _marker2: PhantomData, +} + +impl< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, + > Surge +{ + pub fn new() -> Self { + Self { + lookup_indices: vec![vec![]], + _marker: PhantomData, + _marker2: PhantomData, + } + } + + pub fn nz(&'_ self) -> Vec<&[usize]> { + self.lookup_indices + .iter() + .map(|lookup_indices| lookup_indices.as_slice()) + .collect_vec() + } + + fn split_by_chunk_bits(bits: &[bool], chunk_bits: &[usize]) -> Vec> { + let mut offset = 0; + let mut chunked_bits = vec![]; + 
chunk_bits.iter().for_each(|chunk_bits| { + let mut chunked = vec![true; *chunk_bits]; + chunked.copy_from_slice(&bits[offset..offset + chunk_bits]); + chunked_bits.push(chunked); + offset = offset + chunk_bits; + }); + chunked_bits + } + + /// computes dim_1, ..., dim_c where c == DecomposableTable::C + pub fn commit( + &mut self, + table: &Box>, + nz_poly: &MultilinearPolynomial, + ) -> Vec> { + let num_rows: usize = 1 << nz_poly.num_vars(); + let num_chunks = table.num_chunks(); + let chunk_bits = table.chunk_bits(); + // get indices of non-zero columns of all rows where each index is chunked + let indices = (0..num_rows) + .map(|i| { + let index_bits = fe_to_bits_le(nz_poly[i]); + let mut chunked_index = repeat(0).take(num_chunks).collect_vec(); + let chunked_index_bits = Self::split_by_chunk_bits(&index_bits, &chunk_bits); + chunked_index + .iter_mut() + .zip(chunked_index_bits) + .map(|(chunked_index, index_bits)| { + *chunked_index = usize_from_bits_le(&index_bits); + }) + .collect_vec(); + chunked_index + }) + .collect_vec(); + let mut dims = Vec::with_capacity(num_chunks); + self.lookup_indices.resize(num_chunks, vec![]); + self.lookup_indices + .iter_mut() + .enumerate() + .for_each(|(i, lookup_indices)| { + let indices = indices + .iter() + .map(|indices| { + lookup_indices.push(indices[i]); + indices[i] + }) + .collect_vec(); + dims.push(MultilinearPolynomial::from_usize(indices)); + }); + + dims + } + + pub fn counter_polys( + &self, + table: &Box>, + ) -> (Vec>, Vec>) { + let num_chunks = table.num_chunks(); + let mut read_ts_polys = Vec::with_capacity(num_chunks); + let mut final_cts_polys = Vec::with_capacity(num_chunks); + let chunk_bits = table.chunk_bits(); + self.lookup_indices + .iter() + .enumerate() + .for_each(|(i, lookup_indices)| { + let num_reads = lookup_indices.len(); + let memory_size = 1 << chunk_bits[i]; + let mut final_timestamps = vec![0usize; memory_size]; + let mut read_timestamps = vec![0usize; num_reads]; + 
(0..num_reads).for_each(|i| { + let memory_address = lookup_indices[i]; + let ts = final_timestamps[memory_address]; + read_timestamps[i] = ts; + let write_timestamp = ts + 1; + final_timestamps[memory_address] = write_timestamp; + }); + read_ts_polys.push(MultilinearPolynomial::from_usize(read_timestamps)); + final_cts_polys.push(MultilinearPolynomial::from_usize(final_timestamps)); + }); + + (read_ts_polys, final_cts_polys) + } + + pub fn prove_sum_check( + table: &Box>, + input_poly: &MultilinearPolynomial, + e_polys: &[MultilinearPolynomial], + r: &[F], + num_vars: usize, + polys_offset: usize, + points_offset: usize, + transcript: &mut impl TranscriptWrite, F>, + ) -> Result<(Vec>, Vec>), Error> { + let claimed_sum = Self::sum_check_claim(&r, &table, input_poly, &e_polys); + transcript.write_field_element(&claimed_sum)?; + + let expression = Self::sum_check_expression(&table); + + // proceed sumcheck + let (x, evals) = SumCheck::prove( + &(), + num_vars, + VirtualPolynomial::new(&expression, e_polys, &[], &[r.to_vec()]), + claimed_sum, + transcript, + )?; + let points = vec![x]; + let pcs_query = Self::pcs_query(&expression, 0); + let e_polys_offset = polys_offset + 1 + table.num_chunks() * 3; + let evals = pcs_query + .into_iter() + .map(|query| { + Evaluation::new( + e_polys_offset + query.poly(), + points_offset, + evals[query.poly()], + ) + }) + .collect_vec(); + + transcript.write_field_elements(evals.iter().map(Evaluation::value))?; + + Ok((points, evals)) + } + + pub fn sum_check_claim( + r: &[F], + table: &Box>, + input_poly: &MultilinearPolynomial, + e_polys: &[MultilinearPolynomial], + ) -> F { + let num_memories = table.num_memories(); + assert_eq!(e_polys.len(), num_memories); + let num_vars = e_polys[0].num_vars(); + let bh_size = 1 << num_vars; + let eq = MultilinearPolynomial::eq_xy(r); + // \sum_{k \in \{0, 1\}^{\log m}} (\tilde{eq}(r, k) * g(E_1(k), ..., E_{\alpha}(k))) + let claim = (0..bh_size) + .into_par_iter() + .map(|k| { + let operands 
= e_polys.iter().map(|e_poly| e_poly[k]).collect_vec(); + eq[k] * table.combine_lookups(&operands) + }) + .sum(); + assert_eq!(input_poly.evaluate(r), claim); + + claim + } + + // (\tilde{eq}(r, k) * g(E_1(k), ..., E_{\alpha}(k))) + pub fn sum_check_expression(table: &Box>) -> Expression { + let num_memories = table.num_memories(); + let exprs = table.combine_lookup_expressions( + (0..num_memories) + .map(|idx| Expression::Polynomial(Query::new(idx, Rotation::cur()))) + .collect_vec(), + ); + let eq_xy = Expression::::eq_xy(0); + eq_xy * exprs + } + + pub fn pcs_query(expression: &Expression, offset: usize) -> BTreeSet { + let mut used_query = expression.used_query(); + used_query.retain(|query| query.poly() >= offset); + used_query + } +} diff --git a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs new file mode 100644 index 0000000..8683948 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs @@ -0,0 +1,73 @@ +use std::{iter, marker::PhantomData}; + +use halo2_curves::ff::PrimeField; +use itertools::Itertools; + +use crate::{ + poly::multilinear::MultilinearPolynomial, + util::{ + arithmetic::{inner_product, powers, split_bits}, + expression::Expression, + }, +}; + +use super::DecomposableTable; + +#[derive(Clone, Debug)] +pub struct AndTable(PhantomData); + +impl AndTable { + pub fn new() -> Self { + Self(PhantomData) + } +} + +impl DecomposableTable for AndTable { + fn num_chunks(&self) -> usize { + 4 + } + + fn num_memories(&self) -> usize { + 4 + } + + fn subtable_polys(&self) -> Vec> { + let memory_size = 1 << 16; + let mut evals = vec![]; + (0..memory_size).for_each(|i| { + let (lhs, rhs) = split_bits(i, 8); + let result = F::from((lhs & rhs) as u64); + evals.push(result) + }); + vec![MultilinearPolynomial::new(evals)] + } + + fn chunk_bits(&self) -> Vec { + vec![16, 16, 16, 16] + } + + fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression { + 
Expression::DistributePowers( + expressions, + Box::new(Expression::Constant(F::from(2 << 16))), + ) + } + + fn combine_lookups(&self, operands: &[F]) -> F { + inner_product( + operands, + iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double())) + .take(operands.len()) + .collect_vec() + .iter(), + ) + } + + fn memory_to_chunk_index(&self, memory_index: usize) -> usize { + memory_index + } + + fn memory_to_subtable_index(&self, memory_index: usize) -> usize { + 0 + } +} diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs new file mode 100644 index 0000000..dc21326 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -0,0 +1,119 @@ +use std::{collections::HashMap, marker::PhantomData}; + +use halo2_curves::ff::{Field, PrimeField}; +use itertools::Itertools; + +use crate::{ + pcs::{Evaluation, PolynomialCommitmentScheme}, + piop::sum_check::{ + classic::{ClassicSumCheck, EvaluationsProver}, + SumCheck, + }, + poly::multilinear::MultilinearPolynomial, + util::transcript::FieldTranscriptRead, + Error, +}; + +use super::{ + memory_checking::{verifier::MemoryCheckingVerifier, Chunk, Memory}, + prover::Surge, + DecomposableTable, +}; + +pub struct LassoVerifier< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, +>(PhantomData, PhantomData); + +impl< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, + > LassoVerifier +{ + pub fn verify_sum_check( + table: &Box>, + num_vars: usize, + polys_offset: usize, + points_offset: usize, + transcript: &mut impl FieldTranscriptRead, + ) -> Result<(Vec>, Vec>), Error> { + let expression = Surge::::sum_check_expression(&table); + let claim = transcript.read_field_element()?; + let (eval, x) = ClassicSumCheck::>::verify( + &(), + num_vars, + expression.degree(), + claim, + transcript, + )?; + let points = vec![x]; + let pcs_query = Surge::::pcs_query(&expression, 0); + let e_polys_offset = 
polys_offset + 1 + table.num_chunks() * 3; + let evals = pcs_query + .iter() + .map(|query| { + let value = transcript.read_field_element().unwrap(); + Evaluation::new(e_polys_offset + query.poly(), points_offset, value) + }) + .collect_vec(); + + Ok((points, evals)) + } + + fn chunks(table: &Box>) -> Vec> { + let num_memories = table.num_memories(); + let chunk_bits = table.chunk_bits(); + let subtable_polys = table.subtable_polys(); + // key: chunk index, value: chunk + let mut chunk_map: HashMap> = HashMap::new(); + (0..num_memories).for_each(|memory_index| { + let chunk_index = table.memory_to_chunk_index(memory_index); + let chunk_bits = chunk_bits[chunk_index]; + let subtable_poly = &subtable_polys[table.memory_to_subtable_index(memory_index)]; + let memory = Memory::new(memory_index, subtable_poly.clone()); + if let Some(_) = chunk_map.get(&chunk_index) { + chunk_map.entry(chunk_index).and_modify(|chunk| { + chunk.add_memory(memory); + }); + } else { + chunk_map.insert(chunk_index, Chunk::new(chunk_index, chunk_bits, memory)); + } + }); + + // sanity check + { + let num_chunks = table.num_chunks(); + assert_eq!(chunk_map.len(), num_chunks); + } + + let mut chunks = chunk_map.into_iter().collect_vec(); + chunks.sort_by_key(|(chunk_index, _)| *chunk_index); + chunks.into_iter().map(|(_, chunk)| chunk).collect_vec() + } + + pub fn prepare_memory_checking<'a>( + table: &Box>, + ) -> Vec> { + let chunks = Self::chunks(table); + let chunk_bits = table.chunk_bits(); + // key: chunk_bits, value: chunks + let mut chunk_map = HashMap::>>::new(); + chunks + .into_iter() + .enumerate() + .for_each(|(chunk_index, chunk)| { + let chunk_bits = chunk_bits[chunk_index]; + if let Some(_) = chunk_map.get(&chunk_bits) { + chunk_map.entry(chunk_bits).and_modify(|chunks| { + chunks.push(chunk); + }); + } else { + chunk_map.insert(chunk_bits, vec![chunk]); + } + }); + chunk_map + .into_iter() + .map(|(_, chunks)| MemoryCheckingVerifier::new(chunks)) + .collect_vec() + } +} diff 
--git a/plonkish_backend/src/backend/lookup/logup.rs b/plonkish_backend/src/backend/lookup/logup.rs new file mode 100644 index 0000000..14a0745 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/logup.rs @@ -0,0 +1,292 @@ +use std::{ + collections::{HashMap, HashSet}, + hash::Hash, + iter, + marker::PhantomData, +}; + +use halo2_curves::ff::{BatchInvert, Field, PrimeField}; +use itertools::Itertools; + +use crate::{ + pcs::{CommitmentChunk, PolynomialCommitmentScheme}, + poly::{multilinear::MultilinearPolynomial, Polynomial}, + util::{ + arithmetic::{div_ceil, powers, sum, BooleanHypercube}, + end_timer, + expression::{CommonPolynomial, Expression}, + parallel::{num_threads, par_map_collect, parallelize, parallelize_iter}, + start_timer, + transcript::TranscriptWrite, + }, + Error, +}; + +use super::{MVLookupStrategy, MVLookupStrategyOutput}; + +#[derive(Clone, Debug)] +pub struct LogUp>(PhantomData, PhantomData); + +impl> LogUp { + pub fn lookup_compressed_polys( + lookups: &[Vec<(Expression, Expression)>], + polys: &[&MultilinearPolynomial], + challenges: &[F], + betas: &[F], + ) -> Vec<[MultilinearPolynomial; 2]> { + if lookups.is_empty() { + return Default::default(); + } + + let num_vars = polys[0].num_vars(); + let expression = lookups + .iter() + .flat_map(|lookup| lookup.iter().map(|(input, table)| (input + table))) + .sum::>(); + let lagranges = { + let bh = BooleanHypercube::new(num_vars).iter().collect_vec(); + expression + .used_langrange() + .into_iter() + .map(|i| (i, bh[i.rem_euclid(1 << num_vars) as usize])) + .collect::>() + }; + lookups + .iter() + .map(|lookup| { + Self::lookup_compressed_poly(lookup, &lagranges, polys, challenges, betas) + }) + .collect() + } + + pub fn lookup_compressed_poly( + lookup: &[(Expression, Expression)], + lagranges: &HashSet<(i32, usize)>, + polys: &[&MultilinearPolynomial], + challenges: &[F], + betas: &[F], + ) -> [MultilinearPolynomial; 2] { + let num_vars = polys[0].num_vars(); + let bh = 
BooleanHypercube::new(num_vars); + let compress = |expressions: &[&Expression]| { + betas + .iter() + .copied() + .zip(expressions.iter().map(|expression| { + let mut compressed = vec![F::ZERO; 1 << num_vars]; + parallelize(&mut compressed, |(compressed, start)| { + for (b, compressed) in (start..).zip(compressed) { + *compressed = expression.evaluate( + &|constant| constant, + &|common_poly| match common_poly { + CommonPolynomial::Identity => F::from(b as u64), + CommonPolynomial::Lagrange(i) => { + if lagranges.contains(&(i, b)) { + F::ONE + } else { + F::ZERO + } + } + CommonPolynomial::EqXY(_) => unreachable!(), + }, + &|query| polys[query.poly()][bh.rotate(b, query.rotation())], + &|challenge| challenges[challenge], + &|value| -value, + &|lhs, rhs| lhs + &rhs, + &|lhs, rhs| lhs * &rhs, + &|value, scalar| value * &scalar, + ); + } + }); + MultilinearPolynomial::new(compressed) + })) + .sum::>() + }; + + let (inputs, tables) = lookup + .iter() + .map(|(input, table)| (input, table)) + .unzip::<_, _, Vec<_>, Vec<_>>(); + + let timer = start_timer(|| "compressed_input_poly"); + let compressed_input_poly = compress(&inputs); + end_timer(timer); + + let timer = start_timer(|| "compressed_table_poly"); + let compressed_table_poly = compress(&tables); + end_timer(timer); + + [compressed_input_poly, compressed_table_poly] + } +} + +impl> LogUp { + pub(crate) fn lookup_m_polys( + compressed_polys: &[[MultilinearPolynomial; 2]], + ) -> Result>, Error> { + compressed_polys + .iter() + .map(|compressed_polys| Self::lookup_m_poly(compressed_polys)) + .try_collect() + } + + pub(super) fn lookup_m_poly( + compressed_polys: &[MultilinearPolynomial; 2], + ) -> Result, Error> { + let [input, table] = compressed_polys; + + let counts = { + let indice_map = table.iter().zip(0..).collect::>(); + + let chunk_size = div_ceil(input.evals().len(), num_threads()); + let num_chunks = div_ceil(input.evals().len(), chunk_size); + let mut counts = vec![HashMap::new(); num_chunks]; + let mut 
valids = vec![true; num_chunks]; + parallelize_iter( + counts + .iter_mut() + .zip(valids.iter_mut()) + .zip((0..).step_by(chunk_size)), + |((count, valid), start)| { + for input in input[start..].iter().take(chunk_size) { + if let Some(idx) = indice_map.get(input) { + count + .entry(*idx) + .and_modify(|count| *count += 1) + .or_insert(1); + } else { + *valid = false; + break; + } + } + }, + ); + if valids.iter().any(|valid| !valid) { + return Err(Error::InvalidSnark("Invalid lookup input".to_string())); + } + counts + }; + + let mut m = vec![0; 1 << input.num_vars()]; + for (idx, count) in counts.into_iter().flatten() { + m[idx] += count; + } + let m = par_map_collect(m, |count| match count { + 0 => F::ZERO, + 1 => F::ONE, + count => F::from(count), + }); + Ok(MultilinearPolynomial::new(m)) + } + + pub(super) fn lookup_h_polys( + compressed_polys: &[[MultilinearPolynomial; 2]], + m_polys: &[MultilinearPolynomial], + gamma: &F, + ) -> Vec> { + compressed_polys + .iter() + .zip(m_polys.iter()) + .map(|(compressed_polys, m_poly)| Self::lookup_h_poly(compressed_polys, m_poly, gamma)) + .collect() + } + + pub(super) fn lookup_h_poly( + compressed_polys: &[MultilinearPolynomial; 2], + m_poly: &MultilinearPolynomial, + gamma: &F, + ) -> MultilinearPolynomial { + let [input, table] = compressed_polys; + let mut h_input = vec![F::ZERO; 1 << input.num_vars()]; + let mut h_table = vec![F::ZERO; 1 << input.num_vars()]; + + parallelize(&mut h_input, |(h_input, start)| { + for (h_input, input) in h_input.iter_mut().zip(input[start..].iter()) { + *h_input = *gamma + input; + } + }); + parallelize(&mut h_table, |(h_table, start)| { + for (h_table, table) in h_table.iter_mut().zip(table[start..].iter()) { + *h_table = *gamma + table; + } + }); + + let chunk_size = div_ceil(2 * h_input.len(), num_threads()); + parallelize_iter( + iter::empty() + .chain(h_input.chunks_mut(chunk_size)) + .chain(h_table.chunks_mut(chunk_size)), + |h| { + h.iter_mut().batch_invert(); + }, + ); + + 
parallelize(&mut h_input, |(h_input, start)| { + for (h_input, (h_table, m)) in h_input + .iter_mut() + .zip(h_table[start..].iter().zip(m_poly[start..].iter())) + { + *h_input -= *h_table * m; + } + }); + + if cfg!(feature = "sanity-check") { + assert_eq!(sum::(&h_input), F::ZERO); + } + + MultilinearPolynomial::new(h_input) + } +} + +impl< + F: Field + PrimeField + Hash, + Pcs: PolynomialCommitmentScheme>, + > MVLookupStrategy for LogUp +{ + type Pcs = Pcs; + + fn preprocess( + lookups: &[Vec<(Expression, Expression)>], + polys: &[&MultilinearPolynomial], + challenges: &mut Vec, + ) -> Result; 2]>, Error> { + let timer = start_timer(|| format!("lookup_compressed_polys-{}", lookups.len())); + let lookup_compressed_polys = { + let beta = challenges.last().unwrap(); + let max_lookup_width = lookups.iter().map(Vec::len).max().unwrap_or_default(); + let betas = powers(*beta).take(max_lookup_width).collect_vec(); + Self::lookup_compressed_polys(lookups, &polys, &challenges, &betas) + }; + end_timer(timer); + Ok(lookup_compressed_polys) + } + + fn commit( + pp: &Pcs::ProverParam, + lookup_compressed_polys: &[[MultilinearPolynomial; 2]], + challenges: &mut Vec, + transcript: &mut impl TranscriptWrite, F>, + ) -> Result, crate::Error> { + let timer = start_timer(|| format!("lookup_m_polys-{}", lookup_compressed_polys.len())); + let lookup_m_polys = Self::lookup_m_polys(&lookup_compressed_polys)?; + end_timer(timer); + + let lookup_m_comms = Pcs::batch_commit_and_write(&pp, &lookup_m_polys, transcript)?; + + let gamma = transcript.squeeze_challenge(); + challenges.extend([gamma]); + + let timer = start_timer(|| format!("lookup_h_polys-{}", lookup_compressed_polys.len())); + let lookup_h_polys = + Self::lookup_h_polys(&lookup_compressed_polys, &lookup_m_polys, &gamma); + end_timer(timer); + + let lookup_h_comms = Pcs::batch_commit_and_write(&pp, &lookup_h_polys, transcript)?; + + let mut polys = Vec::with_capacity(2 * lookup_compressed_polys.len()); + 
polys.extend([lookup_m_polys, lookup_h_polys]); + let mut comms = Vec::with_capacity(lookup_m_comms.len() + lookup_h_comms.len()); + comms.extend([lookup_m_comms, lookup_h_comms]); + Ok(MVLookupStrategyOutput { polys, comms }) + } +} diff --git a/plonkish_backend/src/backend/lookup/mod.rs b/plonkish_backend/src/backend/lookup/mod.rs new file mode 100644 index 0000000..0a0bdcb --- /dev/null +++ b/plonkish_backend/src/backend/lookup/mod.rs @@ -0,0 +1,50 @@ +use std::fmt::Debug; + +use halo2_curves::ff::Field; + +use crate::{ + pcs::{CommitmentChunk, PolynomialCommitmentScheme}, + poly::multilinear::MultilinearPolynomial, + util::{expression::Expression, transcript::TranscriptWrite}, + Error, +}; + +pub mod lasso; +pub mod logup; + +pub struct MVLookupStrategyOutput< + F: Field, + Pcs: PolynomialCommitmentScheme>, +> { + polys: Vec>>, + comms: Vec>, +} + +impl>> + MVLookupStrategyOutput +{ + pub fn polys(&self) -> Vec> { + self.polys.concat() + } + + pub fn comms(&self) -> Vec { + self.comms.concat() + } +} + +pub trait MVLookupStrategy: Clone + Debug { + type Pcs: PolynomialCommitmentScheme>; + + fn preprocess( + lookups: &[Vec<(Expression, Expression)>], + polys: &[&MultilinearPolynomial], + challenges: &mut Vec, + ) -> Result; 2]>, Error>; + + fn commit( + pp: &>::ProverParam, + lookup_polys: &[[MultilinearPolynomial; 2]], + challenges: &mut Vec, + transcript: &mut impl TranscriptWrite, F>, + ) -> Result, Error>; +} diff --git a/plonkish_backend/src/frontend/halo2.rs b/plonkish_backend/src/frontend/halo2.rs index 9bae2ad..1abedbb 100644 --- a/plonkish_backend/src/frontend/halo2.rs +++ b/plonkish_backend/src/frontend/halo2.rs @@ -21,6 +21,8 @@ use std::{ #[cfg(any(test, feature = "benchmark"))] pub mod circuit; +pub mod lookup; + #[cfg(test)] mod test; @@ -132,6 +134,7 @@ impl> PlonkishCircuit for Halo2Circuit { .collect_vec() }) .collect(); + let lasso_lookups = vec![]; let num_instances = instances.iter().map(Vec::len).collect_vec(); let preprocess_polys = @@ 
-155,6 +158,7 @@ impl> PlonkishCircuit for Halo2Circuit { num_challenges: num_by_phase(&cs.challenge_phase()), constraints, lookups, + lasso_lookups, permutations, max_degree: Some(cs.degree::()), }) diff --git a/plonkish_backend/src/frontend/halo2/lookup.rs b/plonkish_backend/src/frontend/halo2/lookup.rs new file mode 100644 index 0000000..dad76a5 --- /dev/null +++ b/plonkish_backend/src/frontend/halo2/lookup.rs @@ -0,0 +1,47 @@ +use std::fmt::Debug; + +use halo2_curves::ff::PrimeField; +use halo2_proofs::plonk::ConstraintSystem; + +use crate::{poly::multilinear::MultilinearPolynomial, util::expression::Expression}; + +/// This is a trait that decomposable tables provide implementations for. +/// This will be converted into `DecomposableTable` +pub trait SubtableStrategy< + F: PrimeField, + const TABLE_SIZE: usize, + const NUM_CHUNKS: usize, + const NUM_MEMORIES: usize, +> +{ + /// This is a configuration object that stores subtables + type Config: Clone; + + fn configure(meta: &mut ConstraintSystem) -> Self::Config; + + /// The `g` function that computes T[r] = g(T_1[r_1], ..., T_k[r_1], T_{k+1}[r_2], ..., T_{\alpha}[r_c]) + fn combine_lookups(&self, config: Self::Config) -> Expression; +} + +/// This is a trait that contains information about decomposable table to which +/// backend prover and verifier can ask +pub trait DecomposableTable: Clone + Debug + Sync { + const NUM_CHUNKS: usize; + const NUM_MEMORIES: usize; + + /// Returns multilinear extension polynomials of each subtable + fn subtable_polys(&self) -> Vec>; + + fn combine_lookup_expressions(&self, expressions: &[&Expression]) -> Expression; + + /// The `g` function that computes T[r] = g(T_1[r_1], ..., T_k[r_1], T_{k+1}[r_2], ..., T_{\alpha}[r_c]) + fn combine_lookups(&self, operands: &[F]) -> F; + + /// Returns the size of bits for each chunk. + /// Each chunk can have different bits. 
+ fn chunk_bits(&self) -> [usize; Self::NUM_CHUNKS]; + + fn memory_to_subtable_index(&self, memory_index: usize) -> usize; + + fn memory_to_chunk_index(&self, memory_index: usize) -> usize; +} diff --git a/plonkish_backend/src/frontend/halo2/test.rs b/plonkish_backend/src/frontend/halo2/test.rs index 6eb2d73..8539243 100644 --- a/plonkish_backend/src/frontend/halo2/test.rs +++ b/plonkish_backend/src/frontend/halo2/test.rs @@ -1,5 +1,6 @@ use crate::backend::{ hyperplonk::{util, HyperPlonk}, + lookup::logup::LogUp, test::run_plonkish_backend, PlonkishCircuit, }; @@ -8,7 +9,7 @@ use crate::{ pcs::multilinear::MultilinearKzg, util::transcript::Keccak256Transcript, }; -use halo2_curves::bn256::{Bn256, Fr}; +use halo2_curves::bn256::{self, Bn256, Fr}; use rand::rngs::OsRng; #[test] diff --git a/plonkish_backend/src/lib.rs b/plonkish_backend/src/lib.rs index 054488d..9cae148 100644 --- a/plonkish_backend/src/lib.rs +++ b/plonkish_backend/src/lib.rs @@ -1,4 +1,7 @@ #![allow(clippy::op_ref)] +#![feature(generic_const_exprs)] +#![feature(map_first_last)] +#![feature(int_log)] pub mod backend; pub mod frontend; diff --git a/plonkish_backend/src/piop/gkr.rs b/plonkish_backend/src/piop/gkr.rs index b26907c..1e7a661 100644 --- a/plonkish_backend/src/piop/gkr.rs +++ b/plonkish_backend/src/piop/gkr.rs @@ -1,3 +1,26 @@ mod fractional_sum_check; +mod grand_product; + +use std::collections::HashMap; pub use fractional_sum_check::{prove_fractional_sum_check, verify_fractional_sum_check}; +pub use grand_product::{prove_grand_product, verify_grand_product}; +use halo2_curves::ff::PrimeField; +use itertools::izip; + +use crate::{ + util::expression::{Query, Rotation}, + Error, +}; + +fn eval_by_query(evals: &[F]) -> HashMap { + izip!( + (0..).map(|idx| Query::new(idx, Rotation::cur())), + evals.iter().cloned() + ) + .collect() +} + +fn err_unmatched_sum_check_output() -> Error { + Error::InvalidSumcheck("Unmatched between sum_check output and query evaluation".to_string()) +} diff --git 
a/plonkish_backend/src/piop/gkr/fractional_sum_check.rs b/plonkish_backend/src/piop/gkr/fractional_sum_check.rs index 5e16213..1e87224 100644 --- a/plonkish_backend/src/piop/gkr/fractional_sum_check.rs +++ b/plonkish_backend/src/piop/gkr/fractional_sum_check.rs @@ -4,9 +4,12 @@ //! [PH23]: https://eprint.iacr.org/2023/1284.pdf use crate::{ - piop::sum_check::{ - classic::{ClassicSumCheck, EvaluationsProver}, - evaluate, SumCheck as _, VirtualPolynomial, + piop::{ + gkr::{err_unmatched_sum_check_output, eval_by_query}, + sum_check::{ + classic::{ClassicSumCheck, EvaluationsProver}, + evaluate, SumCheck as _, VirtualPolynomial, + }, }, poly::{multilinear::MultilinearPolynomial, Polynomial}, util::{ @@ -20,7 +23,7 @@ use crate::{ }, Error, }; -use std::{array, collections::HashMap, iter}; +use std::{array, iter}; type SumCheck = ClassicSumCheck>; @@ -295,18 +298,6 @@ fn layer_down_claim(evals: &[F], mu: F) -> (Vec, Vec) { .unzip() } -fn eval_by_query(evals: &[F]) -> HashMap { - izip!( - (0..).map(|idx| Query::new(idx, Rotation::cur())), - evals.iter().cloned() - ) - .collect() -} - -fn err_unmatched_sum_check_output() -> Error { - Error::InvalidSumcheck("Unmatched between sum_check output and query evaluation".to_string()) -} - #[cfg(test)] mod test { use crate::{ diff --git a/plonkish_backend/src/piop/gkr/grand_product.rs b/plonkish_backend/src/piop/gkr/grand_product.rs new file mode 100644 index 0000000..04893c2 --- /dev/null +++ b/plonkish_backend/src/piop/gkr/grand_product.rs @@ -0,0 +1,312 @@ +use std::{array, iter}; + +use halo2_curves::ff::PrimeField; +use itertools::{chain, izip, Itertools}; + +use crate::{ + piop::{ + gkr::{err_unmatched_sum_check_output, eval_by_query}, + sum_check::{ + classic::{ClassicSumCheck, EvaluationsProver}, + evaluate, SumCheck as _, VirtualPolynomial, + }, + }, + poly::{multilinear::MultilinearPolynomial, Polynomial}, + util::{ + arithmetic::{div_ceil, inner_product, powers}, + expression::{Expression, Query, Rotation}, + 
parallel::{num_threads, parallelize_iter}, + transcript::{FieldTranscriptRead, FieldTranscriptWrite}, + }, + Error, +}; + +type SumCheck = ClassicSumCheck>; + +struct Layer { + v_l: MultilinearPolynomial, + v_r: MultilinearPolynomial, +} + +impl From<[Vec; 2]> for Layer { + fn from(values: [Vec; 2]) -> Self { + let [v_l, v_r] = values.map(MultilinearPolynomial::new); + Self { v_l, v_r } + } +} + +impl Layer { + fn bottom(v: &&MultilinearPolynomial) -> Self { + let mid = v.evals().len() >> 1; + [&v[..mid], &v[mid..]].map(ToOwned::to_owned).into() + } + + fn num_vars(&self) -> usize { + self.v_l.num_vars() + } + + fn polys(&self) -> [&MultilinearPolynomial; 2] { + [&self.v_l, &self.v_r] + } + + fn poly_chunks(&self, chunk_size: usize) -> impl Iterator { + let [v_l, v_r] = self.polys().map(|poly| poly.evals().chunks(chunk_size)); + izip!(v_l, v_r) + } + + fn up(&self) -> Self { + assert!(self.num_vars() != 0); + + let len = 1 << self.num_vars(); + let chunk_size = div_ceil(len, num_threads()).next_power_of_two(); + + let mut outputs: [_; 2] = array::from_fn(|_| vec![F::ZERO; len >> 1]); + let (v_up_l, v_up_r) = outputs.split_at_mut(1); + + parallelize_iter( + izip!( + chain![v_up_l, v_up_r].flat_map(|v_up| v_up.chunks_mut(chunk_size)), + self.poly_chunks(chunk_size), + ), + |(v_up, (v_l, v_r))| { + izip!(v_up, v_l, v_r).for_each(|(v_up, v_l, v_r)| { + *v_up = *v_l * *v_r; + }) + }, + ); + + outputs.into() + } +} + +pub fn prove_grand_product<'a, F: PrimeField>( + claimed_v_0s: impl IntoIterator>, + vs: impl IntoIterator>, + transcript: &mut impl FieldTranscriptWrite, +) -> Result<(Vec, Vec), Error> { + let claimed_v_0s = claimed_v_0s.into_iter().collect_vec(); + let vs = vs.into_iter().collect_vec(); + let num_batching = claimed_v_0s.len(); + + assert!(num_batching != 0); + assert_eq!(num_batching, vs.len()); + for poly in &vs { + assert_eq!(poly.num_vars(), vs[0].num_vars()); + } + + let bottom_layers = vs.iter().map(Layer::bottom).collect_vec(); + let layers = 
iter::successors(bottom_layers.into(), |layers| { + (layers[0].num_vars() > 0).then(|| layers.iter().map(Layer::up).collect()) + }) + .collect_vec(); + + let claimed_v_0s = { + let v_0s = chain![layers.last().unwrap()] + .map(|layer| { + let [v_l, v_r] = layer.polys().map(|poly| poly[0]); + v_l * v_r + }) + .collect_vec(); + + let mut hash_to_transcript = |claimed: Vec<_>, computed: Vec<_>| { + izip!(claimed, computed) + .map(|(claimed, computed)| match claimed { + Some(claimed) => { + if cfg!(feature = "sanity-check") { + assert_eq!(claimed, computed) + } + transcript.common_field_element(&computed).map(|_| computed) + } + None => transcript.write_field_element(&computed).map(|_| computed), + }) + .try_collect::<_, Vec<_>, _>() + }; + + hash_to_transcript(claimed_v_0s, v_0s)? + }; + + let expression = sum_check_expression(num_batching); + + let (v_xs, x) = + layers + .iter() + .rev() + .fold(Ok((claimed_v_0s, Vec::new())), |result, layers| { + let (claimed_v_ys, y) = result?; + + let num_vars = layers[0].num_vars(); + let polys = layers.iter().flat_map(|layer| layer.polys()); + + let (mut x, evals) = if num_vars == 0 { + (vec![], polys.map(|poly| poly[0]).collect_vec()) + } else { + let gamma = transcript.squeeze_challenge(); + + let (x, evals) = { + let claim = sum_check_claim(&claimed_v_ys, gamma); + SumCheck::prove( + &(), + num_vars, + VirtualPolynomial::new(&expression, polys, &[gamma], &[y]), + claim, + transcript, + )? 
+ }; + + (x, evals) + }; + + transcript.write_field_elements(&evals)?; + + let mu = transcript.squeeze_challenge(); + + let v_xs = layer_down_claim(&evals, mu); + x.push(mu); + + Ok((v_xs, x)) + })?; + + if cfg!(feature = "sanity-check") { + izip!(vs, &v_xs).for_each(|(poly, eval)| assert_eq!(poly.evaluate(&x), *eval)); + } + + Ok((v_xs, x)) +} + +pub fn verify_grand_product( + num_vars: usize, + claimed_v_0s: impl IntoIterator>, + transcript: &mut impl FieldTranscriptRead, +) -> Result<(Vec, Vec), Error> { + let claimed_v_0s = claimed_v_0s.into_iter().collect_vec(); + let num_batching = claimed_v_0s.len(); + + assert!(num_batching != 0); + let claimed_v_0s = { + claimed_v_0s + .into_iter() + .map(|claimed| match claimed { + Some(claimed) => transcript.common_field_element(&claimed).map(|_| claimed), + None => transcript.read_field_element(), + }) + .try_collect::<_, Vec<_>, _>()? + }; + + let expression = sum_check_expression(num_batching); + + let (v_xs, x) = (0..num_vars).fold(Ok((claimed_v_0s, Vec::new())), |result, num_vars| { + let (claimed_v_ys, y) = result?; + + let (mut x, evals) = if num_vars == 0 { + let evals = transcript.read_field_elements(2 * num_batching)?; + + for (claimed_v, (&v_l, &v_r)) in izip!(claimed_v_ys, evals.iter().tuples()) { + if claimed_v != v_l * v_r { + return Err(err_unmatched_sum_check_output()); + } + } + + (Vec::new(), evals) + } else { + let gamma = transcript.squeeze_challenge(); + + let (x_eval, x) = { + let claim = sum_check_claim(&claimed_v_ys, gamma); + SumCheck::verify(&(), num_vars, expression.degree(), claim, transcript)? 
+ }; + + let evals = transcript.read_field_elements(2 * num_batching)?; + + let eval_by_query = eval_by_query(&evals); + if x_eval != evaluate(&expression, num_vars, &eval_by_query, &[gamma], &[&y], &x) { + return Err(err_unmatched_sum_check_output()); + } + + (x, evals) + }; + + let mu = transcript.squeeze_challenge(); + + let v_xs = layer_down_claim(&evals, mu); + x.push(mu); + + Ok((v_xs, x)) + })?; + + Ok((v_xs, x)) +} + +fn sum_check_expression(num_batching: usize) -> Expression { + let exprs = &(0..2 * num_batching) + .map(|idx| Expression::::Polynomial(Query::new(idx, Rotation::cur()))) + .tuples() + .map(|(ref v_l, ref v_r)| v_l * v_r) + .collect_vec(); + let eq_xy = &Expression::eq_xy(0); + let gamma = &Expression::Challenge(0); + Expression::distribute_powers(exprs, gamma) * eq_xy +} + +fn sum_check_claim(claimed_v_ys: &[F], gamma: F) -> F { + inner_product( + claimed_v_ys, + &powers(gamma).take(claimed_v_ys.len()).collect_vec(), + ) +} + +fn layer_down_claim(evals: &[F], mu: F) -> Vec { + evals + .iter() + .tuples() + .map(|(&v_l, &v_r)| v_l + mu * (v_r - v_l)) + .collect_vec() +} + +#[cfg(test)] +mod tests { + use std::iter; + + use itertools::{chain, Itertools}; + + use crate::{ + piop::gkr::{prove_grand_product, verify_grand_product}, + poly::multilinear::MultilinearPolynomial, + util::{ + izip_eq, + test::{rand_vec, seeded_std_rng}, + transcript::{InMemoryTranscript, Keccak256Transcript}, + }, + }; + use halo2_curves::bn256::Fr; + + #[test] + fn grand_product_test() { + let num_batching = 4; + for num_vars in 1..16 { + let mut rng = seeded_std_rng(); + + let vs = iter::repeat_with(|| rand_vec(1 << num_vars, &mut rng)) + .map(MultilinearPolynomial::new) + .take(num_batching) + .collect_vec(); + let v_0s = vec![None; num_batching]; + + let proof = { + let mut transcript = Keccak256Transcript::new(()); + prove_grand_product::(v_0s.to_vec(), vs.iter(), &mut transcript).unwrap(); + transcript.into_proof() + }; + + let result = { + let mut transcript = 
Keccak256Transcript::from_proof((), proof.as_slice()); + verify_grand_product::(num_vars, v_0s.to_vec(), &mut transcript) + }; + assert_eq!(result.as_ref().map(|_| ()), Ok(())); + + let (v_xs, x) = result.unwrap(); + for (poly, eval) in izip_eq!(chain![vs], chain![v_xs]) { + assert_eq!(poly.evaluate(&x), eval); + } + } + } +} diff --git a/plonkish_backend/src/poly/multilinear.rs b/plonkish_backend/src/poly/multilinear.rs index df3dc2a..428a3a1 100644 --- a/plonkish_backend/src/poly/multilinear.rs +++ b/plonkish_backend/src/poly/multilinear.rs @@ -1,13 +1,14 @@ use crate::{ poly::Polynomial, util::{ - arithmetic::{div_ceil, usize_from_bits_le, BooleanHypercube, Field}, + arithmetic::{div_ceil, fe_from_le_bytes, usize_from_bits_le, BooleanHypercube, Field}, expression::Rotation, impl_index, parallel::{num_threads, parallelize, parallelize_iter}, BitIndex, Deserialize, Itertools, Serialize, }, }; +use halo2_curves::ff::PrimeField; use num_integer::Integer; use rand::RngCore; use std::{ @@ -62,6 +63,16 @@ impl MultilinearPolynomial { } } +impl MultilinearPolynomial { + pub fn from_usize(evals: Vec) -> Self { + let evals = evals + .iter() + .map(|eval| fe_from_le_bytes(eval.to_le_bytes())) + .collect_vec(); + Self::new(evals) + } +} + impl Polynomial for MultilinearPolynomial { type Point = Vec; diff --git a/plonkish_backend/src/util/arithmetic.rs b/plonkish_backend/src/util/arithmetic.rs index 1a44679..3189b24 100644 --- a/plonkish_backend/src/util/arithmetic.rs +++ b/plonkish_backend/src/util/arithmetic.rs @@ -186,6 +186,23 @@ pub fn usize_from_bits_le(bits: &[bool]) -> usize { .fold(0, |int, bit| (int << 1) + (*bit as usize)) } +pub fn fe_to_bits_le(fe: F) -> Vec { + let repr = fe.to_repr(); + let bytes = repr.as_ref(); + bytes + .iter() + .flat_map(|byte| { + let value = u8::from_le(*byte); + let mut bits = vec![]; + for i in 0..8 { + let mask = 1 << i; + bits.push(value & mask > 0); + } + bits + }) + .collect_vec() +} + pub fn div_rem(dividend: usize, divisor: 
usize) -> (usize, usize) { Integer::div_rem(÷nd, &divisor) } @@ -194,6 +211,15 @@ pub fn div_ceil(dividend: usize, divisor: usize) -> usize { Integer::div_ceil(÷nd, &divisor) } +pub fn split_bits(item: usize, num_bits: usize) -> (usize, usize) { + let max_value = (1 << num_bits) - 1; // Calculate the maximum value that can be represented with num_bits + + let low_chunk = item & max_value; // Extract the lower bits + let high_chunk = (item >> num_bits) & max_value; // Shift the item to the right and extract the next set of bits + + (high_chunk, low_chunk) +} + #[cfg(test)] mod test { use crate::util::arithmetic; diff --git a/rust-toolchain b/rust-toolchain index 77c582d..274ca3d 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.67.0 \ No newline at end of file +nightly-2023-09-22 \ No newline at end of file From 436a58a56279387d22e3d5d232cc4aaaf0e7e33e Mon Sep 17 00:00:00 2001 From: DoHoonKim8 Date: Wed, 1 Nov 2023 00:20:52 +0900 Subject: [PATCH 02/27] Open input polynomial --- plonkish_backend/src/backend/hyperplonk.rs | 5 +++-- .../src/backend/lookup/lasso/memory_checking/verifier.rs | 4 ++-- .../src/backend/lookup/lasso/prover/surge.rs | 9 +++++---- .../src/backend/lookup/lasso/verifier/mod.rs | 6 ++++-- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 5b9aab9..181b894 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -321,7 +321,7 @@ where memory_checking.opening_evals( table.num_chunks(), pp.lookup_polys_offset, - lookup_points_offset + 1 + 2 * index, + lookup_points_offset + 2 + 2 * index, ) }) .collect_vec(); @@ -443,6 +443,7 @@ where vp.num_vars, vp.lookup_polys_offset, lookup_points_offset, + &r, transcript, )?; @@ -461,7 +462,7 @@ where lookup_table.num_chunks(), vp.num_vars, vp.lookup_polys_offset, - lookup_points_offset + 1 + 2 * index, + lookup_points_offset + 2 + 2 * index, 
&beta, &gamma, transcript diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index a9dae53..631f289 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -32,7 +32,7 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { transcript: &mut impl FieldTranscriptRead, ) -> Result<(Vec>, Vec>), Error> { let num_memories: usize = self.chunks.iter().map(|chunk| chunk.num_memories()).sum(); - let memory_size = self.chunks[0].chunk_bits(); + let memory_bits = self.chunks[0].chunk_bits(); let (read_write_xs, x) = verify_grand_product( num_reads, iter::repeat(None).take(2 * num_memories), @@ -41,7 +41,7 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { let (read_xs, write_xs) = read_write_xs.split_at(num_memories); let (init_final_read_ys, y) = verify_grand_product( - memory_size, + memory_bits, iter::repeat(None).take(2 * num_memories), transcript, )?; diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 0ad5563..20d3d7f 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -160,22 +160,23 @@ impl< claimed_sum, transcript, )?; - let points = vec![x]; + + let points = vec![r.to_vec(), x]; let pcs_query = Self::pcs_query(&expression, 0); let e_polys_offset = polys_offset + 1 + table.num_chunks() * 3; let evals = pcs_query .into_iter() .map(|query| { + transcript.write_field_element(&evals[query.poly()]).unwrap(); Evaluation::new( e_polys_offset + query.poly(), - points_offset, + points_offset + 1, evals[query.poly()], ) }) + .chain([Evaluation::new(polys_offset, points_offset, claimed_sum)]) .collect_vec(); - transcript.write_field_elements(evals.iter().map(Evaluation::value))?; - Ok((points, evals)) } 
diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index dc21326..99530a7 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -35,6 +35,7 @@ impl< num_vars: usize, polys_offset: usize, points_offset: usize, + r: &[F], transcript: &mut impl FieldTranscriptRead, ) -> Result<(Vec>, Vec>), Error> { let expression = Surge::::sum_check_expression(&table); @@ -46,15 +47,16 @@ impl< claim, transcript, )?; - let points = vec![x]; + let points = vec![r.to_vec(), x]; let pcs_query = Surge::::pcs_query(&expression, 0); let e_polys_offset = polys_offset + 1 + table.num_chunks() * 3; let evals = pcs_query .iter() .map(|query| { let value = transcript.read_field_element().unwrap(); - Evaluation::new(e_polys_offset + query.poly(), points_offset, value) + Evaluation::new(e_polys_offset + query.poly(), points_offset + 1, value) }) + .chain([Evaluation::new(polys_offset, points_offset, claim)]) .collect_vec(); Ok((points, evals)) From f2ed4cbe2e2364c0febd9d9ed0b4025715ef9384 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Wed, 1 Nov 2023 17:07:58 +0900 Subject: [PATCH 03/27] Track opening polys & points index --- plonkish_backend/src/backend.rs | 2 +- plonkish_backend/src/backend/hyperplonk.rs | 243 +++++------- .../src/backend/hyperplonk/util.rs | 11 +- .../src/backend/hyperplonk/verifier.rs | 16 + plonkish_backend/src/backend/lookup/lasso.rs | 79 ---- .../lookup/lasso/memory_checking/mod.rs | 31 +- .../lookup/lasso/memory_checking/prover.rs | 72 ++-- .../lookup/lasso/memory_checking/verifier.rs | 70 ++-- .../src/backend/lookup/lasso/prover/mod.rs | 347 ++++++++++++++---- .../src/backend/lookup/lasso/prover/surge.rs | 35 +- .../src/backend/lookup/lasso/verifier/mod.rs | 66 +++- plonkish_backend/src/frontend/halo2.rs | 4 +- 12 files changed, 558 insertions(+), 418 deletions(-) diff --git a/plonkish_backend/src/backend.rs 
b/plonkish_backend/src/backend.rs index 7e40b76..35aed46 100644 --- a/plonkish_backend/src/backend.rs +++ b/plonkish_backend/src/backend.rs @@ -68,7 +68,7 @@ pub struct PlonkishCircuitInfo { /// respectively. pub lookups: Vec, Expression)>>, /// Represents Lasso lookup argument, which contains input, indices, and table - pub lasso_lookups: Vec<(Expression, Expression, Box>)>, + pub lasso_lookup: Vec<(Expression, Expression, Box>)>, /// Each item inside outer vector repesents an closed permutation cycle, /// which contains vetor of tuples representing the polynomial index and /// row respectively. diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 181b894..ff5d5f7 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -3,17 +3,17 @@ use crate::{ hyperplonk::{ preprocessor::{batch_size, compose, permutation_polys}, prover::{instance_polys, permutation_z_polys, prove_zero_check}, - verifier::{pcs_query, points, verify_zero_check}, + verifier::{pcs_query, verify_zero_check, zero_check_opening_points_len}, }, lookup::lasso::verifier::LassoVerifier, PlonkishBackend, PlonkishCircuit, PlonkishCircuitInfo, WitnessEncoding, }, - pcs::{PolynomialCommitmentScheme, Evaluation}, + pcs::{Evaluation, PolynomialCommitmentScheme}, poly::multilinear::MultilinearPolynomial, util::{ - arithmetic::{powers, BooleanHypercube, PrimeField}, + arithmetic::{BooleanHypercube, PrimeField}, end_timer, - expression::{Expression, Query}, + expression::Expression, start_timer, transcript::{TranscriptRead, TranscriptWrite}, DeserializeOwned, Itertools, Serialize, @@ -23,10 +23,7 @@ use crate::{ use rand::RngCore; use std::{fmt::Debug, hash::Hash, iter, marker::PhantomData}; -use super::lookup::lasso::{ - prover::{LassoProver, Surge}, - DecomposableTable, Lasso, -}; +use super::lookup::lasso::{prover::LassoProver, DecomposableTable}; pub(crate) mod preprocessor; pub(crate) mod prover; @@ -49,8 
+46,10 @@ where pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, pub(crate) lookups: Vec, Expression)>>, - pub(crate) lasso_lookups: Vec<(Expression, Expression, Box>)>, + /// assume we have Just One Lookup Table + pub(crate) lasso_lookup: (Expression, Expression, Box>), pub(crate) lookup_polys_offset: usize, + pub(crate) lookup_points_offset: usize, pub(crate) num_permutation_z_polys: usize, pub(crate) num_vars: usize, pub(crate) expression: Expression, @@ -70,8 +69,9 @@ where pub(crate) num_instances: Vec, pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, - pub(crate) lasso_tables: Vec>>, + pub(crate) lasso_table: Box>, pub(crate) lookup_polys_offset: usize, + pub(crate) lookup_points_offset: usize, pub(crate) num_permutation_z_polys: usize, pub(crate) num_vars: usize, pub(crate) expression: Expression, @@ -135,18 +135,17 @@ where + circuit_info.num_witness_polys.iter().sum::() + permutation_polys.len() + num_permutation_z_polys; + let lookup_points_offset = zero_check_opening_points_len(&expression, circuit_info.num_instances.len()); + let lasso_lookup = &circuit_info.lasso_lookup[0]; let vp = HyperPlonkVerifierParam { pcs: pcs_vp, num_instances: circuit_info.num_instances.clone(), num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), - lasso_tables: circuit_info - .lasso_lookups - .iter() - .map(|(_, _, table)| table.clone()) - .collect_vec(), + lasso_table: lasso_lookup.2.clone(), lookup_polys_offset, + lookup_points_offset, num_permutation_z_polys, num_vars, expression: expression.clone(), @@ -163,8 +162,9 @@ where num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), lookups: circuit_info.lookups.clone(), - lasso_lookups: circuit_info.lasso_lookups.clone(), + lasso_lookup: lasso_lookup.clone(), lookup_polys_offset, + lookup_points_offset, num_permutation_z_polys, num_vars, expression, @@ -227,62 +227,48 @@ 
where .chain(witness_polys.iter()) .collect_vec(); - let (lookups, tables) = pp - .lasso_lookups - .iter() - .map(|(input, index, table)| ((input, index), table)) - .unzip::<_, _, Vec<_>, Vec<_>>(); - let lookup_polys = Lasso::::lookup_polys(&polys, &lookups); - let (lookup_input_polys, lookup_nz_polys) = - lookup_polys.into_iter().unzip::<_, _, Vec<_>, Vec<_>>(); - - let lookup_input_poly = &lookup_input_polys[0]; - let lookup_nz_poly = &lookup_nz_polys[0]; - let table = tables[0]; + let (lookup, table) = ((&pp.lasso_lookup.0, &pp.lasso_lookup.1), &pp.lasso_lookup.2); + let (lookup_input_poly, lookup_nz_poly) = + LassoProver::::lookup_poly(&lookup, &polys); + let num_vars = lookup_input_poly.num_vars(); // why this is 3?? let lookup_points_offset = 3; - // commit to input_poly - let lookup_input_comm = Pcs::commit_and_write(&pp.pcs, lookup_input_poly, transcript)?; - - // get surge and dims - let mut surge = Surge::::new(); - - // commit to dims - let dims = surge.commit(&table, lookup_nz_poly); - let dim_comms = Pcs::batch_commit_and_write(&pp.pcs, &dims, transcript)?; - - // Round n - // squeeze `r` - let r = transcript.squeeze_challenges(num_vars); - // get subtable_polys let subtable_polys = table.subtable_polys(); let subtable_polys = subtable_polys.iter().collect_vec(); let subtable_polys = subtable_polys.as_slice(); - // get e_polys & read_ts_polys & final_cts_polys - let e_polys = { - let nz = surge.nz(); - LassoProver::::e_polys(subtable_polys, &table, &nz) - }; - let (read_ts_polys, final_cts_polys) = surge.counter_polys(&table); + let (lookup_polys, lookup_comms) = LassoProver::::commit( + &pp.pcs, + pp.lookup_polys_offset, + &table, + subtable_polys, + lookup_input_poly, + &lookup_nz_poly, + transcript, + )?; - // commit to read_ts_polys & final_cts_polys & e_polys - let read_ts_comms = Pcs::batch_commit_and_write(&pp.pcs, &read_ts_polys, transcript)?; - let final_cts_comms = Pcs::batch_commit_and_write(&pp.pcs, &final_cts_polys, transcript)?; - let 
e_comms = Pcs::batch_commit_and_write(&pp.pcs, e_polys.as_slice(), transcript)?; + // Round n + // squeeze `r` + let r = transcript.squeeze_challenges(num_vars); + let (input_poly, dims, read_ts_polys, final_cts_polys, e_polys) = ( + &lookup_polys[0][0], + &lookup_polys[1], + &lookup_polys[2], + &lookup_polys[3], + &lookup_polys[4], + ); // Lasso Sumcheck - let (lookup_points, lookup_evals) = Surge::::prove_sum_check( + let (lookup_points, lookup_evals) = LassoProver::::prove_sum_check( + lookup_points_offset, &table, - lookup_input_poly, - e_polys.as_slice(), + input_poly, + &e_polys.iter().collect_vec(), &r, num_vars, - pp.lookup_polys_offset, - lookup_points_offset, transcript, )?; @@ -291,39 +277,32 @@ where let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); // memory_checking - let mut memory_checking = LassoProver::::prepare_memory_checking( - &table, - &subtable_polys, - &e_polys, - &dims, - &read_ts_polys, - &final_cts_polys, - &beta, - &gamma, - ); - - memory_checking - .iter_mut() - .map(|memory_checking| memory_checking.prove_grand_product(transcript)) - .collect::, Error>>()?; - - // for each memory_checking, prepare dims, e_polys, read_ts_polys and `x` - // for each memory_checking, prepare final_cts_polys and `y` - let mem_check_opening_points = memory_checking + let (mem_check_opening_points, mem_check_opening_evals) = + LassoProver::::memory_checking( + lookup_points_offset, + table, + subtable_polys, + dims, + read_ts_polys, + final_cts_polys, + e_polys, + &beta, + &gamma, + transcript, + )?; + + let lookup_polys = lookup_polys .iter() - .flat_map(|memory_checking| memory_checking.opening_points()) + .flat_map(|lookup_polys| lookup_polys.iter().map(|poly| &poly.poly).collect_vec()) .collect_vec(); - - let mem_check_opening_evals = memory_checking - .iter() - .enumerate() - .flat_map(|(index, memory_checking)| { - memory_checking.opening_evals( - table.num_chunks(), - pp.lookup_polys_offset, - lookup_points_offset + 2 + 2 * 
index, - ) - }) + let lookup_comms = lookup_comms.concat(); + let lookup_opening_points = iter::empty() + .chain(lookup_points) + .chain(mem_check_opening_points) + .collect_vec(); + let lookup_evals = iter::empty() + .chain(lookup_evals) + .chain(mem_check_opening_evals) .collect_vec(); let timer = start_timer(|| format!("permutation_z_polys-{}", pp.permutation_polys.len())); @@ -360,13 +339,7 @@ where )?; // PCS open - let polys = iter::empty() - .chain(polys) - .chain([lookup_input_poly]) - .chain(dims.iter()) - .chain(read_ts_polys.iter()) - .chain(final_cts_polys.iter()) - .chain(e_polys.iter()); + let polys = iter::empty().chain(polys).chain(lookup_polys); let dummy_comm = Pcs::Commitment::default(); let comms = iter::empty() .chain(iter::repeat(&dummy_comm).take(pp.num_instances.len())) @@ -374,22 +347,13 @@ where .chain(&witness_comms) .chain(&pp.permutation_comms) .chain(&permutation_z_comms) - .chain([&lookup_input_comm]) - .chain(dim_comms.iter()) - .chain(read_ts_comms.iter()) - .chain(final_cts_comms.iter()) - .chain(e_comms.iter()) + .chain(lookup_comms.iter()) .collect_vec(); let points = iter::empty() .chain(points) - .chain(lookup_points) - .chain(mem_check_opening_points) - .collect_vec(); - let evals = iter::empty() - .chain(evals) - .chain(lookup_evals) - .chain(mem_check_opening_evals) + .chain(lookup_opening_points) .collect_vec(); + let evals = iter::empty().chain(evals).chain(lookup_evals).collect_vec(); let timer = start_timer(|| format!("pcs_batch_open-{}", evals.len())); Pcs::batch_open(&pp.pcs, polys, comms, &points, &evals, transcript)?; end_timer(timer); @@ -421,23 +385,15 @@ where challenges.extend(transcript.squeeze_challenges(*num_challenges)); } + let lookup_table = &vp.lasso_table; let lookup_points_offset = 3; - // read input_comm, dim_comms - let input_comm = Pcs::read_commitment(&vp.pcs, transcript)?; - let lasso_lookup_tables = &vp.lasso_tables; - let lookup_table = &lasso_lookup_tables[0]; - let num_chunks = 
lookup_table.num_chunks(); - let num_memories = lookup_table.num_memories(); - let dim_comms = Pcs::read_commitments(&vp.pcs, num_chunks, transcript)?; + + let lookup_comms = + LassoVerifier::::read_commitments(&vp.pcs, lookup_table, transcript)?; // Round n let r = transcript.squeeze_challenges(vp.num_vars); - // read read_ts_comms & final_cts_comms & e_comms - let read_ts_comms = Pcs::read_commitments(&vp.pcs, num_chunks, transcript)?; - let final_cts_comms = Pcs::read_commitments(&vp.pcs, num_chunks, transcript)?; - let e_comms = Pcs::read_commitments(&vp.pcs, num_memories, transcript)?; - let (lookup_points, lookup_evals) = LassoVerifier::::verify_sum_check( lookup_table, vp.num_vars, @@ -452,25 +408,23 @@ where let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); // memory checking - let memory_checking = LassoVerifier::::prepare_memory_checking(lookup_table); let (mem_check_opening_points, mem_check_opening_evals) = - memory_checking - .iter() - .enumerate() - .map(|(index, memory_checking)| { - memory_checking.verify_grand_product( - lookup_table.num_chunks(), - vp.num_vars, - vp.lookup_polys_offset, - lookup_points_offset + 2 + 2 * index, - &beta, - &gamma, - transcript - ) - }) - .collect::>, Vec>)>, Error>>()? 
- .into_iter() - .unzip::<_, _, Vec<_>, Vec<_>>(); + LassoVerifier::::memory_checking( + vp.num_vars, + vp.lookup_polys_offset, + lookup_points_offset, + lookup_table, + &beta, + &gamma, + transcript, + )?; + + let lookup_opening_points = iter::empty() + .chain(lookup_points) + .chain(mem_check_opening_points); + let lookup_evals = iter::empty() + .chain(lookup_evals) + .chain(mem_check_opening_evals); let permutation_z_comms = Pcs::read_commitments(&vp.pcs, vp.num_permutation_z_polys, transcript)?; @@ -497,22 +451,13 @@ where .chain(&witness_comms) .chain(vp.permutation_comms.iter().map(|(_, comm)| comm)) .chain(&permutation_z_comms) - .chain([&input_comm]) - .chain(dim_comms.iter()) - .chain(read_ts_comms.iter()) - .chain(final_cts_comms.iter()) - .chain(e_comms.iter()) + .chain(lookup_comms.iter()) .collect_vec(); let points = iter::empty() .chain(points) - .chain(lookup_points) - .chain(mem_check_opening_points.concat()) - .collect_vec(); - let evals = iter::empty() - .chain(evals) - .chain(lookup_evals) - .chain(mem_check_opening_evals.concat()) + .chain(lookup_opening_points) .collect_vec(); + let evals = iter::empty().chain(evals).chain(lookup_evals).collect_vec(); Pcs::batch_verify(&vp.pcs, comms, &points, &evals, transcript)?; Ok(()) diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index b01834b..4f2a2c8 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -10,10 +10,7 @@ use crate::{ }, poly::{multilinear::MultilinearPolynomial, Polynomial}, util::{ - arithmetic::{ - fe_from_le_bytes, fe_to_bits_le, inner_product, powers, usize_from_bits_le, - BooleanHypercube, PrimeField, - }, + arithmetic::{fe_to_bits_le, usize_from_bits_le, BooleanHypercube, PrimeField}, expression::{Expression, Query, Rotation}, test::{rand_array, rand_idx, rand_vec}, Itertools, @@ -44,7 +41,7 @@ pub fn vanilla_plonk_circuit_info( num_challenges: vec![0], 
constraints: vec![q_l * w_l + q_r * w_r + q_m * w_l * w_r + q_o * w_o + q_c + pi], lookups: vec![], - lasso_lookups: vec![], + lasso_lookup: vec![], permutations, max_degree: Some(4), } @@ -82,7 +79,7 @@ pub fn vanilla_plonk_with_lookup_circuit_info( (q_lookup * w_r, t_r.clone()), (q_lookup * w_o, t_o.clone()), ]], - lasso_lookups: vec![], + lasso_lookup: vec![], permutations, max_degree: Some(4), } @@ -109,7 +106,7 @@ pub fn vanilla_plonk_with_lasso_lookup_circuit_info( num_challenges: vec![0], constraints: vec![], lookups: vec![vec![]], - lasso_lookups: vec![(lasso_lookup_input, lasso_lookup_indices, lasso_table)], + lasso_lookup: vec![(lasso_lookup_input, lasso_lookup_indices, lasso_table)], permutations, max_degree: Some(4), } diff --git a/plonkish_backend/src/backend/hyperplonk/verifier.rs b/plonkish_backend/src/backend/hyperplonk/verifier.rs index dcc602c..d8a46f9 100644 --- a/plonkish_backend/src/backend/hyperplonk/verifier.rs +++ b/plonkish_backend/src/backend/hyperplonk/verifier.rs @@ -180,3 +180,19 @@ pub(crate) fn point_offset(pcs_query: &BTreeSet) -> HashMap( + expression: &Expression, + num_instance_poly: usize, +) -> usize { + let pcs_query = pcs_query(expression, num_instance_poly); + pcs_query + .iter() + .map(Query::rotation) + .collect::>() + .into_iter() + .map(|rotation| { + 1 << rotation.distance() + }) + .sum() +} diff --git a/plonkish_backend/src/backend/lookup/lasso.rs b/plonkish_backend/src/backend/lookup/lasso.rs index 57aa1c7..3056475 100644 --- a/plonkish_backend/src/backend/lookup/lasso.rs +++ b/plonkish_backend/src/backend/lookup/lasso.rs @@ -72,85 +72,6 @@ impl Clone for Box> { } } -#[derive(Clone, Debug)] -pub struct Lasso< - F: Field + PrimeField, - Pcs: PolynomialCommitmentScheme>, -> { - _marker1: PhantomData, - _marker2: PhantomData, -} - -impl< - F: Field + PrimeField, - Pcs: PolynomialCommitmentScheme>, - > Lasso -{ - pub fn lookup_polys( - polys: &[&MultilinearPolynomial], - lookups: &Vec<(&Expression, &Expression)>, - ) -> 
Vec<(MultilinearPolynomial, MultilinearPolynomial)> { - let num_vars = polys[0].num_vars(); - let expression = lookups - .iter() - .map(|(input, index)| *input + *index) - .sum::>(); - let lagranges = { - let bh = BooleanHypercube::new(num_vars).iter().collect_vec(); - expression - .used_langrange() - .into_iter() - .map(|i| (i, bh[i.rem_euclid(1 << num_vars) as usize])) - .collect::>() - }; - lookups - .iter() - .map(|lookup| Self::lookup_poly(lookup, &lagranges, polys)) - .collect() - } - - fn lookup_poly( - lookup: &(&Expression, &Expression), - lagranges: &HashSet<(i32, usize)>, - polys: &[&MultilinearPolynomial], - ) -> (MultilinearPolynomial, MultilinearPolynomial) { - let num_vars = polys[0].num_vars(); - let bh = BooleanHypercube::new(num_vars); - - let evaluate = |expression: &Expression| { - let mut evals = vec![F::ZERO; 1 << num_vars]; - parallelize(&mut evals, |(evals, start)| { - for (b, eval) in (start..).zip(evals) { - *eval = expression.evaluate( - &|constant| constant, - &|common_poly| match common_poly { - CommonPolynomial::Identity => F::from(b as u64), - CommonPolynomial::Lagrange(i) => { - if lagranges.contains(&(i, b)) { - F::ONE - } else { - F::ZERO - } - } - CommonPolynomial::EqXY(_) => unreachable!(), - }, - &|query| polys[query.poly()][bh.rotate(b, query.rotation())], - &|_| unreachable!(), - &|value| -value, - &|lhs, rhs| lhs + &rhs, - &|lhs, rhs| lhs * &rhs, - &|value, scalar| value * &scalar, - ); - } - }); - MultilinearPolynomial::new(evals) - }; - - let (input, index) = lookup; - (evaluate(input), evaluate(index)) - } -} - #[derive(Clone, Debug)] pub struct GeneralizedLasso>( PhantomData, diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs index 3196f8a..aabb3ec 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs @@ -6,7 +6,6 @@ use std::iter; use 
halo2_curves::ff::PrimeField; use itertools::Itertools; pub use prover::MemoryCheckingProver; -use rayon::prelude::{IntoParallelRefIterator, IndexedParallelIterator, ParallelIterator}; use crate::{ poly::multilinear::MultilinearPolynomial, @@ -18,7 +17,7 @@ use crate::{ pub struct Chunk { chunk_index: usize, chunk_bits: usize, - memory: Vec>, + pub(crate) memory: Vec>, } impl Chunk { @@ -50,7 +49,10 @@ impl Chunk { } pub fn memory_indices(&self) -> Vec { - self.memory.iter().map(|memory| memory.memory_index).collect_vec() + self.memory + .iter() + .map(|memory| memory.memory_index) + .collect_vec() } /// check the following relations: @@ -65,34 +67,27 @@ impl Chunk { init_ys: &[F], final_read_ys: &[F], y: &[F], - gamma: &F, - tau: &F, + hash: impl Fn(&F, &F, &F) -> F, transcript: &mut impl FieldTranscriptRead, ) -> Result<(F, F, F, Vec), Error> { - let hash = |a: &F, v: &F, t: &F| -> F { *a + *v * gamma + *t * gamma.square() - tau }; let [dim_x, read_ts_poly_x, final_cts_poly_y] = transcript.read_field_elements(3)?.try_into().unwrap(); let e_poly_xs = transcript.read_field_elements(self.num_memories())?; + let id_poly_y = inner_product( + iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double())) + .take(y.len()) + .collect_vec() + .iter(), + y, + ); self.memory.iter().enumerate().for_each(|(i, memory)| { assert_eq!(read_xs[i], hash(&dim_x, &e_poly_xs[i], &read_ts_poly_x)); - assert_eq!( write_xs[i], hash(&dim_x, &e_poly_xs[i], &(read_ts_poly_x + F::ONE)) ); - - let id_poly_y = inner_product( - iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double())) - .take(y.len()) - .collect_vec() - .iter(), - y, - ); - let subtable_poly_y = memory.subtable_poly.evaluate(y); - assert_eq!(init_ys[i], hash(&id_poly_y, &subtable_poly_y, &F::ZERO)); - assert_eq!( final_read_ys[i], hash(&id_poly_y, &subtable_poly_y, &final_cts_poly_y) diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs 
b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs index 5ebcfe6..d76077b 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs @@ -5,37 +5,34 @@ use itertools::{chain, Itertools}; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use crate::{ - backend::lookup::lasso::prover::Chunk, + backend::lookup::lasso::prover::{Chunk, Point}, pcs::Evaluation, piop::gkr::prove_grand_product, - poly::{multilinear::MultilinearPolynomial, Polynomial}, + poly::multilinear::MultilinearPolynomial, util::transcript::FieldTranscriptWrite, Error, }; use super::MemoryGKR; -#[derive(Clone)] pub struct MemoryCheckingProver<'a, F: PrimeField> { + /// offset of MemoryCheckingProver instance opening points + points_offset: usize, /// chunks with the same bits size chunks: Vec>, /// GKR initial polynomials for each memory memories: Vec>, /// random point at which `read_write` polynomials opened - x: Vec, + x: Point, /// random point at which `init_final_read` polynomials opened - y: Vec, + y: Point, } -// e_polys -> x (Lasso Sumcheck) -// dims, e_polys, read_ts_polys -> x (for each MemoryChecking) -// final_cts_polys -> y (for each MemoryChecking) - impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { // T_1[dim_1(x)], ..., T_k[dim_1(x)], // ... 
// T_{\alpha-k+1}[dim_c(x)], ..., T_{\alpha}[dim_c(x)] - pub fn new(chunks: Vec>, tau: &F, gamma: &F) -> Self { + pub fn new(points_offset: usize, chunks: Vec>, tau: &F, gamma: &F) -> Self { let num_reads = chunks[0].num_reads(); let memory_size = 1 << chunks[0].chunk_bits(); @@ -81,10 +78,11 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { .collect(); Self { + points_offset, chunks, memories: memories_gkr, - x: vec![], - y: vec![], + x: Point::default(), + y: Point::default(), } } @@ -154,10 +152,10 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { ]) } - pub fn prove_grand_product( + pub fn prove( &mut self, transcript: &mut impl FieldTranscriptWrite, - ) -> Result<(), Error> { + ) -> Result<(Vec>, Vec>), Error> { let (_, x) = prove_grand_product( iter::repeat(None).take(self.memories.len() * 2), chain!(self.reads(), self.writes()), @@ -177,44 +175,42 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { transcript.write_field_elements(&e_poly_xs).unwrap(); }); - self.x = x; - self.y = y; + self.x = Point { + offset: self.points_offset, + point: x, + }; + self.y = Point { + offset: self.points_offset + 1, + point: y, + }; + + let opening_points = self.opening_points().collect_vec(); + let opening_evals = self.opening_evals().collect_vec(); - Ok(()) + Ok((opening_points, opening_evals)) } pub fn opening_points(&self) -> impl Iterator> { - chain!([self.x.clone(), self.y.clone()]) + chain!([self.x.point.clone(), self.y.point.clone()]) } - pub fn opening_evals( - &self, - num_chunks: usize, - polys_offset: usize, - points_offset: usize, - ) -> impl Iterator> { + pub fn opening_evals(&self) -> impl Iterator> { let (dim_xs, read_ts_poly_xs, final_cts_poly_xs, e_poly_xs) = self .chunks .iter() .map(|chunk| { - let chunk_poly_evals = chunk.chunk_poly_evals(&self.x, &self.y); - let chunk_polys_index = chunk.chunk_polys_index(polys_offset, num_chunks); - let e_poly_xs = chunk.e_poly_evals(&self.x); - let e_polys_offset = polys_offset + 1 + 3 * num_chunks; 
+ let chunk_poly_evals = chunk.chunk_poly_evals(&self.x.point, &self.y.point); + let x = self.x.offset; + let y = self.y.offset; + let e_poly_xs = chunk.e_poly_evals(&self.x.point); ( - Evaluation::new(chunk_polys_index[0], points_offset, chunk_poly_evals[0]), - Evaluation::new(chunk_polys_index[1], points_offset, chunk_poly_evals[1]), - Evaluation::new(chunk_polys_index[2], points_offset + 1, chunk_poly_evals[2]), + Evaluation::new(chunk.dim.offset, x, chunk_poly_evals[0]), + Evaluation::new(chunk.read_ts_poly.offset, x, chunk_poly_evals[1]), + Evaluation::new(chunk.final_cts_poly.offset, y, chunk_poly_evals[2]), chunk .memories() .enumerate() - .map(|(i, memory)| { - Evaluation::new( - e_polys_offset + memory.memory_index(), - points_offset, - e_poly_xs[i], - ) - }) + .map(|(i, memory)| Evaluation::new(memory.e_poly.offset, x, e_poly_xs[i])) .collect_vec(), ) }) diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index 631f289..8a17b13 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -1,32 +1,37 @@ use std::{iter, marker::PhantomData}; use halo2_curves::ff::PrimeField; -use itertools::{Itertools, chain}; +use itertools::{chain, Itertools}; -use crate::{piop::gkr::verify_grand_product, util::transcript::FieldTranscriptRead, Error, pcs::Evaluation}; +use crate::{ + pcs::Evaluation, piop::gkr::verify_grand_product, util::transcript::FieldTranscriptRead, Error, +}; use super::Chunk; #[derive(Clone, Debug)] pub struct MemoryCheckingVerifier { + /// offset of MemoryCheckingProver instance opening points + points_offset: usize, + /// chunks with the same bits size chunks: Vec>, _marker: PhantomData, } impl<'a, F: PrimeField> MemoryCheckingVerifier { - pub fn new(chunks: Vec>) -> Self { + pub fn new(points_offset: usize, chunks: Vec>) -> Self { Self { + 
points_offset, chunks, _marker: PhantomData, } } - pub fn verify_grand_product( + pub fn verify( &self, num_chunks: usize, num_reads: usize, polys_offset: usize, - points_offset: usize, gamma: &F, tau: &F, transcript: &mut impl FieldTranscriptRead, @@ -47,6 +52,7 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { )?; let (init_ys, final_read_ys) = init_final_read_ys.split_at(num_memories); + let hash = |a: &F, v: &F, t: &F| -> F { *a + *v * gamma + *t * gamma.square() - tau }; let mut offset = 0; let (dim_xs, read_ts_poly_xs, final_cts_poly_ys, e_poly_xs) = self .chunks @@ -59,8 +65,7 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { &init_ys[offset..offset + num_memories], &final_read_ys[offset..offset + num_memories], &y, - gamma, - tau, + hash, transcript, ); offset += num_memories; @@ -70,15 +75,16 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { .into_iter() .multiunzip::<(Vec<_>, Vec<_>, Vec<_>, Vec>)>(); - let opening_evals = self.opening_evals( - num_chunks, - polys_offset, - points_offset, - &dim_xs, - &read_ts_poly_xs, - &final_cts_poly_ys, - &e_poly_xs.concat() - ).collect_vec(); + let opening_evals = self + .opening_evals( + num_chunks, + polys_offset, + &dim_xs, + &read_ts_poly_xs, + &final_cts_poly_ys, + &e_poly_xs.concat(), + ) + .collect_vec(); Ok((vec![x, y], opening_evals)) } @@ -87,29 +93,28 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { &self, num_chunks: usize, polys_offset: usize, - points_offset: usize, dim_xs: &[F], read_ts_poly_xs: &[F], final_cts_poly_ys: &[F], e_poly_xs: &[F], ) -> impl Iterator> { - let (dim_xs, read_ts_poly_xs, final_cts_poly_xs) = self + let (dim_xs, read_ts_poly_xs, final_cts_poly_ys) = self .chunks .iter() .enumerate() .map(|(i, chunk)| { let chunk_polys_index = chunk.chunk_polys_index(polys_offset, num_chunks); ( - Evaluation::new(chunk_polys_index[0], points_offset, dim_xs[i]), - Evaluation::new(chunk_polys_index[1], points_offset, read_ts_poly_xs[i]), - Evaluation::new(chunk_polys_index[2], 
points_offset + 1, final_cts_poly_ys[i]), + Evaluation::new(chunk_polys_index[0], self.points_offset, dim_xs[i]), + Evaluation::new(chunk_polys_index[1], self.points_offset, read_ts_poly_xs[i]), + Evaluation::new( + chunk_polys_index[2], + self.points_offset + 1, + final_cts_poly_ys[i], + ), ) }) - .multiunzip::<( - Vec>, - Vec>, - Vec>, - )>(); + .multiunzip::<(Vec>, Vec>, Vec>)>(); let e_poly_offset = polys_offset + 1 + 3 * num_chunks; let e_poly_xs = self @@ -118,18 +123,9 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { .flat_map(|chunk| chunk.memory_indices()) .zip(e_poly_xs) .map(|(memory_index, &e_poly_x)| { - Evaluation::new( - e_poly_offset + memory_index, - points_offset, - e_poly_x, - ) + Evaluation::new(e_poly_offset + memory_index, self.points_offset, e_poly_x) }) .collect_vec(); - chain!( - dim_xs, - read_ts_poly_xs, - final_cts_poly_xs, - e_poly_xs - ) + chain!(dim_xs, read_ts_poly_xs, final_cts_poly_ys, e_poly_xs) } } diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs index 773d7cb..d4c2171 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs @@ -1,56 +1,71 @@ use std::{ - collections::HashMap, - iter::{self, repeat}, + collections::{HashMap, HashSet}, + marker::PhantomData, }; use halo2_curves::ff::{Field, PrimeField}; -use itertools::{chain, izip, Itertools}; +use itertools::{chain, Itertools}; use crate::{ pcs::{CommitmentChunk, Evaluation, PolynomialCommitmentScheme}, - piop::sum_check::{ - classic::{ClassicSumCheck, EvaluationsProver}, - SumCheck as _, VirtualPolynomial, - }, poly::multilinear::MultilinearPolynomial, - util::transcript::TranscriptWrite, + util::{ + arithmetic::BooleanHypercube, + expression::{CommonPolynomial, Expression}, + impl_index, + parallel::parallelize, + transcript::{FieldTranscriptWrite, TranscriptWrite}, + }, Error, }; -use 
super::{memory_checking::MemoryCheckingProver, DecomposableTable, Lasso}; +use super::{memory_checking::MemoryCheckingProver, DecomposableTable}; mod surge; pub use surge::Surge; -type SumCheck = ClassicSumCheck>; - +#[derive(Default)] pub struct Point { - offset: usize, - point: Vec, + /// point offset in batch opening + pub(crate) offset: usize, + pub(crate) point: Vec, } -#[derive(Clone)] -pub struct Poly<'a, F: PrimeField> { - offset: usize, - poly: &'a MultilinearPolynomial, +#[derive(Clone, Debug)] +pub struct Poly { + /// polynomial offset in batch opening + pub(crate) offset: usize, + pub(crate) poly: MultilinearPolynomial, +} + +impl_index!(Poly, poly); + +impl Poly { + pub fn num_vars(&self) -> usize { + self.poly.num_vars() + } + + pub fn evaluate(&self, x: &[F]) -> F { + self.poly.evaluate(x) + } } #[derive(Clone, Debug)] pub struct Chunk<'a, F: PrimeField> { - chunk_index: usize, - dim: &'a MultilinearPolynomial, - read_ts_poly: &'a MultilinearPolynomial, - final_cts_poly: &'a MultilinearPolynomial, - memories: Vec>, + pub(super) chunk_index: usize, + pub(super) dim: &'a Poly, + pub(super) read_ts_poly: &'a Poly, + pub(super) final_cts_poly: &'a Poly, + pub(super) memories: Vec>, } impl<'a, F: PrimeField> Chunk<'a, F> { fn new( chunk_index: usize, - dim: &'a MultilinearPolynomial, - read_ts_poly: &'a MultilinearPolynomial, - final_cts_poly: &'a MultilinearPolynomial, + dim: &'a Poly, + read_ts_poly: &'a Poly, + final_cts_poly: &'a Poly, memory: Memory<'a, F>, ) -> Self { // sanity check @@ -65,13 +80,6 @@ impl<'a, F: PrimeField> Chunk<'a, F> { } } - pub fn chunk_polys_index(&self, offset: usize, num_chunks: usize) -> Vec { - let dim_poly_index = offset + 1 + self.chunk_index; - let read_ts_poly_index = offset + 1 + num_chunks + self.chunk_index; - let final_cts_poly_index = offset + 1 + 2 * num_chunks + self.chunk_index; - vec![dim_poly_index, read_ts_poly_index, final_cts_poly_index] - } - pub fn chunk_index(&self) -> usize { self.chunk_index } @@ 
-85,7 +93,11 @@ impl<'a, F: PrimeField> Chunk<'a, F> { } pub fn chunk_polys(&self) -> impl Iterator> { - chain!([self.dim, self.read_ts_poly, self.final_cts_poly]) + chain!([ + &self.dim.poly, + &self.read_ts_poly.poly, + &self.final_cts_poly.poly + ]) } pub fn chunk_poly_evals(&self, x: &[F], y: &[F]) -> Vec { @@ -103,7 +115,7 @@ impl<'a, F: PrimeField> Chunk<'a, F> { .collect_vec() } - pub(super) fn memories(&self) -> impl Iterator> { + pub(super) fn memories(&self) -> impl Iterator> { self.memories.iter() } @@ -120,43 +132,81 @@ impl<'a, F: PrimeField> Chunk<'a, F> { #[derive(Clone, Debug)] pub(super) struct Memory<'a, F: PrimeField> { - memory_index: usize, subtable_poly: &'a MultilinearPolynomial, - e_poly: &'a MultilinearPolynomial, + pub(crate) e_poly: &'a Poly, } impl<'a, F: PrimeField> Memory<'a, F> { - fn new( - memory_index: usize, - subtable_poly: &'a MultilinearPolynomial, - e_poly: &'a MultilinearPolynomial, - ) -> Self { + fn new(subtable_poly: &'a MultilinearPolynomial, e_poly: &'a Poly) -> Self { Self { - memory_index, subtable_poly, e_poly, } } - pub fn memory_index(&self) -> usize { - self.memory_index - } - - pub fn e_poly(&self) -> &'a MultilinearPolynomial { - self.e_poly - } - - pub fn polys(&self) -> impl Iterator> { - chain!([self.subtable_poly, self.e_poly]) + pub fn polys(&'a self) -> impl Iterator> { + chain!([&self.subtable_poly, &self.e_poly.poly]) } } pub struct LassoProver< F: Field + PrimeField, Pcs: PolynomialCommitmentScheme>, -> { - // Remove this - scheme: Lasso, +>(PhantomData, PhantomData); + +impl< + F: Field + PrimeField, + Pcs: PolynomialCommitmentScheme>, + > LassoProver +{ + pub fn lookup_poly( + lookup: &(&Expression, &Expression), + polys: &[&MultilinearPolynomial], + ) -> (MultilinearPolynomial, MultilinearPolynomial) { + let num_vars = polys[0].num_vars(); + let expression = lookup.0 + lookup.1; + let lagranges = { + let bh = BooleanHypercube::new(num_vars).iter().collect_vec(); + expression + .used_langrange() + 
.into_iter() + .map(|i| (i, bh[i.rem_euclid(1 << num_vars) as usize])) + .collect::>() + }; + let bh = BooleanHypercube::new(num_vars); + + let evaluate = |expression: &Expression| { + let mut evals = vec![F::ZERO; 1 << num_vars]; + parallelize(&mut evals, |(evals, start)| { + for (b, eval) in (start..).zip(evals) { + *eval = expression.evaluate( + &|constant| constant, + &|common_poly| match common_poly { + CommonPolynomial::Identity => F::from(b as u64), + CommonPolynomial::Lagrange(i) => { + if lagranges.contains(&(i, b)) { + F::ONE + } else { + F::ZERO + } + } + CommonPolynomial::EqXY(_) => unreachable!(), + }, + &|query| polys[query.poly()][bh.rotate(b, query.rotation())], + &|_| unreachable!(), + &|value| -value, + &|lhs, rhs| lhs + &rhs, + &|lhs, rhs| lhs * &rhs, + &|value, scalar| value * &scalar, + ); + } + }); + MultilinearPolynomial::new(evals) + }; + + let (input, index) = lookup; + (evaluate(input), evaluate(index)) + } } impl< @@ -164,9 +214,9 @@ impl< Pcs: PolynomialCommitmentScheme>, > LassoProver { - pub fn e_polys( - subtable_polys: &[&MultilinearPolynomial], + fn e_polys( table: &Box>, + subtable_polys: &[&MultilinearPolynomial], nz: &Vec<&[usize]>, ) -> Vec> { let num_chunks = table.num_chunks(); @@ -186,21 +236,21 @@ impl< .collect_vec() } - pub fn chunks<'a>( + fn chunks<'a>( table: &Box>, subtable_polys: &'a [&MultilinearPolynomial], - e_polys: &'a [MultilinearPolynomial], - dims: &'a [MultilinearPolynomial], - read_ts_polys: &'a [MultilinearPolynomial], - final_cts_polys: &'a [MultilinearPolynomial], + dims: &'a [Poly], + read_ts_polys: &'a [Poly], + final_cts_polys: &'a [Poly], + e_polys: &'a [Poly], ) -> Vec> { // key: chunk index, value: chunk - let mut chunk_map: HashMap> = HashMap::new(); + let mut chunk_map: HashMap> = HashMap::new(); let num_memories = table.num_memories(); let memories = (0..num_memories).map(|memory_index| { let subtable_poly = subtable_polys[table.memory_to_subtable_index(memory_index)]; - Memory::new(memory_index, 
subtable_poly, &e_polys[memory_index]) + Memory::new(subtable_poly, &e_polys[memory_index]) }); memories.enumerate().for_each(|(memory_index, memory)| { let chunk_index = table.memory_to_chunk_index(memory_index); @@ -230,27 +280,48 @@ impl< chunks.into_iter().map(|(_, chunk)| chunk).collect_vec() } - pub fn prepare_memory_checking<'a>( + pub fn prove_sum_check( + points_offset: usize, + table: &Box>, + input_poly: &Poly, + e_polys: &[&Poly], + r: &[F], + num_vars: usize, + transcript: &mut impl TranscriptWrite, F>, + ) -> Result<(Vec>, Vec>), Error> { + Surge::::prove_sum_check( + table, + input_poly, + &e_polys, + r, + num_vars, + points_offset, + transcript, + ) + } + + fn prepare_memory_checking<'a>( + points_offset: usize, table: &Box>, subtable_polys: &'a [&MultilinearPolynomial], - e_polys: &'a [MultilinearPolynomial], - dims: &'a [MultilinearPolynomial], - read_ts_polys: &'a [MultilinearPolynomial], - final_cts_polys: &'a [MultilinearPolynomial], + dims: &'a [Poly], + read_ts_polys: &'a [Poly], + final_cts_polys: &'a [Poly], + e_polys: &'a [Poly], gamma: &F, tau: &F, ) -> Vec> { let chunks = Self::chunks( table, subtable_polys, - e_polys, dims, read_ts_polys, final_cts_polys, + e_polys, ); let chunk_bits = table.chunk_bits(); // key: chunk bits, value: chunks - let mut chunk_map: HashMap>> = HashMap::new(); + let mut chunk_map: HashMap>> = HashMap::new(); chunks.iter().enumerate().for_each(|(chunk_index, chunk)| { let chunk_bits = chunk_bits[chunk_index]; @@ -265,7 +336,139 @@ impl< chunk_map .into_iter() - .map(|(_, chunks)| MemoryCheckingProver::new(chunks, tau, gamma)) + .enumerate() + .map(|(index, (_, chunks))| { + let points_offset = points_offset + 2 + 2 * index; + MemoryCheckingProver::new(points_offset, chunks, tau, gamma) + }) .collect_vec() } + + pub fn memory_checking<'a>( + points_offset: usize, + table: &Box>, + subtable_polys: &'a [&MultilinearPolynomial], + dims: &'a [Poly], + read_ts_polys: &'a [Poly], + final_cts_polys: &'a [Poly], + 
e_polys: &'a [Poly], + gamma: &F, + tau: &F, + transcript: &mut impl FieldTranscriptWrite, + ) -> Result<(Vec>, Vec>), Error> { + let mut memory_checking = LassoProver::::prepare_memory_checking( + points_offset, + &table, + &subtable_polys, + &dims, + &read_ts_polys, + &final_cts_polys, + &e_polys, + &gamma, + &tau, + ); + + let (mem_check_opening_points, mem_check_opening_evals) = memory_checking + .iter_mut() + .map(|memory_checking| memory_checking.prove(transcript)) + .collect::>, Vec>)>, Error>>()? + .into_iter() + .unzip::<_, _, Vec<_>, Vec<_>>(); + Ok(( + mem_check_opening_points.concat(), + mem_check_opening_evals.concat(), + )) + } + + pub fn commit( + pp: &Pcs::ProverParam, + lookup_polys_offset: usize, + table: &Box>, + subtable_polys: &[&MultilinearPolynomial], + lookup_input_poly: MultilinearPolynomial, + lookup_nz_poly: &MultilinearPolynomial, + transcript: &mut impl TranscriptWrite, + ) -> Result<(Vec>>, Vec>), Error> { + let num_chunks = table.num_chunks(); + + // commit to input_poly + let lookup_input_comm = Pcs::commit_and_write(&pp, &lookup_input_poly, transcript)?; + + // get surge and dims + let mut surge = Surge::::new(); + + // commit to dims + let dims = surge.commit(&table, lookup_nz_poly); + let dim_comms = Pcs::batch_commit_and_write(pp, &dims, transcript)?; + + // get e_polys & read_ts_polys & final_cts_polys + let e_polys = { + let nz = surge.nz(); + LassoProver::::e_polys(&table, subtable_polys, &nz) + }; + let (read_ts_polys, final_cts_polys) = surge.counter_polys(&table); + + // commit to read_ts_polys & final_cts_polys & e_polys + let read_ts_comms = Pcs::batch_commit_and_write(&pp, &read_ts_polys, transcript)?; + let final_cts_comms = Pcs::batch_commit_and_write(&pp, &final_cts_polys, transcript)?; + let e_comms = Pcs::batch_commit_and_write(&pp, e_polys.as_slice(), transcript)?; + + let lookup_input_poly = Poly { + offset: lookup_polys_offset, + poly: lookup_input_poly, + }; + + let dims = dims + .into_iter() + .enumerate() + 
.map(|(chunk_index, dim)| Poly { + offset: lookup_polys_offset + 1 + chunk_index, + poly: dim, + }) + .collect_vec(); + + let read_ts_polys = read_ts_polys + .into_iter() + .enumerate() + .map(|(chunk_index, read_ts_poly)| Poly { + offset: lookup_polys_offset + 1 + num_chunks + chunk_index, + poly: read_ts_poly, + }) + .collect_vec(); + + let final_cts_polys = final_cts_polys + .into_iter() + .enumerate() + .map(|(chunk_index, final_cts_poly)| Poly { + offset: lookup_polys_offset + 1 + 2 * num_chunks + chunk_index, + poly: final_cts_poly, + }) + .collect_vec(); + + let e_polys = e_polys + .into_iter() + .enumerate() + .map(|(memory_index, e_poly)| Poly { + offset: lookup_polys_offset + 1 + 3 * num_chunks + memory_index, + poly: e_poly, + }) + .collect_vec(); + + Ok(( + vec![ + vec![lookup_input_poly], + dims, + read_ts_polys, + final_cts_polys, + e_polys, + ], + vec![ + vec![lookup_input_comm], + dim_comms, + read_ts_comms, + final_cts_comms, + e_comms, + ], + )) + } } diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 20d3d7f..404dda1 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -20,6 +20,8 @@ use crate::{ Error, }; +use super::Poly; + type SumCheck = ClassicSumCheck>; pub struct Surge< @@ -139,15 +141,16 @@ impl< pub fn prove_sum_check( table: &Box>, - input_poly: &MultilinearPolynomial, - e_polys: &[MultilinearPolynomial], + input_poly: &Poly, + e_polys: &[&Poly], r: &[F], num_vars: usize, - polys_offset: usize, points_offset: usize, transcript: &mut impl TranscriptWrite, F>, ) -> Result<(Vec>, Vec>), Error> { - let claimed_sum = Self::sum_check_claim(&r, &table, input_poly, &e_polys); + let claimed_sum = Self::sum_check_claim(&r, &table, &e_polys); + assert_eq!(claimed_sum, input_poly.evaluate(r)); + transcript.write_field_element(&claimed_sum)?; let expression = 
Self::sum_check_expression(&table); @@ -156,25 +159,35 @@ impl< let (x, evals) = SumCheck::prove( &(), num_vars, - VirtualPolynomial::new(&expression, e_polys, &[], &[r.to_vec()]), + VirtualPolynomial::new( + &expression, + e_polys.iter().map(|e_poly| &e_poly.poly), + &[], + &[r.to_vec()], + ), claimed_sum, transcript, )?; let points = vec![r.to_vec(), x]; let pcs_query = Self::pcs_query(&expression, 0); - let e_polys_offset = polys_offset + 1 + table.num_chunks() * 3; let evals = pcs_query .into_iter() .map(|query| { - transcript.write_field_element(&evals[query.poly()]).unwrap(); + transcript + .write_field_element(&evals[query.poly()]) + .unwrap(); Evaluation::new( - e_polys_offset + query.poly(), + e_polys[query.poly()].offset, points_offset + 1, evals[query.poly()], ) }) - .chain([Evaluation::new(polys_offset, points_offset, claimed_sum)]) + .chain([Evaluation::new( + input_poly.offset, + points_offset, + claimed_sum, + )]) .collect_vec(); Ok((points, evals)) @@ -183,8 +196,7 @@ impl< pub fn sum_check_claim( r: &[F], table: &Box>, - input_poly: &MultilinearPolynomial, - e_polys: &[MultilinearPolynomial], + e_polys: &[&Poly], ) -> F { let num_memories = table.num_memories(); assert_eq!(e_polys.len(), num_memories); @@ -199,7 +211,6 @@ impl< eq[k] * table.combine_lookups(&operands) }) .sum(); - assert_eq!(input_poly.evaluate(r), claim); claim } diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 99530a7..4f1a629 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -1,4 +1,4 @@ -use std::{collections::HashMap, marker::PhantomData}; +use std::{collections::HashMap, iter, marker::PhantomData}; use halo2_curves::ff::{Field, PrimeField}; use itertools::Itertools; @@ -10,7 +10,7 @@ use crate::{ SumCheck, }, poly::multilinear::MultilinearPolynomial, - util::transcript::FieldTranscriptRead, + 
util::transcript::{FieldTranscriptRead, TranscriptRead}, Error, }; @@ -30,6 +30,30 @@ impl< Pcs: PolynomialCommitmentScheme>, > LassoVerifier { + pub fn read_commitments( + vp: &Pcs::VerifierParam, + table: &Box>, + transcript: &mut impl TranscriptRead, + ) -> Result, Error> { + // read input_comm, dim_comms + let num_chunks = table.num_chunks(); + let num_memories = table.num_memories(); + let input_comm = Pcs::read_commitment(vp, transcript)?; + let dim_comms = Pcs::read_commitments(vp, num_chunks, transcript)?; + + // read read_ts_comms & final_cts_comms & e_comms + let read_ts_comms = Pcs::read_commitments(vp, num_chunks, transcript)?; + let final_cts_comms = Pcs::read_commitments(vp, num_chunks, transcript)?; + let e_comms = Pcs::read_commitments(vp, num_memories, transcript)?; + Ok(iter::empty() + .chain(vec![input_comm]) + .chain(dim_comms) + .chain(read_ts_comms) + .chain(final_cts_comms) + .chain(e_comms) + .collect_vec()) + } + pub fn verify_sum_check( table: &Box>, num_vars: usize, @@ -94,6 +118,7 @@ impl< } pub fn prepare_memory_checking<'a>( + points_offset: usize, table: &Box>, ) -> Vec> { let chunks = Self::chunks(table); @@ -115,7 +140,42 @@ impl< }); chunk_map .into_iter() - .map(|(_, chunks)| MemoryCheckingVerifier::new(chunks)) + .enumerate() + .map(|(index, (_, chunks))| { + let points_offset = points_offset + 2 + 2 * index; + MemoryCheckingVerifier::new(points_offset, chunks) + }) .collect_vec() } + + pub fn memory_checking( + num_reads: usize, + polys_offset: usize, + points_offset: usize, + table: &Box>, + gamma: &F, + tau: &F, + transcript: &mut impl FieldTranscriptRead, + ) -> Result<(Vec>, Vec>), Error> { + let memory_checking = Self::prepare_memory_checking(points_offset, table); + let (mem_check_opening_points, mem_check_opening_evals) = memory_checking + .iter() + .map(|memory_checking| { + memory_checking.verify( + table.num_chunks(), + num_reads, + polys_offset, + &gamma, + &tau, + transcript, + ) + }) + .collect::>, Vec>)>, 
Error>>()? + .into_iter() + .unzip::<_, _, Vec<_>, Vec<_>>(); + Ok(( + mem_check_opening_points.concat(), + mem_check_opening_evals.concat(), + )) + } } diff --git a/plonkish_backend/src/frontend/halo2.rs b/plonkish_backend/src/frontend/halo2.rs index 1abedbb..c50cb04 100644 --- a/plonkish_backend/src/frontend/halo2.rs +++ b/plonkish_backend/src/frontend/halo2.rs @@ -134,7 +134,7 @@ impl> PlonkishCircuit for Halo2Circuit { .collect_vec() }) .collect(); - let lasso_lookups = vec![]; + let lasso_lookup = vec![]; let num_instances = instances.iter().map(Vec::len).collect_vec(); let preprocess_polys = @@ -158,7 +158,7 @@ impl> PlonkishCircuit for Halo2Circuit { num_challenges: num_by_phase(&cs.challenge_phase()), constraints, lookups, - lasso_lookups, + lasso_lookup, permutations, max_degree: Some(cs.degree::()), }) From c28f06958fee993eaa470e7fc88873e0965e69e7 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Thu, 2 Nov 2023 23:30:26 +0900 Subject: [PATCH 04/27] Use preprocessed lookup_points_offset value --- plonkish_backend/src/backend/hyperplonk.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index ff5d5f7..d6fdf6b 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -232,8 +232,6 @@ where LassoProver::::lookup_poly(&lookup, &polys); let num_vars = lookup_input_poly.num_vars(); - // why this is 3?? 
- let lookup_points_offset = 3; // get subtable_polys let subtable_polys = table.subtable_polys(); @@ -263,7 +261,7 @@ where ); // Lasso Sumcheck let (lookup_points, lookup_evals) = LassoProver::::prove_sum_check( - lookup_points_offset, + pp.lookup_points_offset, &table, input_poly, &e_polys.iter().collect_vec(), @@ -279,7 +277,7 @@ where // memory_checking let (mem_check_opening_points, mem_check_opening_evals) = LassoProver::::memory_checking( - lookup_points_offset, + pp.lookup_points_offset, table, subtable_polys, dims, @@ -386,7 +384,6 @@ where } let lookup_table = &vp.lasso_table; - let lookup_points_offset = 3; let lookup_comms = LassoVerifier::::read_commitments(&vp.pcs, lookup_table, transcript)?; @@ -398,7 +395,7 @@ where lookup_table, vp.num_vars, vp.lookup_polys_offset, - lookup_points_offset, + vp.lookup_points_offset, &r, transcript, )?; @@ -412,7 +409,7 @@ where LassoVerifier::::memory_checking( vp.num_vars, vp.lookup_polys_offset, - lookup_points_offset, + vp.lookup_points_offset, lookup_table, &beta, &gamma, From 49b36f26f6b95281c616538db34f3dd5545039c1 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Sun, 5 Nov 2023 15:03:18 +0900 Subject: [PATCH 05/27] Cleanup --- plonkish_backend/src/backend/hyperplonk.rs | 4 +- .../src/backend/hyperplonk/preprocessor.rs | 2 +- .../src/backend/hyperplonk/prover.rs | 14 +-- .../src/backend/hyperplonk/verifier.rs | 4 +- plonkish_backend/src/backend/lookup/lasso.rs | 19 +--- .../lookup/lasso/memory_checking/mod.rs | 99 ----------------- .../lookup/lasso/memory_checking/prover.rs | 63 ++++------- .../lookup/lasso/memory_checking/verifier.rs | 103 +++++++++++++++++- .../src/backend/lookup/lasso/prover/mod.rs | 9 +- .../src/backend/lookup/lasso/prover/surge.rs | 4 +- .../src/backend/lookup/lasso/test/mod.rs | 2 +- .../src/backend/lookup/lasso/verifier/mod.rs | 10 +- plonkish_backend/src/frontend/halo2/test.rs | 3 +- 13 files changed, 142 insertions(+), 194 deletions(-) diff --git 
a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index d6fdf6b..93109bd 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -3,12 +3,12 @@ use crate::{ hyperplonk::{ preprocessor::{batch_size, compose, permutation_polys}, prover::{instance_polys, permutation_z_polys, prove_zero_check}, - verifier::{pcs_query, verify_zero_check, zero_check_opening_points_len}, + verifier::{verify_zero_check, zero_check_opening_points_len}, }, lookup::lasso::verifier::LassoVerifier, PlonkishBackend, PlonkishCircuit, PlonkishCircuitInfo, WitnessEncoding, }, - pcs::{Evaluation, PolynomialCommitmentScheme}, + pcs::PolynomialCommitmentScheme, poly::multilinear::MultilinearPolynomial, util::{ arithmetic::{BooleanHypercube, PrimeField}, diff --git a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs index eebd89f..08d53f5 100644 --- a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs +++ b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs @@ -8,7 +8,7 @@ use crate::{ Itertools, }, }; -use std::{array, borrow::Cow, iter, mem}; +use std::{array, iter, mem}; pub(super) fn batch_size(circuit_info: &PlonkishCircuitInfo) -> usize { let num_permutation_polys = circuit_info.permutation_polys().len(); diff --git a/plonkish_backend/src/backend/hyperplonk/prover.rs b/plonkish_backend/src/backend/hyperplonk/prover.rs index 12bdf65..da3fb1b 100644 --- a/plonkish_backend/src/backend/hyperplonk/prover.rs +++ b/plonkish_backend/src/backend/hyperplonk/prover.rs @@ -11,23 +11,19 @@ use crate::{ classic::{ClassicSumCheck, EvaluationsProver}, SumCheck, VirtualPolynomial, }, - poly::{multilinear::MultilinearPolynomial, Polynomial}, + poly::multilinear::MultilinearPolynomial, util::{ - arithmetic::{div_ceil, steps_by, sum, BatchInvert, BooleanHypercube, PrimeField}, + arithmetic::{div_ceil, steps_by, BatchInvert, BooleanHypercube, 
PrimeField}, end_timer, - expression::{CommonPolynomial, Expression, Rotation}, - parallel::{num_threads, par_map_collect, parallelize, parallelize_iter}, + expression::{Expression, Rotation}, + parallel::{par_map_collect, parallelize}, start_timer, transcript::FieldTranscriptWrite, Itertools, }, Error, }; -use std::{ - collections::{HashMap, HashSet}, - hash::Hash, - iter, -}; +use std::iter; pub(crate) fn instance_polys<'a, F: PrimeField>( num_vars: usize, diff --git a/plonkish_backend/src/backend/hyperplonk/verifier.rs b/plonkish_backend/src/backend/hyperplonk/verifier.rs index d8a46f9..5ae5ca4 100644 --- a/plonkish_backend/src/backend/hyperplonk/verifier.rs +++ b/plonkish_backend/src/backend/hyperplonk/verifier.rs @@ -191,8 +191,6 @@ pub(super) fn zero_check_opening_points_len( .map(Query::rotation) .collect::>() .into_iter() - .map(|rotation| { - 1 << rotation.distance() - }) + .map(|rotation| 1 << rotation.distance()) .sum() } diff --git a/plonkish_backend/src/backend/lookup/lasso.rs b/plonkish_backend/src/backend/lookup/lasso.rs index 3056475..65d4edb 100644 --- a/plonkish_backend/src/backend/lookup/lasso.rs +++ b/plonkish_backend/src/backend/lookup/lasso.rs @@ -1,23 +1,10 @@ -use std::{collections::HashSet, fmt::Debug, iter, marker::PhantomData}; +use std::{fmt::Debug, marker::PhantomData}; use halo2_curves::ff::{Field, PrimeField}; -use itertools::Itertools; use crate::{ - backend::lookup::lasso::prover::Surge, - pcs::{CommitmentChunk, Evaluation, PolynomialCommitmentScheme}, - piop::sum_check::{ - classic::{ClassicSumCheck, EvaluationsProver}, - SumCheck, - }, - poly::multilinear::MultilinearPolynomial, - util::{ - arithmetic::BooleanHypercube, - expression::{CommonPolynomial, Expression}, - parallel::parallelize, - transcript::{FieldTranscriptRead, TranscriptWrite}, - }, - Error, + pcs::PolynomialCommitmentScheme, poly::multilinear::MultilinearPolynomial, + util::expression::Expression, }; pub mod memory_checking; diff --git 
a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs index aabb3ec..8f224b4 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs @@ -13,105 +13,6 @@ use crate::{ Error, }; -#[derive(Clone, Debug)] -pub struct Chunk { - chunk_index: usize, - chunk_bits: usize, - pub(crate) memory: Vec>, -} - -impl Chunk { - pub fn chunk_polys_index(&self, offset: usize, num_chunks: usize) -> Vec { - let dim_poly_index = offset + 1 + self.chunk_index; - let read_ts_poly_index = offset + 1 + num_chunks + self.chunk_index; - let final_cts_poly_index = offset + 1 + 2 * num_chunks + self.chunk_index; - vec![dim_poly_index, read_ts_poly_index, final_cts_poly_index] - } - - pub fn new(chunk_index: usize, chunk_bits: usize, memory: Memory) -> Self { - Self { - chunk_index, - chunk_bits, - memory: vec![memory], - } - } - - pub fn num_memories(&self) -> usize { - self.memory.len() - } - - pub fn chunk_bits(&self) -> usize { - self.chunk_bits - } - - pub fn add_memory(&mut self, memory: Memory) { - self.memory.push(memory); - } - - pub fn memory_indices(&self) -> Vec { - self.memory - .iter() - .map(|memory| memory.memory_index) - .collect_vec() - } - - /// check the following relations: - /// - $read(x) == hash(dim(x), E(x), read_ts(x))$ - /// - $write(x) == hash(dim(x), E(x), read_ts(x) + 1)$ - /// - $init(y) == hash(y, T(y), 0)$ - /// - $final_read(y) == hash(y, T(y), final_cts(x))$ - pub fn verify_memories( - &self, - read_xs: &[F], - write_xs: &[F], - init_ys: &[F], - final_read_ys: &[F], - y: &[F], - hash: impl Fn(&F, &F, &F) -> F, - transcript: &mut impl FieldTranscriptRead, - ) -> Result<(F, F, F, Vec), Error> { - let [dim_x, read_ts_poly_x, final_cts_poly_y] = - transcript.read_field_elements(3)?.try_into().unwrap(); - let e_poly_xs = transcript.read_field_elements(self.num_memories())?; - let id_poly_y = 
inner_product( - iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double())) - .take(y.len()) - .collect_vec() - .iter(), - y, - ); - self.memory.iter().enumerate().for_each(|(i, memory)| { - assert_eq!(read_xs[i], hash(&dim_x, &e_poly_xs[i], &read_ts_poly_x)); - assert_eq!( - write_xs[i], - hash(&dim_x, &e_poly_xs[i], &(read_ts_poly_x + F::ONE)) - ); - let subtable_poly_y = memory.subtable_poly.evaluate(y); - assert_eq!(init_ys[i], hash(&id_poly_y, &subtable_poly_y, &F::ZERO)); - assert_eq!( - final_read_ys[i], - hash(&id_poly_y, &subtable_poly_y, &final_cts_poly_y) - ); - }); - Ok((dim_x, read_ts_poly_x, final_cts_poly_y, e_poly_xs)) - } -} - -#[derive(Clone, Debug)] -pub struct Memory { - memory_index: usize, - subtable_poly: MultilinearPolynomial, -} - -impl Memory { - pub fn new(memory_index: usize, subtable_poly: MultilinearPolynomial) -> Self { - Self { - memory_index, - subtable_poly, - } - } -} - #[derive(Clone, Debug)] struct MemoryGKR { init: MultilinearPolynomial, diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs index d76077b..1814c36 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs @@ -5,7 +5,7 @@ use itertools::{chain, Itertools}; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use crate::{ - backend::lookup::lasso::prover::{Chunk, Point}, + backend::lookup::lasso::prover::Chunk, pcs::Evaluation, piop::gkr::prove_grand_product, poly::multilinear::MultilinearPolynomial, @@ -22,10 +22,6 @@ pub struct MemoryCheckingProver<'a, F: PrimeField> { chunks: Vec>, /// GKR initial polynomials for each memory memories: Vec>, - /// random point at which `read_write` polynomials opened - x: Point, - /// random point at which `init_final_read` polynomials opened - y: Point, } impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { @@ 
-81,8 +77,6 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { points_offset, chunks, memories: memories_gkr, - x: Point::default(), - y: Point::default(), } } @@ -168,49 +162,27 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { transcript, )?; - self.chunks.iter().for_each(|chunk| { - let chunk_poly_evals = chunk.chunk_poly_evals(&x, &y); - let e_poly_xs = chunk.e_poly_evals(&x); - transcript.write_field_elements(&chunk_poly_evals).unwrap(); - transcript.write_field_elements(&e_poly_xs).unwrap(); - }); - - self.x = Point { - offset: self.points_offset, - point: x, - }; - self.y = Point { - offset: self.points_offset + 1, - point: y, - }; - - let opening_points = self.opening_points().collect_vec(); - let opening_evals = self.opening_evals().collect_vec(); - - Ok((opening_points, opening_evals)) - } - - pub fn opening_points(&self) -> impl Iterator> { - chain!([self.x.point.clone(), self.y.point.clone()]) - } - - pub fn opening_evals(&self) -> impl Iterator> { let (dim_xs, read_ts_poly_xs, final_cts_poly_xs, e_poly_xs) = self .chunks .iter() .map(|chunk| { - let chunk_poly_evals = chunk.chunk_poly_evals(&self.x.point, &self.y.point); - let x = self.x.offset; - let y = self.y.offset; - let e_poly_xs = chunk.e_poly_evals(&self.x.point); + let chunk_poly_evals = chunk.chunk_poly_evals(&x, &y); + let e_poly_xs = chunk.e_poly_evals(&x); + transcript.write_field_elements(&chunk_poly_evals).unwrap(); + transcript.write_field_elements(&e_poly_xs).unwrap(); + + let x_offset = self.points_offset; + let y_offset = x_offset + 1; ( - Evaluation::new(chunk.dim.offset, x, chunk_poly_evals[0]), - Evaluation::new(chunk.read_ts_poly.offset, x, chunk_poly_evals[1]), - Evaluation::new(chunk.final_cts_poly.offset, y, chunk_poly_evals[2]), + Evaluation::new(chunk.dim.offset, x_offset, chunk_poly_evals[0]), + Evaluation::new(chunk.read_ts_poly.offset, x_offset, chunk_poly_evals[1]), + Evaluation::new(chunk.final_cts_poly.offset, y_offset, chunk_poly_evals[2]), chunk 
.memories() .enumerate() - .map(|(i, memory)| Evaluation::new(memory.e_poly.offset, x, e_poly_xs[i])) + .map(|(i, memory)| { + Evaluation::new(memory.e_poly.offset, x_offset, e_poly_xs[i]) + }) .collect_vec(), ) }) @@ -220,11 +192,16 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { Vec>, Vec>>, )>(); - chain!( + + let opening_points = vec![x, y]; + let opening_evals = chain!( dim_xs, read_ts_poly_xs, final_cts_poly_xs, e_poly_xs.concat() ) + .collect_vec(); + + Ok((opening_points, opening_evals)) } } diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index 8a17b13..f5cdd44 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -4,13 +4,110 @@ use halo2_curves::ff::PrimeField; use itertools::{chain, Itertools}; use crate::{ - pcs::Evaluation, piop::gkr::verify_grand_product, util::transcript::FieldTranscriptRead, Error, + pcs::Evaluation, piop::gkr::verify_grand_product, util::{transcript::FieldTranscriptRead, arithmetic::inner_product}, Error, poly::multilinear::MultilinearPolynomial, }; -use super::Chunk; +#[derive(Clone, Debug)] +pub(in crate::backend::lookup::lasso) struct Chunk { + chunk_index: usize, + chunk_bits: usize, + pub(crate) memory: Vec>, +} + +impl Chunk { + pub fn chunk_polys_index(&self, offset: usize, num_chunks: usize) -> Vec { + let dim_poly_index = offset + 1 + self.chunk_index; + let read_ts_poly_index = offset + 1 + num_chunks + self.chunk_index; + let final_cts_poly_index = offset + 1 + 2 * num_chunks + self.chunk_index; + vec![dim_poly_index, read_ts_poly_index, final_cts_poly_index] + } + + pub fn new(chunk_index: usize, chunk_bits: usize, memory: Memory) -> Self { + Self { + chunk_index, + chunk_bits, + memory: vec![memory], + } + } + + pub fn num_memories(&self) -> usize { + self.memory.len() + } + + pub fn 
chunk_bits(&self) -> usize { + self.chunk_bits + } + + pub fn add_memory(&mut self, memory: Memory) { + self.memory.push(memory); + } + + pub fn memory_indices(&self) -> Vec { + self.memory + .iter() + .map(|memory| memory.memory_index) + .collect_vec() + } + + /// check the following relations: + /// - $read(x) == hash(dim(x), E(x), read_ts(x))$ + /// - $write(x) == hash(dim(x), E(x), read_ts(x) + 1)$ + /// - $init(y) == hash(y, T(y), 0)$ + /// - $final_read(y) == hash(y, T(y), final_cts(x))$ + pub fn verify_memories( + &self, + read_xs: &[F], + write_xs: &[F], + init_ys: &[F], + final_read_ys: &[F], + y: &[F], + hash: impl Fn(&F, &F, &F) -> F, + transcript: &mut impl FieldTranscriptRead, + ) -> Result<(F, F, F, Vec), Error> { + let [dim_x, read_ts_poly_x, final_cts_poly_y] = + transcript.read_field_elements(3)?.try_into().unwrap(); + let e_poly_xs = transcript.read_field_elements(self.num_memories())?; + let id_poly_y = inner_product( + iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double())) + .take(y.len()) + .collect_vec() + .iter(), + y, + ); + self.memory.iter().enumerate().for_each(|(i, memory)| { + assert_eq!(read_xs[i], hash(&dim_x, &e_poly_xs[i], &read_ts_poly_x)); + assert_eq!( + write_xs[i], + hash(&dim_x, &e_poly_xs[i], &(read_ts_poly_x + F::ONE)) + ); + let subtable_poly_y = memory.subtable_poly.evaluate(y); + assert_eq!(init_ys[i], hash(&id_poly_y, &subtable_poly_y, &F::ZERO)); + assert_eq!( + final_read_ys[i], + hash(&id_poly_y, &subtable_poly_y, &final_cts_poly_y) + ); + }); + Ok((dim_x, read_ts_poly_x, final_cts_poly_y, e_poly_xs)) + } +} + +#[derive(Clone, Debug)] +pub(in crate::backend::lookup::lasso) struct Memory { + memory_index: usize, + subtable_poly: MultilinearPolynomial, +} + +impl Memory { + pub fn new(memory_index: usize, subtable_poly: MultilinearPolynomial) -> Self { + Self { + memory_index, + subtable_poly, + } + } +} #[derive(Clone, Debug)] -pub struct MemoryCheckingVerifier { +pub(in 
crate::backend::lookup::lasso) struct MemoryCheckingVerifier { /// offset of MemoryCheckingProver instance opening points points_offset: usize, /// chunks with the same bits size diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs index d4c2171..c1dc161 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs @@ -25,13 +25,6 @@ mod surge; pub use surge::Surge; -#[derive(Default)] -pub struct Point { - /// point offset in batch opening - pub(crate) offset: usize, - pub(crate) point: Vec, -} - #[derive(Clone, Debug)] pub struct Poly { /// polynomial offset in batch opening @@ -254,7 +247,7 @@ impl< }); memories.enumerate().for_each(|(memory_index, memory)| { let chunk_index = table.memory_to_chunk_index(memory_index); - if let Some(_) = chunk_map.get(&chunk_index) { + if chunk_map.get(&chunk_index).is_some() { chunk_map.entry(chunk_index).and_modify(|chunk| { chunk.add_memory(memory); }); diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 404dda1..08d0f90 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -11,11 +11,11 @@ use crate::{ classic::{ClassicSumCheck, EvaluationsProver}, SumCheck as _, VirtualPolynomial, }, - poly::{multilinear::MultilinearPolynomial, Polynomial}, + poly::multilinear::MultilinearPolynomial, util::{ arithmetic::{fe_to_bits_le, usize_from_bits_le}, expression::{Expression, Query, Rotation}, - transcript::{FieldTranscriptRead, TranscriptWrite}, + transcript::TranscriptWrite, }, Error, }; diff --git a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs index 8683948..d3471f1 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs +++ 
b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs @@ -6,7 +6,7 @@ use itertools::Itertools; use crate::{ poly::multilinear::MultilinearPolynomial, util::{ - arithmetic::{inner_product, powers, split_bits}, + arithmetic::{inner_product, split_bits}, expression::Expression, }, }; diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 4f1a629..4f6d704 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -15,7 +15,7 @@ use crate::{ }; use super::{ - memory_checking::{verifier::MemoryCheckingVerifier, Chunk, Memory}, + memory_checking::verifier::{MemoryCheckingVerifier, Chunk, Memory}, prover::Surge, DecomposableTable, }; @@ -64,7 +64,7 @@ impl< ) -> Result<(Vec>, Vec>), Error> { let expression = Surge::::sum_check_expression(&table); let claim = transcript.read_field_element()?; - let (eval, x) = ClassicSumCheck::>::verify( + let (_, x) = ClassicSumCheck::>::verify( &(), num_vars, expression.degree(), @@ -97,7 +97,7 @@ impl< let chunk_bits = chunk_bits[chunk_index]; let subtable_poly = &subtable_polys[table.memory_to_subtable_index(memory_index)]; let memory = Memory::new(memory_index, subtable_poly.clone()); - if let Some(_) = chunk_map.get(&chunk_index) { + if chunk_map.get(&chunk_index).is_some() { chunk_map.entry(chunk_index).and_modify(|chunk| { chunk.add_memory(memory); }); @@ -117,7 +117,7 @@ impl< chunks.into_iter().map(|(_, chunk)| chunk).collect_vec() } - pub fn prepare_memory_checking<'a>( + fn prepare_memory_checking( points_offset: usize, table: &Box>, ) -> Vec> { @@ -130,7 +130,7 @@ impl< .enumerate() .for_each(|(chunk_index, chunk)| { let chunk_bits = chunk_bits[chunk_index]; - if let Some(_) = chunk_map.get(&chunk_bits) { + if chunk_map.get(&chunk_bits).is_some() { chunk_map.entry(chunk_bits).and_modify(|chunks| { chunks.push(chunk); }); diff --git 
a/plonkish_backend/src/frontend/halo2/test.rs b/plonkish_backend/src/frontend/halo2/test.rs index 8539243..6eb2d73 100644 --- a/plonkish_backend/src/frontend/halo2/test.rs +++ b/plonkish_backend/src/frontend/halo2/test.rs @@ -1,6 +1,5 @@ use crate::backend::{ hyperplonk::{util, HyperPlonk}, - lookup::logup::LogUp, test::run_plonkish_backend, PlonkishCircuit, }; @@ -9,7 +8,7 @@ use crate::{ pcs::multilinear::MultilinearKzg, util::transcript::Keccak256Transcript, }; -use halo2_curves::bn256::{self, Bn256, Fr}; +use halo2_curves::bn256::{Bn256, Fr}; use rand::rngs::OsRng; #[test] From 860691e3e85218844e11ba469678e1a0da8e9fb4 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Mon, 6 Nov 2023 17:30:04 +0900 Subject: [PATCH 06/27] Add subtable_indices method to DecomposableTable trait --- plonkish_backend/src/backend/lookup/lasso.rs | 4 ++++ .../src/backend/lookup/lasso/prover/surge.rs | 19 ++++--------------- 2 files changed, 8 insertions(+), 15 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso.rs b/plonkish_backend/src/backend/lookup/lasso.rs index 65d4edb..4ddd84c 100644 --- a/plonkish_backend/src/backend/lookup/lasso.rs +++ b/plonkish_backend/src/backend/lookup/lasso.rs @@ -35,6 +35,10 @@ pub trait DecomposableTable: Debug + Sync + DecomposableTableClon /// Each chunk can have different bits. 
fn chunk_bits(&self) -> Vec; + /// Returns the indices of each subtable lookups + /// The length of `index_bits` is same as actual bit length of table index + fn subtable_indices(&self, index_bits: Vec) -> Vec>; + fn memory_to_subtable_index(&self, memory_index: usize) -> usize; fn memory_to_chunk_index(&self, memory_index: usize) -> usize; diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 08d0f90..6c7a637 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -53,18 +53,6 @@ impl< .collect_vec() } - fn split_by_chunk_bits(bits: &[bool], chunk_bits: &[usize]) -> Vec> { - let mut offset = 0; - let mut chunked_bits = vec![]; - chunk_bits.iter().for_each(|chunk_bits| { - let mut chunked = vec![true; *chunk_bits]; - chunked.copy_from_slice(&bits[offset..offset + chunk_bits]); - chunked_bits.push(chunked); - offset = offset + chunk_bits; - }); - chunked_bits - } - /// computes dim_1, ..., dim_c where c == DecomposableTable::C pub fn commit( &mut self, @@ -73,13 +61,14 @@ impl< ) -> Vec> { let num_rows: usize = 1 << nz_poly.num_vars(); let num_chunks = table.num_chunks(); - let chunk_bits = table.chunk_bits(); // get indices of non-zero columns of all rows where each index is chunked let indices = (0..num_rows) .map(|i| { - let index_bits = fe_to_bits_le(nz_poly[i]); + let mut index_bits = fe_to_bits_le(nz_poly[i]); + index_bits.truncate(table.chunk_bits().iter().sum()); + let mut chunked_index = repeat(0).take(num_chunks).collect_vec(); - let chunked_index_bits = Self::split_by_chunk_bits(&index_bits, &chunk_bits); + let chunked_index_bits = table.subtable_indices(index_bits); chunked_index .iter_mut() .zip(chunked_index_bits) From 8026dd03f8a2bb443e8fdef1fb535f0ff94331dd Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Mon, 6 Nov 2023 17:31:40 +0900 Subject: [PATCH 07/27] Fix ANDTable test case --- 
plonkish_backend/src/backend/hyperplonk.rs | 3 +- .../src/backend/hyperplonk/util.rs | 27 ++++------- .../lookup/lasso/memory_checking/prover.rs | 8 +--- .../lookup/lasso/memory_checking/verifier.rs | 6 ++- .../src/backend/lookup/lasso/test/mod.rs | 48 ++++++++++++++----- .../src/backend/lookup/lasso/verifier/mod.rs | 2 +- plonkish_backend/src/util/arithmetic.rs | 12 +++++ 7 files changed, 65 insertions(+), 41 deletions(-) diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 93109bd..915c263 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -135,7 +135,8 @@ where + circuit_info.num_witness_polys.iter().sum::() + permutation_polys.len() + num_permutation_z_polys; - let lookup_points_offset = zero_check_opening_points_len(&expression, circuit_info.num_instances.len()); + let lookup_points_offset = + zero_check_opening_points_len(&expression, circuit_info.num_instances.len()); let lasso_lookup = &circuit_info.lasso_lookup[0]; let vp = HyperPlonkVerifierParam { diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index 4f2a2c8..bc59f27 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -93,8 +93,11 @@ pub fn vanilla_plonk_with_lasso_lookup_circuit_info( ) -> PlonkishCircuitInfo { let [pi, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = &array::from_fn(|poly| Query::new(poly, Rotation::cur())).map(Expression::::Polynomial); - let lasso_lookup_input = w_l.clone(); - let lasso_lookup_indices = w_r.clone(); + let lasso_lookup_input = w_o.clone(); + let lasso_lookup_indices = Expression::DistributePowers( + vec![w_l.clone(), w_r.clone()], + Box::new(Expression::Constant(F::from_u128(1 << 64))), + ); let lasso_table = Box::new(AndTable::::new()); let chunk_bits = lasso_table.chunk_bits(); let num_vars = 
chunk_bits.iter().chain([&num_vars]).max().unwrap(); @@ -373,24 +376,10 @@ pub fn rand_vanilla_plonk_with_lasso_lookup_circuit( for poly in [10, 11, 12] { permutation.copy((poly, 1), (poly, 1)); } - let and_table = AndTable::::new(); - let subtable_poly = &and_table.subtable_polys()[0]; for idx in 0..size - 1 { - let (w_l, w_r) = { - let index = witness_rng.next_u64(); - let index_bits = fe_to_bits_le(F::from(index)); - assert_eq!(usize_from_bits_le(&index_bits) as u64, index); - let operands = index_bits[..64] - .chunks(16) - .map(|chunked_index_bits| { - let chunked_index = usize_from_bits_le(chunked_index_bits); - subtable_poly[chunked_index] - }) - .collect_vec(); - let value = and_table.combine_lookups(&operands); - (value, F::from(index)) - }; - let values = vec![(10, w_l), (11, w_r)]; + let [w_l, w_r] = [(); 2].map(|_| witness_rng.next_u64()); + let w_o = w_l & w_r; + let values = vec![(10, F::from(w_l)), (11, F::from(w_r)), (12, F::from(w_o))]; for (poly, value) in values { polys[poly][idx] = value; } diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs index 1814c36..479b0c6 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs @@ -5,12 +5,8 @@ use itertools::{chain, Itertools}; use rayon::prelude::{IntoParallelIterator, ParallelIterator}; use crate::{ - backend::lookup::lasso::prover::Chunk, - pcs::Evaluation, - piop::gkr::prove_grand_product, - poly::multilinear::MultilinearPolynomial, - util::transcript::FieldTranscriptWrite, - Error, + backend::lookup::lasso::prover::Chunk, pcs::Evaluation, piop::gkr::prove_grand_product, + poly::multilinear::MultilinearPolynomial, util::transcript::FieldTranscriptWrite, Error, }; use super::MemoryGKR; diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs 
b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index f5cdd44..e29df27 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -4,7 +4,11 @@ use halo2_curves::ff::PrimeField; use itertools::{chain, Itertools}; use crate::{ - pcs::Evaluation, piop::gkr::verify_grand_product, util::{transcript::FieldTranscriptRead, arithmetic::inner_product}, Error, poly::multilinear::MultilinearPolynomial, + pcs::Evaluation, + piop::gkr::verify_grand_product, + poly::multilinear::MultilinearPolynomial, + util::{arithmetic::inner_product, transcript::FieldTranscriptRead}, + Error, }; #[derive(Clone, Debug)] diff --git a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs index d3471f1..57f64fd 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs @@ -1,12 +1,12 @@ use std::{iter, marker::PhantomData}; use halo2_curves::ff::PrimeField; -use itertools::Itertools; +use itertools::{izip, Itertools}; use crate::{ poly::multilinear::MultilinearPolynomial, util::{ - arithmetic::{inner_product, split_bits}, + arithmetic::{inner_product, split_bits, split_by_chunk_bits}, expression::Expression, }, }; @@ -22,13 +22,14 @@ impl AndTable { } } +/// T[X || Y] = T_1[X_1 || Y_1] + T_2[X_2 || Y_2] * 2^8 + ... 
+ T_8[X_8 || Y_8] * 2^56 impl DecomposableTable for AndTable { fn num_chunks(&self) -> usize { - 4 + 8 } fn num_memories(&self) -> usize { - 4 + 8 } fn subtable_polys(&self) -> Vec> { @@ -43,23 +44,44 @@ impl DecomposableTable for AndTable { } fn chunk_bits(&self) -> Vec { - vec![16, 16, 16, 16] + vec![16; 8] } - fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression { - Expression::DistributePowers( - expressions, - Box::new(Expression::Constant(F::from(2 << 16))), + fn subtable_indices(&self, index_bits: Vec) -> Vec> { + assert!(index_bits.len() % 2 == 0); + let chunk_bits = self + .chunk_bits() + .iter() + .map(|chunk_bits| chunk_bits / 2) + .collect_vec(); + let (lhs, rhs) = index_bits.split_at(index_bits.len() / 2); + izip!( + split_by_chunk_bits(lhs, &chunk_bits), + split_by_chunk_bits(rhs, &chunk_bits) ) + .map(|(chunked_lhs_bits, chunked_rhs_bits)| { + iter::empty() + .chain(chunked_lhs_bits) + .chain(chunked_rhs_bits) + .collect_vec() + }) + .collect_vec() + } + + fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression { + Expression::DistributePowers(expressions, Box::new(Expression::Constant(F::from(1 << 8)))) } fn combine_lookups(&self, operands: &[F]) -> F { + let weight = F::from(1 << 8); inner_product( operands, - iter::successors(Some(F::ONE), |power_of_two| Some(power_of_two.double())) - .take(operands.len()) - .collect_vec() - .iter(), + iter::successors(Some(F::ONE), |power_of_weight| { + Some(*power_of_weight * weight) + }) + .take(operands.len()) + .collect_vec() + .iter(), ) } diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 4f6d704..0b95825 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -15,7 +15,7 @@ use crate::{ }; use super::{ - memory_checking::verifier::{MemoryCheckingVerifier, Chunk, Memory}, + memory_checking::verifier::{Chunk, 
Memory, MemoryCheckingVerifier}, prover::Surge, DecomposableTable, }; diff --git a/plonkish_backend/src/util/arithmetic.rs b/plonkish_backend/src/util/arithmetic.rs index 3189b24..100ea8e 100644 --- a/plonkish_backend/src/util/arithmetic.rs +++ b/plonkish_backend/src/util/arithmetic.rs @@ -220,6 +220,18 @@ pub fn split_bits(item: usize, num_bits: usize) -> (usize, usize) { (high_chunk, low_chunk) } +pub fn split_by_chunk_bits(bits: &[bool], chunk_bits: &[usize]) -> Vec> { + let mut offset = 0; + let mut chunked_bits = vec![]; + chunk_bits.iter().for_each(|chunk_bits| { + let mut chunked = vec![true; *chunk_bits]; + chunked.copy_from_slice(&bits[offset..offset + chunk_bits]); + chunked_bits.push(chunked); + offset = offset + chunk_bits; + }); + chunked_bits +} + #[cfg(test)] mod test { use crate::util::arithmetic; From 58bb57afa52fe9e955ee7f782c53cdd7b9a75e34 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Mon, 6 Nov 2023 18:52:55 +0900 Subject: [PATCH 08/27] Remove num_chunks from DecomposableTable trait --- .../src/backend/hyperplonk/util.rs | 2 +- plonkish_backend/src/backend/lookup/lasso.rs | 2 - .../src/backend/lookup/lasso/prover/mod.rs | 6 +- .../src/backend/lookup/lasso/prover/surge.rs | 4 +- .../src/backend/lookup/lasso/test/mod.rs | 97 +------------------ .../src/backend/lookup/lasso/verifier/mod.rs | 8 +- 6 files changed, 12 insertions(+), 107 deletions(-) diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index bc59f27..81abcc2 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -4,7 +4,7 @@ use crate::{ preprocessor::{compose, permutation_polys}, prover::{instance_polys, permutation_z_polys}, }, - lookup::lasso::{test::AndTable, DecomposableTable}, + lookup::lasso::{test::and::AndTable, DecomposableTable}, mock::MockCircuit, PlonkishCircuit, PlonkishCircuitInfo, }, diff --git a/plonkish_backend/src/backend/lookup/lasso.rs 
b/plonkish_backend/src/backend/lookup/lasso.rs index 4ddd84c..e3f1189 100644 --- a/plonkish_backend/src/backend/lookup/lasso.rs +++ b/plonkish_backend/src/backend/lookup/lasso.rs @@ -19,8 +19,6 @@ pub trait Subtable { /// This is a trait that contains information about decomposable table to which /// backend prover and verifier can ask pub trait DecomposableTable: Debug + Sync + DecomposableTableClone { - fn num_chunks(&self) -> usize; - fn num_memories(&self) -> usize; /// Returns multilinear extension polynomials of each subtable diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs index c1dc161..af03eb6 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs @@ -212,7 +212,7 @@ impl< subtable_polys: &[&MultilinearPolynomial], nz: &Vec<&[usize]>, ) -> Vec> { - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); let num_memories = table.num_memories(); assert_eq!(nz.len(), num_chunks); let num_reads = nz[0].len(); @@ -264,7 +264,7 @@ impl< // sanity check { - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); assert_eq!(chunk_map.len(), num_chunks); } @@ -382,7 +382,7 @@ impl< lookup_nz_poly: &MultilinearPolynomial, transcript: &mut impl TranscriptWrite, ) -> Result<(Vec>>, Vec>), Error> { - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); // commit to input_poly let lookup_input_comm = Pcs::commit_and_write(&pp, &lookup_input_poly, transcript)?; diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 6c7a637..ade0fc8 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -60,7 +60,7 @@ impl< nz_poly: &MultilinearPolynomial, ) -> Vec> { let num_rows: usize = 1 << 
nz_poly.num_vars(); - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); // get indices of non-zero columns of all rows where each index is chunked let indices = (0..num_rows) .map(|i| { @@ -102,7 +102,7 @@ impl< &self, table: &Box>, ) -> (Vec>, Vec>) { - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); let mut read_ts_polys = Vec::with_capacity(num_chunks); let mut final_cts_polys = Vec::with_capacity(num_chunks); let chunk_bits = table.chunk_bits(); diff --git a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs index 57f64fd..c5ab6df 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/mod.rs @@ -1,95 +1,2 @@ -use std::{iter, marker::PhantomData}; - -use halo2_curves::ff::PrimeField; -use itertools::{izip, Itertools}; - -use crate::{ - poly::multilinear::MultilinearPolynomial, - util::{ - arithmetic::{inner_product, split_bits, split_by_chunk_bits}, - expression::Expression, - }, -}; - -use super::DecomposableTable; - -#[derive(Clone, Debug)] -pub struct AndTable(PhantomData); - -impl AndTable { - pub fn new() -> Self { - Self(PhantomData) - } -} - -/// T[X || Y] = T_1[X_1 || Y_1] + T_2[X_2 || Y_2] * 2^8 + ... 
+ T_8[X_8 || Y_8] * 2^56 -impl DecomposableTable for AndTable { - fn num_chunks(&self) -> usize { - 8 - } - - fn num_memories(&self) -> usize { - 8 - } - - fn subtable_polys(&self) -> Vec> { - let memory_size = 1 << 16; - let mut evals = vec![]; - (0..memory_size).for_each(|i| { - let (lhs, rhs) = split_bits(i, 8); - let result = F::from((lhs & rhs) as u64); - evals.push(result) - }); - vec![MultilinearPolynomial::new(evals)] - } - - fn chunk_bits(&self) -> Vec { - vec![16; 8] - } - - fn subtable_indices(&self, index_bits: Vec) -> Vec> { - assert!(index_bits.len() % 2 == 0); - let chunk_bits = self - .chunk_bits() - .iter() - .map(|chunk_bits| chunk_bits / 2) - .collect_vec(); - let (lhs, rhs) = index_bits.split_at(index_bits.len() / 2); - izip!( - split_by_chunk_bits(lhs, &chunk_bits), - split_by_chunk_bits(rhs, &chunk_bits) - ) - .map(|(chunked_lhs_bits, chunked_rhs_bits)| { - iter::empty() - .chain(chunked_lhs_bits) - .chain(chunked_rhs_bits) - .collect_vec() - }) - .collect_vec() - } - - fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression { - Expression::DistributePowers(expressions, Box::new(Expression::Constant(F::from(1 << 8)))) - } - - fn combine_lookups(&self, operands: &[F]) -> F { - let weight = F::from(1 << 8); - inner_product( - operands, - iter::successors(Some(F::ONE), |power_of_weight| { - Some(*power_of_weight * weight) - }) - .take(operands.len()) - .collect_vec() - .iter(), - ) - } - - fn memory_to_chunk_index(&self, memory_index: usize) -> usize { - memory_index - } - - fn memory_to_subtable_index(&self, memory_index: usize) -> usize { - 0 - } -} +pub mod and; +pub mod range; diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 0b95825..9efe52e 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -36,7 +36,7 @@ impl< transcript: &mut impl TranscriptRead, ) -> 
Result, Error> { // read input_comm, dim_comms - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); let num_memories = table.num_memories(); let input_comm = Pcs::read_commitment(vp, transcript)?; let dim_comms = Pcs::read_commitments(vp, num_chunks, transcript)?; @@ -73,7 +73,7 @@ impl< )?; let points = vec![r.to_vec(), x]; let pcs_query = Surge::::pcs_query(&expression, 0); - let e_polys_offset = polys_offset + 1 + table.num_chunks() * 3; + let e_polys_offset = polys_offset + 1 + table.chunk_bits().len() * 3; let evals = pcs_query .iter() .map(|query| { @@ -108,7 +108,7 @@ impl< // sanity check { - let num_chunks = table.num_chunks(); + let num_chunks = table.chunk_bits().len(); assert_eq!(chunk_map.len(), num_chunks); } @@ -162,7 +162,7 @@ impl< .iter() .map(|memory_checking| { memory_checking.verify( - table.num_chunks(), + table.chunk_bits().len(), num_reads, polys_offset, &gamma, From 5d6e1550c3a4495f5fc9447bf1882d6c47670b90 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Tue, 7 Nov 2023 16:16:49 +0900 Subject: [PATCH 09/27] Cleanup test util --- plonkish_backend/src/backend/hyperplonk.rs | 14 +- .../src/backend/hyperplonk/util.rs | 82 ------ .../src/backend/lookup/lasso/test/and.rs | 250 ++++++++++++++++++ 3 files changed, 252 insertions(+), 94 deletions(-) create mode 100644 plonkish_backend/src/backend/lookup/lasso/test/and.rs diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 915c263..f45c8a5 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -473,10 +473,7 @@ mod test { use crate::{ backend::{ hyperplonk::{ - util::{ - rand_vanilla_plonk_circuit, rand_vanilla_plonk_with_lasso_lookup_circuit, - rand_vanilla_plonk_with_lookup_circuit, - }, + util::{rand_vanilla_plonk_circuit, rand_vanilla_plonk_with_lookup_circuit}, HyperPlonk, }, test::run_plonkish_backend, @@ -514,17 +511,10 @@ mod test { 
rand_vanilla_plonk_with_lookup_circuit(num_vars, seeded_std_rng(), seeded_std_rng()) }); } - - #[test] - fn [<$name _hyperplonk_vanilla_plonk_with_lasso_lookup>]() { - run_plonkish_backend::<_, HyperPlonk<$pcs>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| { - rand_vanilla_plonk_with_lasso_lookup_circuit(num_vars, seeded_std_rng(), seeded_std_rng()) - }); - } } }; ($name:ident, $pcs:ty) => { - tests!($name, $pcs, 15..16); + tests!($name, $pcs, 2..16); }; } diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index 81abcc2..e4f6e0a 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -85,36 +85,6 @@ pub fn vanilla_plonk_with_lookup_circuit_info( } } -pub fn vanilla_plonk_with_lasso_lookup_circuit_info( - num_vars: usize, - num_instances: usize, - preprocess_polys: [Vec; 9], - permutations: Vec>, -) -> PlonkishCircuitInfo { - let [pi, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = - &array::from_fn(|poly| Query::new(poly, Rotation::cur())).map(Expression::::Polynomial); - let lasso_lookup_input = w_o.clone(); - let lasso_lookup_indices = Expression::DistributePowers( - vec![w_l.clone(), w_r.clone()], - Box::new(Expression::Constant(F::from_u128(1 << 64))), - ); - let lasso_table = Box::new(AndTable::::new()); - let chunk_bits = lasso_table.chunk_bits(); - let num_vars = chunk_bits.iter().chain([&num_vars]).max().unwrap(); - PlonkishCircuitInfo { - k: *num_vars, - num_instances: vec![num_instances], - preprocess_polys: preprocess_polys.to_vec(), - num_witness_polys: vec![3], - num_challenges: vec![0], - constraints: vec![], - lookups: vec![vec![]], - lasso_lookup: vec![(lasso_lookup_input, lasso_lookup_indices, lasso_table)], - permutations, - max_degree: Some(4), - } -} - pub fn vanilla_plonk_with_lookup_expression(num_vars: usize) -> Expression { let circuit_info = vanilla_plonk_with_lookup_circuit_info( num_vars, 
@@ -345,58 +315,6 @@ pub fn rand_vanilla_plonk_with_lookup_circuit( ) } -pub fn rand_vanilla_plonk_with_lasso_lookup_circuit( - num_vars: usize, - mut preprocess_rng: impl RngCore, - mut witness_rng: impl RngCore, -) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { - let num_vars = 16; - let size = 1 << num_vars; - let mut polys = [(); 13].map(|_| vec![F::ZERO; size]); - - let [t_l, t_r, t_o] = [(); 3].map(|_| { - iter::empty() - .chain([F::ZERO, F::ZERO]) - .chain(iter::repeat_with(|| F::random(&mut preprocess_rng))) - .take(size) - .collect_vec() - }); - polys[7] = t_l; - polys[8] = t_r; - polys[9] = t_o; - - let instances = rand_vec(num_vars, &mut witness_rng); - polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); - let instance_rows = BooleanHypercube::new(num_vars) - .iter() - .take(num_vars + 1) - .collect::>(); - - let mut permutation = Permutation::default(); - for poly in [10, 11, 12] { - permutation.copy((poly, 1), (poly, 1)); - } - for idx in 0..size - 1 { - let [w_l, w_r] = [(); 2].map(|_| witness_rng.next_u64()); - let w_o = w_l & w_r; - let values = vec![(10, F::from(w_l)), (11, F::from(w_r)), (12, F::from(w_o))]; - for (poly, value) in values { - polys[poly][idx] = value; - } - } - let [_, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = polys; - let circuit_info = vanilla_plonk_with_lasso_lookup_circuit_info( - num_vars, - instances.len(), - [q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o], - permutation.into_cycles(), - ); - ( - circuit_info, - MockCircuit::new(vec![instances], vec![w_l, w_r, w_o]), - ) -} - pub fn rand_vanilla_plonk_with_lookup_assignment( num_vars: usize, mut preprocess_rng: impl RngCore, diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs new file mode 100644 index 0000000..8094b80 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -0,0 +1,250 @@ +use std::{iter, marker::PhantomData}; + +use 
halo2_curves::ff::PrimeField; +use itertools::{izip, Itertools}; + +use crate::{ + backend::lookup::lasso::DecomposableTable, + poly::multilinear::MultilinearPolynomial, + util::{ + arithmetic::{inner_product, split_bits, split_by_chunk_bits}, + expression::Expression, + }, +}; + +#[derive(Clone, Debug)] +pub struct AndTable(PhantomData); + +impl AndTable { + pub fn new() -> Self { + Self(PhantomData) + } +} + +/// T[X || Y] = T_1[X_1 || Y_1] + T_2[X_2 || Y_2] * 2^8 + ... + T_8[X_8 || Y_8] * 2^56 +impl DecomposableTable for AndTable { + fn num_memories(&self) -> usize { + 8 + } + + fn subtable_polys(&self) -> Vec> { + let memory_size = 1 << 16; + let mut evals = vec![]; + (0..memory_size).for_each(|i| { + let (lhs, rhs) = split_bits(i, 8); + let result = F::from((lhs & rhs) as u64); + evals.push(result) + }); + vec![MultilinearPolynomial::new(evals)] + } + + fn chunk_bits(&self) -> Vec { + vec![16; 8] + } + + fn subtable_indices(&self, index_bits: Vec) -> Vec> { + assert!(index_bits.len() % 2 == 0); + let chunk_bits = self + .chunk_bits() + .iter() + .map(|chunk_bits| chunk_bits / 2) + .collect_vec(); + let (lhs, rhs) = index_bits.split_at(index_bits.len() / 2); + izip!( + split_by_chunk_bits(lhs, &chunk_bits), + split_by_chunk_bits(rhs, &chunk_bits) + ) + .map(|(chunked_lhs_bits, chunked_rhs_bits)| { + iter::empty() + .chain(chunked_lhs_bits) + .chain(chunked_rhs_bits) + .collect_vec() + }) + .collect_vec() + } + + fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression { + Expression::DistributePowers(expressions, Box::new(Expression::Constant(F::from(1 << 8)))) + } + + fn combine_lookups(&self, operands: &[F]) -> F { + let weight = F::from(1 << 8); + inner_product( + operands, + iter::successors(Some(F::ONE), |power_of_weight| { + Some(*power_of_weight * weight) + }) + .take(operands.len()) + .collect_vec() + .iter(), + ) + } + + fn memory_to_chunk_index(&self, memory_index: usize) -> usize { + memory_index + } + + fn 
memory_to_subtable_index(&self, memory_index: usize) -> usize { + 0 + } +} + +#[cfg(test)] +mod test { + use std::{iter, array}; + + use super::AndTable; + use crate::{ + backend::{ + hyperplonk::{HyperPlonk, prover::instance_polys, util::Permutation}, + test::run_plonkish_backend, lookup::lasso::DecomposableTable, PlonkishCircuitInfo, PlonkishCircuit, mock::MockCircuit, + }, + pcs::{ + multilinear::{ + Gemini, MultilinearBrakedown, MultilinearHyrax, MultilinearIpa, MultilinearKzg, + Zeromorph, + }, + univariate::UnivariateKzg, + }, + util::{ + code::BrakedownSpec6, hash::Keccak256, test::{seeded_std_rng, rand_vec, rand_idx}, + transcript::Keccak256Transcript, arithmetic::{usize_from_bits_le, fe_to_bits_le}, expression::{Query, Rotation, Expression}, + }, poly::Polynomial, + }; + use halo2_curves::{ + bn256::{self, Bn256}, + grumpkin, ff::PrimeField, + }; + use itertools::Itertools; + use num_integer::Integer; + use rand::RngCore; + + fn rand_vanilla_plonk_with_lasso_lookup_circuit( + num_vars: usize, + table: Box>, + mut preprocess_rng: impl RngCore, + mut witness_rng: impl RngCore, + ) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { + let size = 1 << num_vars; + let mut polys = [(); 13].map(|_| vec![F::ZERO; size]); + + let [t_l, t_r, t_o] = [(); 3].map(|_| { + iter::empty() + .chain([F::ZERO, F::ZERO]) + .chain(iter::repeat_with(|| F::random(&mut preprocess_rng))) + .take(size) + .collect_vec() + }); + polys[7] = t_l; + polys[8] = t_r; + polys[9] = t_o; + + let instances = rand_vec(num_vars, &mut witness_rng); + polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); + + let mut permutation = Permutation::default(); + for poly in [10, 11, 12] { + permutation.copy((poly, 1), (poly, 1)); + } + for idx in 0..size - 1 { + let use_copy = preprocess_rng.next_u32().is_even() && idx > 1; + let [w_l, w_r, w_o] = if use_copy { + let [l_copy_idx, r_copy_idx] = [(); 2].map(|_| { + ( + rand_idx(10..13, &mut preprocess_rng), + rand_idx(1..idx, &mut 
preprocess_rng), + ) + }); + permutation.copy(l_copy_idx, (10, idx)); + permutation.copy(r_copy_idx, (11, idx)); + let w_l = polys[l_copy_idx.0][l_copy_idx.1]; + let w_r = polys[r_copy_idx.0][r_copy_idx.1]; + let w_o = F::from( + (usize_from_bits_le(&fe_to_bits_le(w_l)) & usize_from_bits_le(&fe_to_bits_le(w_r))) + as u64, + ); + [w_l, w_r, w_o] + } else { + let [w_l, w_r] = [(); 2].map(|_| witness_rng.next_u64()); + let w_o = w_l & w_r; + [F::from(w_l), F::from(w_r), F::from(w_o)] + }; + + let values = vec![(10, w_l), (11, w_r), (12, w_o)]; + for (poly, value) in values { + polys[poly][idx] = value; + } + } + let [_, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = polys; + let circuit_info = vanilla_plonk_with_lasso_lookup_circuit_info( + num_vars, + instances.len(), + [q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o], + table, + permutation.into_cycles(), + ); + ( + circuit_info, + MockCircuit::new(vec![instances], vec![w_l, w_r, w_o]), + ) + } + + fn vanilla_plonk_with_lasso_lookup_circuit_info( + num_vars: usize, + num_instances: usize, + preprocess_polys: [Vec; 9], + table: Box>, + permutations: Vec>, + ) -> PlonkishCircuitInfo { + let [pi, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = + &array::from_fn(|poly| Query::new(poly, Rotation::cur())).map(Expression::::Polynomial); + let lasso_lookup_input = w_o.clone(); + let lasso_lookup_indices = Expression::DistributePowers( + vec![w_l.clone(), w_r.clone()], + Box::new(Expression::Constant(F::from_u128(1 << 64))), + ); + let chunk_bits = table.chunk_bits(); + let num_vars = chunk_bits.iter().chain([&num_vars]).max().unwrap(); + PlonkishCircuitInfo { + k: *num_vars, + num_instances: vec![num_instances], + preprocess_polys: preprocess_polys.to_vec(), + num_witness_polys: vec![3], + num_challenges: vec![0], + constraints: vec![], + lookups: vec![vec![]], + lasso_lookup: vec![(lasso_lookup_input, lasso_lookup_indices, table)], + permutations, + max_degree: Some(4), + } + } + + 
macro_rules! test { + ($name:ident, $f:ty, $pcs:ty, $num_vars_range:expr) => { + paste::paste! { + #[test] + fn [<$name _hyperplonk_vanilla_plonk_with_lasso_lookup>]() { + run_plonkish_backend::<_, HyperPlonk<$pcs>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| { + let table = Box::new(AndTable::<$f>::new()); + rand_vanilla_plonk_with_lasso_lookup_circuit(num_vars, table, seeded_std_rng(), seeded_std_rng()) + }); + } + } + }; + ($name:ident, $f:ty, $pcs:ty) => { + test!($name, $f, $pcs, 16..17); + }; + } + + test!(brakedown, bn256::Fr, MultilinearBrakedown); + test!( + hyrax, + grumpkin::Fr, + MultilinearHyrax, + 5..16 + ); + test!(ipa, grumpkin::Fr, MultilinearIpa); + test!(kzg, bn256::Fr, MultilinearKzg); + test!(gemini_kzg, bn256::Fr, Gemini>); + test!(zeromorph_kzg, bn256::Fr, Zeromorph>); +} From 2ed82387893e4f2e8dd7e2f865df3a95233df0cd Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Tue, 7 Nov 2023 19:02:22 +0900 Subject: [PATCH 10/27] Cleanup --- plonkish_backend/src/backend.rs | 2 +- plonkish_backend/src/backend/hyperplonk.rs | 150 ++++-------------- .../src/backend/hyperplonk/prover.rs | 111 ++++++++++++- .../src/backend/hyperplonk/util.rs | 4 +- .../src/backend/hyperplonk/verifier.rs | 80 +++++++++- .../src/backend/lookup/lasso/test/and.rs | 40 +++-- plonkish_backend/src/frontend/halo2.rs | 3 +- 7 files changed, 241 insertions(+), 149 deletions(-) diff --git a/plonkish_backend/src/backend.rs b/plonkish_backend/src/backend.rs index 35aed46..9e39f94 100644 --- a/plonkish_backend/src/backend.rs +++ b/plonkish_backend/src/backend.rs @@ -68,7 +68,7 @@ pub struct PlonkishCircuitInfo { /// respectively. 
pub lookups: Vec, Expression)>>, /// Represents Lasso lookup argument, which contains input, indices, and table - pub lasso_lookup: Vec<(Expression, Expression, Box>)>, + pub lasso_lookup: Option<(Expression, Expression, Box>)>, /// Each item inside outer vector repesents an closed permutation cycle, /// which contains vetor of tuples representing the polynomial index and /// row respectively. diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index f45c8a5..629e7e8 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -5,7 +5,6 @@ use crate::{ prover::{instance_polys, permutation_z_polys, prove_zero_check}, verifier::{verify_zero_check, zero_check_opening_points_len}, }, - lookup::lasso::verifier::LassoVerifier, PlonkishBackend, PlonkishCircuit, PlonkishCircuitInfo, WitnessEncoding, }, pcs::PolynomialCommitmentScheme, @@ -23,7 +22,9 @@ use crate::{ use rand::RngCore; use std::{fmt::Debug, hash::Hash, iter, marker::PhantomData}; -use super::lookup::lasso::{prover::LassoProver, DecomposableTable}; +use self::{prover::prove_lookup, verifier::verify_lookup}; + +use super::lookup::lasso::DecomposableTable; pub(crate) mod preprocessor; pub(crate) mod prover; @@ -46,8 +47,8 @@ where pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, pub(crate) lookups: Vec, Expression)>>, - /// assume we have Just One Lookup Table - pub(crate) lasso_lookup: (Expression, Expression, Box>), + /// assume we have at most Just One Lookup Table + pub(crate) lasso_lookup: Option<(Expression, Expression, Box>)>, pub(crate) lookup_polys_offset: usize, pub(crate) lookup_points_offset: usize, pub(crate) num_permutation_z_polys: usize, @@ -69,7 +70,7 @@ where pub(crate) num_instances: Vec, pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, - pub(crate) lasso_table: Box>, + pub(crate) lasso_table: Option>>, pub(crate) lookup_polys_offset: usize, pub(crate) 
lookup_points_offset: usize, pub(crate) num_permutation_z_polys: usize, @@ -137,14 +138,17 @@ where + num_permutation_z_polys; let lookup_points_offset = zero_check_opening_points_len(&expression, circuit_info.num_instances.len()); - let lasso_lookup = &circuit_info.lasso_lookup[0]; + let lasso_table = circuit_info + .lasso_lookup + .is_some() + .then(|| circuit_info.lasso_lookup.as_ref().unwrap().2.clone()); let vp = HyperPlonkVerifierParam { pcs: pcs_vp, num_instances: circuit_info.num_instances.clone(), num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), - lasso_table: lasso_lookup.2.clone(), + lasso_table, lookup_polys_offset, lookup_points_offset, num_permutation_z_polys, @@ -163,7 +167,7 @@ where num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), lookups: circuit_info.lookups.clone(), - lasso_lookup: lasso_lookup.clone(), + lasso_lookup: circuit_info.lasso_lookup.clone(), lookup_polys_offset, lookup_points_offset, num_permutation_z_polys, @@ -228,81 +232,13 @@ where .chain(witness_polys.iter()) .collect_vec(); - let (lookup, table) = ((&pp.lasso_lookup.0, &pp.lasso_lookup.1), &pp.lasso_lookup.2); - let (lookup_input_poly, lookup_nz_poly) = - LassoProver::::lookup_poly(&lookup, &polys); - - let num_vars = lookup_input_poly.num_vars(); - - // get subtable_polys - let subtable_polys = table.subtable_polys(); - let subtable_polys = subtable_polys.iter().collect_vec(); - let subtable_polys = subtable_polys.as_slice(); - - let (lookup_polys, lookup_comms) = LassoProver::::commit( - &pp.pcs, - pp.lookup_polys_offset, - &table, - subtable_polys, - lookup_input_poly, - &lookup_nz_poly, - transcript, - )?; - - // Round n - // squeeze `r` - let r = transcript.squeeze_challenges(num_vars); - - let (input_poly, dims, read_ts_polys, final_cts_polys, e_polys) = ( - &lookup_polys[0][0], - &lookup_polys[1], - &lookup_polys[2], - &lookup_polys[3], - 
&lookup_polys[4], - ); - // Lasso Sumcheck - let (lookup_points, lookup_evals) = LassoProver::::prove_sum_check( - pp.lookup_points_offset, - &table, - input_poly, - &e_polys.iter().collect_vec(), - &r, - num_vars, - transcript, - )?; - - // squeeze memory checking challenges -> we will reuse beta, gamma for memory checking of Lasso - // Round n+1 - let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); - - // memory_checking - let (mem_check_opening_points, mem_check_opening_evals) = - LassoProver::::memory_checking( - pp.lookup_points_offset, - table, - subtable_polys, - dims, - read_ts_polys, - final_cts_polys, - e_polys, - &beta, - &gamma, - transcript, - )?; - - let lookup_polys = lookup_polys - .iter() - .flat_map(|lookup_polys| lookup_polys.iter().map(|poly| &poly.poly).collect_vec()) - .collect_vec(); - let lookup_comms = lookup_comms.concat(); - let lookup_opening_points = iter::empty() - .chain(lookup_points) - .chain(mem_check_opening_points) - .collect_vec(); - let lookup_evals = iter::empty() - .chain(lookup_evals) - .chain(mem_check_opening_evals) - .collect_vec(); + let (lookup_polys, lookup_comms, lookup_opening_points, lookup_evals, lasso_challenges) = + prove_lookup(pp, &polys, transcript)?; + let [beta, gamma] = if pp.lasso_lookup.is_some() { + lasso_challenges.try_into().unwrap() + } else { + transcript.squeeze_challenges(2).try_into().unwrap() + }; let timer = start_timer(|| format!("permutation_z_polys-{}", pp.permutation_polys.len())); let permutation_z_polys = permutation_z_polys( @@ -338,7 +274,7 @@ where )?; // PCS open - let polys = iter::empty().chain(polys).chain(lookup_polys); + let polys = iter::empty().chain(polys).chain(lookup_polys.iter()); let dummy_comm = Pcs::Commitment::default(); let comms = iter::empty() .chain(iter::repeat(&dummy_comm).take(pp.num_instances.len())) @@ -384,45 +320,13 @@ where challenges.extend(transcript.squeeze_challenges(*num_challenges)); } - let lookup_table = &vp.lasso_table; - - let 
lookup_comms = - LassoVerifier::::read_commitments(&vp.pcs, lookup_table, transcript)?; - - // Round n - let r = transcript.squeeze_challenges(vp.num_vars); - - let (lookup_points, lookup_evals) = LassoVerifier::::verify_sum_check( - lookup_table, - vp.num_vars, - vp.lookup_polys_offset, - vp.lookup_points_offset, - &r, - transcript, - )?; - - // Round n+1 - - let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); - - // memory checking - let (mem_check_opening_points, mem_check_opening_evals) = - LassoVerifier::::memory_checking( - vp.num_vars, - vp.lookup_polys_offset, - vp.lookup_points_offset, - lookup_table, - &beta, - &gamma, - transcript, - )?; - - let lookup_opening_points = iter::empty() - .chain(lookup_points) - .chain(mem_check_opening_points); - let lookup_evals = iter::empty() - .chain(lookup_evals) - .chain(mem_check_opening_evals); + let (lookup_comms, lookup_opening_points, lookup_evals, lasso_challenges) = + verify_lookup::(vp, transcript)?; + let [beta, gamma] = if vp.lasso_table.is_some() { + lasso_challenges.try_into().unwrap() + } else { + transcript.squeeze_challenges(2).try_into().unwrap() + }; let permutation_z_comms = Pcs::read_commitments(&vp.pcs, vp.num_permutation_z_polys, transcript)?; diff --git a/plonkish_backend/src/backend/hyperplonk/prover.rs b/plonkish_backend/src/backend/hyperplonk/prover.rs index da3fb1b..48d2a8a 100644 --- a/plonkish_backend/src/backend/hyperplonk/prover.rs +++ b/plonkish_backend/src/backend/hyperplonk/prover.rs @@ -4,9 +4,10 @@ use crate::{ verifier::{pcs_query, point_offset, points}, HyperPlonk, }, + lookup::lasso::prover::LassoProver, WitnessEncoding, }, - pcs::Evaluation, + pcs::{Evaluation, PolynomialCommitmentScheme}, piop::sum_check::{ classic::{ClassicSumCheck, EvaluationsProver}, SumCheck, VirtualPolynomial, @@ -18,13 +19,15 @@ use crate::{ expression::{Expression, Rotation}, parallel::{par_map_collect, parallelize}, start_timer, - transcript::FieldTranscriptWrite, + 
transcript::{FieldTranscriptWrite, TranscriptWrite}, Itertools, }, Error, }; use std::iter; +use super::HyperPlonkProverParam; + pub(crate) fn instance_polys<'a, F: PrimeField>( num_vars: usize, instances: impl IntoIterator>, @@ -201,3 +204,107 @@ pub(crate) fn prove_sum_check( Ok((points(&pcs_query, &x), evals)) } + +pub(super) fn prove_lookup< + F: PrimeField, + Pcs: PolynomialCommitmentScheme>, +>( + pp: &HyperPlonkProverParam, + polys: &[&MultilinearPolynomial], + transcript: &mut impl TranscriptWrite, +) -> Result< + ( + Vec>, + Vec, + Vec>, + Vec>, + Vec, + ), + Error, +> { + if pp.lasso_lookup.is_none() { + return Ok((vec![], vec![], vec![], vec![], vec![])); + } + let lasso_lookup = pp.lasso_lookup.as_ref().unwrap(); + let (lookup, table) = ((&lasso_lookup.0, &lasso_lookup.1), &lasso_lookup.2); + let (lookup_input_poly, lookup_nz_poly) = LassoProver::::lookup_poly(&lookup, &polys); + + let num_vars = lookup_input_poly.num_vars(); + + // get subtable_polys + let subtable_polys = table.subtable_polys(); + let subtable_polys = subtable_polys.iter().collect_vec(); + let subtable_polys = subtable_polys.as_slice(); + + let (lookup_polys, lookup_comms) = LassoProver::::commit( + &pp.pcs, + pp.lookup_polys_offset, + &table, + subtable_polys, + lookup_input_poly, + &lookup_nz_poly, + transcript, + )?; + + // Round n + // squeeze `r` + let r = transcript.squeeze_challenges(num_vars); + + let (input_poly, dims, read_ts_polys, final_cts_polys, e_polys) = ( + &lookup_polys[0][0], + &lookup_polys[1], + &lookup_polys[2], + &lookup_polys[3], + &lookup_polys[4], + ); + // Lasso Sumcheck + let (lookup_points, lookup_evals) = LassoProver::::prove_sum_check( + pp.lookup_points_offset, + &table, + input_poly, + &e_polys.iter().collect_vec(), + &r, + num_vars, + transcript, + )?; + + // squeeze memory checking challenges -> we will reuse beta, gamma for memory checking of Lasso + // Round n+1 + let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); + + // 
memory_checking + let (mem_check_opening_points, mem_check_opening_evals) = + LassoProver::::memory_checking( + pp.lookup_points_offset, + table, + subtable_polys, + dims, + read_ts_polys, + final_cts_polys, + e_polys, + &beta, + &gamma, + transcript, + )?; + + let lookup_polys = lookup_polys + .into_iter() + .flat_map(|lookup_polys| lookup_polys.into_iter().map(|poly| poly.poly).collect_vec()) + .collect_vec(); + let lookup_comms = lookup_comms.concat(); + let lookup_opening_points = iter::empty() + .chain(lookup_points) + .chain(mem_check_opening_points) + .collect_vec(); + let lookup_evals = iter::empty() + .chain(lookup_evals) + .chain(mem_check_opening_evals) + .collect_vec(); + Ok(( + lookup_polys, + lookup_comms, + lookup_opening_points, + lookup_evals, + vec![beta, gamma], + )) +} diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index e4f6e0a..b5935b5 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -41,7 +41,7 @@ pub fn vanilla_plonk_circuit_info( num_challenges: vec![0], constraints: vec![q_l * w_l + q_r * w_r + q_m * w_l * w_r + q_o * w_o + q_c + pi], lookups: vec![], - lasso_lookup: vec![], + lasso_lookup: None, permutations, max_degree: Some(4), } @@ -79,7 +79,7 @@ pub fn vanilla_plonk_with_lookup_circuit_info( (q_lookup * w_r, t_r.clone()), (q_lookup * w_o, t_o.clone()), ]], - lasso_lookup: vec![], + lasso_lookup: None, permutations, max_degree: Some(4), } diff --git a/plonkish_backend/src/backend/hyperplonk/verifier.rs b/plonkish_backend/src/backend/hyperplonk/verifier.rs index 5ae5ca4..fbf39d5 100644 --- a/plonkish_backend/src/backend/hyperplonk/verifier.rs +++ b/plonkish_backend/src/backend/hyperplonk/verifier.rs @@ -1,19 +1,25 @@ use crate::{ - pcs::Evaluation, + backend::lookup::lasso::verifier::LassoVerifier, + pcs::{Evaluation, PolynomialCommitmentScheme}, piop::sum_check::{ classic::{ClassicSumCheck, 
EvaluationsProver}, evaluate, lagrange_eval, SumCheck, }, - poly::multilinear::{rotation_eval, rotation_eval_points}, + poly::multilinear::{rotation_eval, rotation_eval_points, MultilinearPolynomial}, util::{ arithmetic::{inner_product, BooleanHypercube, PrimeField}, expression::{Expression, Query, Rotation}, - transcript::FieldTranscriptRead, + transcript::{FieldTranscriptRead, TranscriptRead}, Itertools, }, Error, }; -use std::collections::{BTreeSet, HashMap}; +use std::{ + collections::{BTreeSet, HashMap}, + iter, +}; + +use super::HyperPlonkVerifierParam; #[allow(clippy::type_complexity)] pub(super) fn verify_zero_check( @@ -194,3 +200,69 @@ pub(super) fn zero_check_opening_points_len( .map(|rotation| 1 << rotation.distance()) .sum() } + +pub(super) fn verify_lookup< + F: PrimeField, + Pcs: PolynomialCommitmentScheme>, +>( + vp: &HyperPlonkVerifierParam, + transcript: &mut impl TranscriptRead, +) -> Result< + ( + Vec, + Vec>, + Vec>, + Vec, + ), + Error, +> { + if vp.lasso_table.is_none() { + return Ok((vec![], vec![], vec![], vec![])); + } + let lookup_table = vp.lasso_table.as_ref().unwrap(); + + let lookup_comms = + LassoVerifier::::read_commitments(&vp.pcs, lookup_table, transcript)?; + + // Round n + let r = transcript.squeeze_challenges(vp.num_vars); + + let (lookup_points, lookup_evals) = LassoVerifier::::verify_sum_check( + lookup_table, + vp.num_vars, + vp.lookup_polys_offset, + vp.lookup_points_offset, + &r, + transcript, + )?; + + // Round n+1 + let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); + + // memory checking + let (mem_check_opening_points, mem_check_opening_evals) = + LassoVerifier::::memory_checking( + vp.num_vars, + vp.lookup_polys_offset, + vp.lookup_points_offset, + lookup_table, + &beta, + &gamma, + transcript, + )?; + + let lookup_opening_points = iter::empty() + .chain(lookup_points) + .chain(mem_check_opening_points) + .collect_vec(); + let lookup_evals = iter::empty() + .chain(lookup_evals) + 
.chain(mem_check_opening_evals) + .collect_vec(); + Ok(( + lookup_comms, + lookup_opening_points, + lookup_evals, + vec![beta, gamma], + )) +} diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index 8094b80..f4c489e 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -91,13 +91,16 @@ impl DecomposableTable for AndTable { #[cfg(test)] mod test { - use std::{iter, array}; + use std::{array, iter}; use super::AndTable; use crate::{ backend::{ - hyperplonk::{HyperPlonk, prover::instance_polys, util::Permutation}, - test::run_plonkish_backend, lookup::lasso::DecomposableTable, PlonkishCircuitInfo, PlonkishCircuit, mock::MockCircuit, + hyperplonk::{prover::instance_polys, util::Permutation, HyperPlonk}, + lookup::lasso::DecomposableTable, + mock::MockCircuit, + test::run_plonkish_backend, + PlonkishCircuit, PlonkishCircuitInfo, }, pcs::{ multilinear::{ @@ -106,14 +109,20 @@ mod test { }, univariate::UnivariateKzg, }, + poly::Polynomial, util::{ - code::BrakedownSpec6, hash::Keccak256, test::{seeded_std_rng, rand_vec, rand_idx}, - transcript::Keccak256Transcript, arithmetic::{usize_from_bits_le, fe_to_bits_le}, expression::{Query, Rotation, Expression}, - }, poly::Polynomial, + arithmetic::{fe_to_bits_le, usize_from_bits_le}, + code::BrakedownSpec6, + expression::{Expression, Query, Rotation}, + hash::Keccak256, + test::{rand_idx, rand_vec, seeded_std_rng}, + transcript::Keccak256Transcript, + }, }; use halo2_curves::{ bn256::{self, Bn256}, - grumpkin, ff::PrimeField, + ff::PrimeField, + grumpkin, }; use itertools::Itertools; use num_integer::Integer; @@ -127,7 +136,7 @@ mod test { ) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { let size = 1 << num_vars; let mut polys = [(); 13].map(|_| vec![F::ZERO; size]); - + let [t_l, t_r, t_o] = [(); 3].map(|_| { iter::empty() .chain([F::ZERO, F::ZERO]) @@ -138,10 +147,10 @@ mod 
test { polys[7] = t_l; polys[8] = t_r; polys[9] = t_o; - + let instances = rand_vec(num_vars, &mut witness_rng); polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); - + let mut permutation = Permutation::default(); for poly in [10, 11, 12] { permutation.copy((poly, 1), (poly, 1)); @@ -160,8 +169,8 @@ mod test { let w_l = polys[l_copy_idx.0][l_copy_idx.1]; let w_r = polys[r_copy_idx.0][r_copy_idx.1]; let w_o = F::from( - (usize_from_bits_le(&fe_to_bits_le(w_l)) & usize_from_bits_le(&fe_to_bits_le(w_r))) - as u64, + (usize_from_bits_le(&fe_to_bits_le(w_l)) + & usize_from_bits_le(&fe_to_bits_le(w_r))) as u64, ); [w_l, w_r, w_o] } else { @@ -169,7 +178,7 @@ mod test { let w_o = w_l & w_r; [F::from(w_l), F::from(w_r), F::from(w_o)] }; - + let values = vec![(10, w_l), (11, w_r), (12, w_o)]; for (poly, value) in values { polys[poly][idx] = value; @@ -197,7 +206,8 @@ mod test { permutations: Vec>, ) -> PlonkishCircuitInfo { let [pi, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = - &array::from_fn(|poly| Query::new(poly, Rotation::cur())).map(Expression::::Polynomial); + &array::from_fn(|poly| Query::new(poly, Rotation::cur())) + .map(Expression::::Polynomial); let lasso_lookup_input = w_o.clone(); let lasso_lookup_indices = Expression::DistributePowers( vec![w_l.clone(), w_r.clone()], @@ -213,7 +223,7 @@ mod test { num_challenges: vec![0], constraints: vec![], lookups: vec![vec![]], - lasso_lookup: vec![(lasso_lookup_input, lasso_lookup_indices, table)], + lasso_lookup: Some((lasso_lookup_input, lasso_lookup_indices, table)), permutations, max_degree: Some(4), } diff --git a/plonkish_backend/src/frontend/halo2.rs b/plonkish_backend/src/frontend/halo2.rs index c50cb04..13d529e 100644 --- a/plonkish_backend/src/frontend/halo2.rs +++ b/plonkish_backend/src/frontend/halo2.rs @@ -134,7 +134,6 @@ impl> PlonkishCircuit for Halo2Circuit { .collect_vec() }) .collect(); - let lasso_lookup = vec![]; let num_instances = 
instances.iter().map(Vec::len).collect_vec(); let preprocess_polys = @@ -158,7 +157,7 @@ impl> PlonkishCircuit for Halo2Circuit { num_challenges: num_by_phase(&cs.challenge_phase()), constraints, lookups, - lasso_lookup, + lasso_lookup: None, permutations, max_degree: Some(cs.degree::()), }) From 319e44dc26a76b64a5790f719b40933fb09c6ce0 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Tue, 7 Nov 2023 19:49:08 +0900 Subject: [PATCH 11/27] Add range check test case --- .../src/backend/lookup/lasso/test/range.rs | 244 ++++++++++++++++++ 1 file changed, 244 insertions(+) create mode 100644 plonkish_backend/src/backend/lookup/lasso/test/range.rs diff --git a/plonkish_backend/src/backend/lookup/lasso/test/range.rs b/plonkish_backend/src/backend/lookup/lasso/test/range.rs new file mode 100644 index 0000000..af6c918 --- /dev/null +++ b/plonkish_backend/src/backend/lookup/lasso/test/range.rs @@ -0,0 +1,244 @@ +use std::{iter, marker::PhantomData}; + +use halo2_curves::ff::PrimeField; +use itertools::Itertools; + +use crate::{ + backend::lookup::lasso::DecomposableTable, + poly::multilinear::MultilinearPolynomial, + util::{ + arithmetic::{div_ceil, inner_product}, + expression::Expression, + }, +}; + +#[derive(Clone, Debug)] +pub struct RangeTable(PhantomData); + +impl RangeTable { + pub fn new() -> Self { + Self(PhantomData) + } +} + +impl DecomposableTable + for RangeTable +{ + fn chunk_bits(&self) -> Vec { + let remainder_bits = if NUM_BITS % LIMB_BITS != 0 { + vec![NUM_BITS % LIMB_BITS] + } else { + vec![] + }; + iter::repeat(LIMB_BITS) + .take(NUM_BITS / LIMB_BITS) + .chain(remainder_bits) + .collect_vec() + } + + fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression { + Expression::DistributePowers( + expressions, + Box::new(Expression::Constant(F::from(1 << LIMB_BITS))), + ) + } + + fn combine_lookups(&self, operands: &[F]) -> F { + let weight = F::from(1 << LIMB_BITS); + inner_product( + operands, + iter::successors(Some(F::ONE), |power_of_weight| { + 
Some(*power_of_weight * weight) + }) + .take(operands.len()) + .collect_vec() + .iter(), + ) + } + + fn num_memories(&self) -> usize { + div_ceil(NUM_BITS, LIMB_BITS) + } + + fn subtable_indices(&self, index_bits: Vec) -> Vec> { + index_bits.chunks(LIMB_BITS).map(Vec::from).collect_vec() + } + + fn subtable_polys(&self) -> Vec> { + let mut evals = vec![]; + (0..1 << LIMB_BITS).for_each(|i| evals.push(F::from(i))); + let limb_subtable_poly = MultilinearPolynomial::new(evals); + if NUM_BITS % LIMB_BITS != 0 { + let remainder = NUM_BITS % LIMB_BITS; + let mut evals = vec![]; + (0..1 << remainder).for_each(|i| { + evals.push(F::from(i)); + }); + let rem_subtable_poly = MultilinearPolynomial::new(evals); + vec![limb_subtable_poly, rem_subtable_poly] + } else { + vec![limb_subtable_poly] + } + } + + fn memory_to_chunk_index(&self, memory_index: usize) -> usize { + memory_index + } + + fn memory_to_subtable_index(&self, memory_index: usize) -> usize { + if NUM_BITS % LIMB_BITS != 0 && memory_index == NUM_BITS / LIMB_BITS { + 1 + } else { + 0 + } + } +} + +#[cfg(test)] +mod test { + use std::array; + + use super::RangeTable; + use crate::{ + backend::{ + hyperplonk::{prover::instance_polys, util::Permutation, HyperPlonk}, + lookup::lasso::DecomposableTable, + mock::MockCircuit, + test::run_plonkish_backend, + PlonkishCircuit, PlonkishCircuitInfo, + }, + pcs::{ + multilinear::{ + Gemini, MultilinearBrakedown, MultilinearHyrax, MultilinearIpa, MultilinearKzg, + Zeromorph, + }, + univariate::UnivariateKzg, + }, + poly::Polynomial, + util::{ + code::BrakedownSpec6, + expression::{Expression, Query, Rotation}, + hash::Keccak256, + test::{rand_idx, rand_vec, seeded_std_rng}, + transcript::Keccak256Transcript, + }, + }; + use halo2_curves::{ + bn256::{self, Bn256}, + ff::PrimeField, + grumpkin, + }; + use num_integer::Integer; + use rand::RngCore; + + fn rand_vanilla_plonk_with_lasso_lookup_circuit( + num_vars: usize, + table: Box>, + mut preprocess_rng: impl RngCore, + mut 
witness_rng: impl RngCore, + ) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { + let size = 1 << num_vars; + let mut polys = [(); 9].map(|_| vec![F::ZERO; size]); + + let instances = rand_vec(num_vars, &mut witness_rng); + polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); + + let mut permutation = Permutation::default(); + for poly in [6, 7, 8] { + permutation.copy((poly, 1), (poly, 1)); + } + for idx in 0..size - 1 { + let [w_l, w_r] = if preprocess_rng.next_u32().is_even() && idx > 1 { + let [l_copy_idx, r_copy_idx] = [(); 2].map(|_| { + ( + rand_idx(6..9, &mut preprocess_rng), + rand_idx(1..idx, &mut preprocess_rng), + ) + }); + permutation.copy(l_copy_idx, (6, idx)); + permutation.copy(r_copy_idx, (7, idx)); + [ + polys[l_copy_idx.0][l_copy_idx.1], + polys[r_copy_idx.0][r_copy_idx.1], + ] + } else { + let a = witness_rng.next_u64() as usize; + let b = witness_rng.next_u64(); + [F::from_u128(a.pow(2) as u128), F::from(b)] + }; + let values = [(6, w_l), (7, w_r)]; + for (poly, value) in values { + polys[poly][idx] = value; + } + } + let [_, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = polys; + let circuit_info = vanilla_plonk_with_lasso_lookup_circuit_info( + num_vars, + instances.len(), + [q_l, q_r, q_m, q_o, q_c], + table, + permutation.into_cycles(), + ); + ( + circuit_info, + MockCircuit::new(vec![instances], vec![w_l, w_r, w_o]), + ) + } + + fn vanilla_plonk_with_lasso_lookup_circuit_info( + num_vars: usize, + num_instances: usize, + preprocess_polys: [Vec; 5], + table: Box>, + permutations: Vec>, + ) -> PlonkishCircuitInfo { + let [pi, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = + &array::from_fn(|poly| Query::new(poly, Rotation::cur())) + .map(Expression::::Polynomial); + let lasso_lookup_input = w_l.clone(); + let lasso_lookup_indices = w_l.clone(); + let chunk_bits = table.chunk_bits(); + let num_vars = chunk_bits.iter().chain([&num_vars]).max().unwrap(); + PlonkishCircuitInfo { + k: *num_vars, + num_instances: vec![num_instances], + 
preprocess_polys: preprocess_polys.to_vec(), + num_witness_polys: vec![3], + num_challenges: vec![0], + constraints: vec![], + lookups: vec![vec![]], + lasso_lookup: Some((lasso_lookup_input, lasso_lookup_indices, table)), + permutations, + max_degree: Some(4), + } + } + + macro_rules! test { + ($name:ident, $f:ty, $pcs:ty, $num_vars_range:expr) => { + paste::paste! { + #[test] + fn [<$name _hyperplonk_vanilla_plonk_with_lasso_lookup>]() { + run_plonkish_backend::<_, HyperPlonk<$pcs>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| { + let table = Box::new(RangeTable::<$f, 128, 16>::new()); + rand_vanilla_plonk_with_lasso_lookup_circuit(num_vars, table, seeded_std_rng(), seeded_std_rng()) + }); + } + } + }; + ($name:ident, $f:ty, $pcs:ty) => { + test!($name, $f, $pcs, 16..17); + }; + } + + test!(brakedown, bn256::Fr, MultilinearBrakedown); + test!( + hyrax, + grumpkin::Fr, + MultilinearHyrax, + 5..16 + ); + test!(ipa, grumpkin::Fr, MultilinearIpa); + test!(kzg, bn256::Fr, MultilinearKzg); + test!(gemini_kzg, bn256::Fr, Gemini>); + test!(zeromorph_kzg, bn256::Fr, Zeromorph>); +} From 6d588556e5205c70bb2784d0ab7187eea0385a9c Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Wed, 8 Nov 2023 15:54:42 +0900 Subject: [PATCH 12/27] Cleanup --- plonkish_backend/src/backend/hyperplonk.rs | 34 +++++++-- .../src/backend/hyperplonk/prover.rs | 64 ++++++----------- .../src/backend/hyperplonk/util.rs | 3 +- .../src/backend/hyperplonk/verifier.rs | 56 ++++++--------- .../lookup/lasso/memory_checking/mod.rs | 9 +-- .../lookup/lasso/memory_checking/prover.rs | 18 +++-- .../lookup/lasso/memory_checking/verifier.rs | 57 ++++++++------- .../src/backend/lookup/lasso/prover/mod.rs | 30 +++++--- .../src/backend/lookup/lasso/prover/surge.rs | 9 ++- .../src/backend/lookup/lasso/test/and.rs | 71 ++++++++++--------- .../src/backend/lookup/lasso/test/range.rs | 43 ++++++----- .../src/backend/lookup/lasso/verifier/mod.rs | 34 ++++----- 12 files changed, 224 insertions(+), 204 deletions(-) 
diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 629e7e8..cf810ac 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -22,7 +22,7 @@ use crate::{ use rand::RngCore; use std::{fmt::Debug, hash::Hash, iter, marker::PhantomData}; -use self::{prover::prove_lookup, verifier::verify_lookup}; +use self::{prover::prove_lasso_lookup, verifier::verify_lasso_lookup}; use super::lookup::lasso::DecomposableTable; @@ -232,8 +232,15 @@ where .chain(witness_polys.iter()) .collect_vec(); - let (lookup_polys, lookup_comms, lookup_opening_points, lookup_evals, lasso_challenges) = - prove_lookup(pp, &polys, transcript)?; + let mut lookup_opening_points = vec![]; + let mut lookup_opening_evals = vec![]; + let (lookup_polys, lookup_comms, lasso_challenges) = prove_lasso_lookup( + pp, + &polys, + &mut lookup_opening_points, + &mut lookup_opening_evals, + transcript, + )?; let [beta, gamma] = if pp.lasso_lookup.is_some() { lasso_challenges.try_into().unwrap() } else { @@ -288,7 +295,10 @@ where .chain(points) .chain(lookup_opening_points) .collect_vec(); - let evals = iter::empty().chain(evals).chain(lookup_evals).collect_vec(); + let evals = iter::empty() + .chain(evals) + .chain(lookup_opening_evals) + .collect_vec(); let timer = start_timer(|| format!("pcs_batch_open-{}", evals.len())); Pcs::batch_open(&pp.pcs, polys, comms, &points, &evals, transcript)?; end_timer(timer); @@ -320,8 +330,14 @@ where challenges.extend(transcript.squeeze_challenges(*num_challenges)); } - let (lookup_comms, lookup_opening_points, lookup_evals, lasso_challenges) = - verify_lookup::(vp, transcript)?; + let mut lookup_opening_points = vec![]; + let mut lookup_opening_evals = vec![]; + let (lookup_comms, lasso_challenges) = verify_lasso_lookup::( + vp, + &mut lookup_opening_points, + &mut lookup_opening_evals, + transcript, + )?; let [beta, gamma] = if vp.lasso_table.is_some() { 
lasso_challenges.try_into().unwrap() } else { @@ -330,6 +346,7 @@ where let permutation_z_comms = Pcs::read_commitments(&vp.pcs, vp.num_permutation_z_polys, transcript)?; + // Round n+2 let alpha = transcript.squeeze_challenge(); @@ -359,7 +376,10 @@ where .chain(points) .chain(lookup_opening_points) .collect_vec(); - let evals = iter::empty().chain(evals).chain(lookup_evals).collect_vec(); + let evals = iter::empty() + .chain(evals) + .chain(lookup_opening_evals) + .collect_vec(); Pcs::batch_verify(&vp.pcs, comms, &points, &evals, transcript)?; Ok(()) diff --git a/plonkish_backend/src/backend/hyperplonk/prover.rs b/plonkish_backend/src/backend/hyperplonk/prover.rs index 48d2a8a..2429255 100644 --- a/plonkish_backend/src/backend/hyperplonk/prover.rs +++ b/plonkish_backend/src/backend/hyperplonk/prover.rs @@ -205,25 +205,18 @@ pub(crate) fn prove_sum_check( Ok((points(&pcs_query, &x), evals)) } -pub(super) fn prove_lookup< +pub(super) fn prove_lasso_lookup< F: PrimeField, Pcs: PolynomialCommitmentScheme>, >( pp: &HyperPlonkProverParam, polys: &[&MultilinearPolynomial], + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, transcript: &mut impl TranscriptWrite, -) -> Result< - ( - Vec>, - Vec, - Vec>, - Vec>, - Vec, - ), - Error, -> { +) -> Result<(Vec>, Vec, Vec), Error> { if pp.lasso_lookup.is_none() { - return Ok((vec![], vec![], vec![], vec![], vec![])); + return Ok((vec![], vec![], vec![])); } let lasso_lookup = pp.lasso_lookup.as_ref().unwrap(); let (lookup, table) = ((&lasso_lookup.0, &lasso_lookup.1), &lasso_lookup.2); @@ -258,8 +251,10 @@ pub(super) fn prove_lookup< &lookup_polys[4], ); // Lasso Sumcheck - let (lookup_points, lookup_evals) = LassoProver::::prove_sum_check( + LassoProver::::prove_sum_check( pp.lookup_points_offset, + lookup_opening_points, + lookup_opening_evals, &table, input_poly, &e_polys.iter().collect_vec(), @@ -273,38 +268,25 @@ pub(super) fn prove_lookup< let [beta, gamma] = 
transcript.squeeze_challenges(2).try_into().unwrap(); // memory_checking - let (mem_check_opening_points, mem_check_opening_evals) = - LassoProver::::memory_checking( - pp.lookup_points_offset, - table, - subtable_polys, - dims, - read_ts_polys, - final_cts_polys, - e_polys, - &beta, - &gamma, - transcript, - )?; + LassoProver::::memory_checking( + pp.lookup_points_offset, + lookup_opening_points, + lookup_opening_evals, + table, + subtable_polys, + dims, + read_ts_polys, + final_cts_polys, + e_polys, + &beta, + &gamma, + transcript, + )?; let lookup_polys = lookup_polys .into_iter() .flat_map(|lookup_polys| lookup_polys.into_iter().map(|poly| poly.poly).collect_vec()) .collect_vec(); let lookup_comms = lookup_comms.concat(); - let lookup_opening_points = iter::empty() - .chain(lookup_points) - .chain(mem_check_opening_points) - .collect_vec(); - let lookup_evals = iter::empty() - .chain(lookup_evals) - .chain(mem_check_opening_evals) - .collect_vec(); - Ok(( - lookup_polys, - lookup_comms, - lookup_opening_points, - lookup_evals, - vec![beta, gamma], - )) + Ok((lookup_polys, lookup_comms, vec![beta, gamma])) } diff --git a/plonkish_backend/src/backend/hyperplonk/util.rs b/plonkish_backend/src/backend/hyperplonk/util.rs index b5935b5..eea9bee 100644 --- a/plonkish_backend/src/backend/hyperplonk/util.rs +++ b/plonkish_backend/src/backend/hyperplonk/util.rs @@ -4,13 +4,12 @@ use crate::{ preprocessor::{compose, permutation_polys}, prover::{instance_polys, permutation_z_polys}, }, - lookup::lasso::{test::and::AndTable, DecomposableTable}, mock::MockCircuit, PlonkishCircuit, PlonkishCircuitInfo, }, poly::{multilinear::MultilinearPolynomial, Polynomial}, util::{ - arithmetic::{fe_to_bits_le, usize_from_bits_le, BooleanHypercube, PrimeField}, + arithmetic::{BooleanHypercube, PrimeField}, expression::{Expression, Query, Rotation}, test::{rand_array, rand_idx, rand_vec}, Itertools, diff --git a/plonkish_backend/src/backend/hyperplonk/verifier.rs 
b/plonkish_backend/src/backend/hyperplonk/verifier.rs index fbf39d5..2a414c2 100644 --- a/plonkish_backend/src/backend/hyperplonk/verifier.rs +++ b/plonkish_backend/src/backend/hyperplonk/verifier.rs @@ -201,23 +201,17 @@ pub(super) fn zero_check_opening_points_len( .sum() } -pub(super) fn verify_lookup< +pub(super) fn verify_lasso_lookup< F: PrimeField, Pcs: PolynomialCommitmentScheme>, >( vp: &HyperPlonkVerifierParam, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, transcript: &mut impl TranscriptRead, -) -> Result< - ( - Vec, - Vec>, - Vec>, - Vec, - ), - Error, -> { +) -> Result<(Vec, Vec), Error> { if vp.lasso_table.is_none() { - return Ok((vec![], vec![], vec![], vec![])); + return Ok((vec![], vec![])); } let lookup_table = vp.lasso_table.as_ref().unwrap(); @@ -227,11 +221,13 @@ pub(super) fn verify_lookup< // Round n let r = transcript.squeeze_challenges(vp.num_vars); - let (lookup_points, lookup_evals) = LassoVerifier::::verify_sum_check( + LassoVerifier::::verify_sum_check( lookup_table, vp.num_vars, vp.lookup_polys_offset, vp.lookup_points_offset, + lookup_opening_points, + lookup_opening_evals, &r, transcript, )?; @@ -240,29 +236,17 @@ pub(super) fn verify_lookup< let [beta, gamma] = transcript.squeeze_challenges(2).try_into().unwrap(); // memory checking - let (mem_check_opening_points, mem_check_opening_evals) = - LassoVerifier::::memory_checking( - vp.num_vars, - vp.lookup_polys_offset, - vp.lookup_points_offset, - lookup_table, - &beta, - &gamma, - transcript, - )?; - - let lookup_opening_points = iter::empty() - .chain(lookup_points) - .chain(mem_check_opening_points) - .collect_vec(); - let lookup_evals = iter::empty() - .chain(lookup_evals) - .chain(mem_check_opening_evals) - .collect_vec(); - Ok(( - lookup_comms, + LassoVerifier::::memory_checking( + vp.num_vars, + vp.lookup_polys_offset, + vp.lookup_points_offset, lookup_opening_points, - lookup_evals, - vec![beta, gamma], - )) + lookup_opening_evals, + lookup_table, + 
&beta, + &gamma, + transcript, + )?; + + Ok((lookup_comms, vec![beta, gamma])) } diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs index 8f224b4..c73fe0e 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/mod.rs @@ -1,17 +1,10 @@ pub mod prover; pub mod verifier; -use std::iter; - use halo2_curves::ff::PrimeField; -use itertools::Itertools; pub use prover::MemoryCheckingProver; -use crate::{ - poly::multilinear::MultilinearPolynomial, - util::{arithmetic::inner_product, transcript::FieldTranscriptRead}, - Error, -}; +use crate::poly::multilinear::MultilinearPolynomial; #[derive(Clone, Debug)] struct MemoryGKR { diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs index 479b0c6..94f201d 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs @@ -144,8 +144,11 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { pub fn prove( &mut self, + points_offset: usize, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, transcript: &mut impl FieldTranscriptWrite, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { let (_, x) = prove_grand_product( iter::repeat(None).take(self.memories.len() * 2), chain!(self.reads(), self.writes()), @@ -158,6 +161,12 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { transcript, )?; + assert_eq!( + points_offset + lookup_opening_points.len(), + self.points_offset + ); + let x_offset = points_offset + lookup_opening_points.len(); + let y_offset = x_offset + 1; let (dim_xs, read_ts_poly_xs, final_cts_poly_xs, e_poly_xs) = self .chunks .iter() @@ -167,8 +176,6 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { 
transcript.write_field_elements(&chunk_poly_evals).unwrap(); transcript.write_field_elements(&e_poly_xs).unwrap(); - let x_offset = self.points_offset; - let y_offset = x_offset + 1; ( Evaluation::new(chunk.dim.offset, x_offset, chunk_poly_evals[0]), Evaluation::new(chunk.read_ts_poly.offset, x_offset, chunk_poly_evals[1]), @@ -189,7 +196,7 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { Vec>>, )>(); - let opening_points = vec![x, y]; + lookup_opening_points.extend_from_slice(&[x, y]); let opening_evals = chain!( dim_xs, read_ts_poly_xs, @@ -197,7 +204,8 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { e_poly_xs.concat() ) .collect_vec(); + lookup_opening_evals.extend_from_slice(&opening_evals); - Ok((opening_points, opening_evals)) + Ok(()) } } diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index e29df27..a7aae10 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -112,17 +112,14 @@ impl Memory { #[derive(Clone, Debug)] pub(in crate::backend::lookup::lasso) struct MemoryCheckingVerifier { - /// offset of MemoryCheckingProver instance opening points - points_offset: usize, /// chunks with the same bits size chunks: Vec>, _marker: PhantomData, } impl<'a, F: PrimeField> MemoryCheckingVerifier { - pub fn new(points_offset: usize, chunks: Vec>) -> Self { + pub fn new(chunks: Vec>) -> Self { Self { - points_offset, chunks, _marker: PhantomData, } @@ -133,10 +130,13 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { num_chunks: usize, num_reads: usize, polys_offset: usize, + points_offset: usize, gamma: &F, tau: &F, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, transcript: &mut impl FieldTranscriptRead, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { let num_memories: usize = 
self.chunks.iter().map(|chunk| chunk.num_memories()).sum(); let memory_bits = self.chunks[0].chunk_bits(); let (read_write_xs, x) = verify_grand_product( @@ -176,29 +176,36 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { .into_iter() .multiunzip::<(Vec<_>, Vec<_>, Vec<_>, Vec>)>(); - let opening_evals = self - .opening_evals( - num_chunks, - polys_offset, - &dim_xs, - &read_ts_poly_xs, - &final_cts_poly_ys, - &e_poly_xs.concat(), - ) - .collect_vec(); + self.opening_evals( + num_chunks, + polys_offset, + points_offset, + &lookup_opening_points, + lookup_opening_evals, + &dim_xs, + &read_ts_poly_xs, + &final_cts_poly_ys, + &e_poly_xs.concat(), + ); + lookup_opening_points.extend_from_slice(&[x, y]); - Ok((vec![x, y], opening_evals)) + Ok(()) } fn opening_evals( &self, num_chunks: usize, polys_offset: usize, + points_offset: usize, + lookup_opening_points: &Vec>, + lookup_opening_evals: &mut Vec>, dim_xs: &[F], read_ts_poly_xs: &[F], final_cts_poly_ys: &[F], e_poly_xs: &[F], - ) -> impl Iterator> { + ) { + let x_offset = points_offset + lookup_opening_points.len(); + let y_offset = x_offset + 1; let (dim_xs, read_ts_poly_xs, final_cts_poly_ys) = self .chunks .iter() @@ -206,13 +213,9 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { .map(|(i, chunk)| { let chunk_polys_index = chunk.chunk_polys_index(polys_offset, num_chunks); ( - Evaluation::new(chunk_polys_index[0], self.points_offset, dim_xs[i]), - Evaluation::new(chunk_polys_index[1], self.points_offset, read_ts_poly_xs[i]), - Evaluation::new( - chunk_polys_index[2], - self.points_offset + 1, - final_cts_poly_ys[i], - ), + Evaluation::new(chunk_polys_index[0], x_offset, dim_xs[i]), + Evaluation::new(chunk_polys_index[1], x_offset, read_ts_poly_xs[i]), + Evaluation::new(chunk_polys_index[2], y_offset, final_cts_poly_ys[i]), ) }) .multiunzip::<(Vec>, Vec>, Vec>)>(); @@ -224,9 +227,11 @@ impl<'a, F: PrimeField> MemoryCheckingVerifier { .flat_map(|chunk| chunk.memory_indices()) .zip(e_poly_xs) 
.map(|(memory_index, &e_poly_x)| { - Evaluation::new(e_poly_offset + memory_index, self.points_offset, e_poly_x) + Evaluation::new(e_poly_offset + memory_index, x_offset, e_poly_x) }) .collect_vec(); - chain!(dim_xs, read_ts_poly_xs, final_cts_poly_ys, e_poly_xs) + lookup_opening_evals.extend_from_slice( + &chain!(dim_xs, read_ts_poly_xs, final_cts_poly_ys, e_poly_xs).collect_vec(), + ); } } diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs index af03eb6..93efb05 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs @@ -275,13 +275,15 @@ impl< pub fn prove_sum_check( points_offset: usize, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, table: &Box>, input_poly: &Poly, e_polys: &[&Poly], r: &[F], num_vars: usize, transcript: &mut impl TranscriptWrite, F>, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { Surge::::prove_sum_check( table, input_poly, @@ -289,6 +291,8 @@ impl< r, num_vars, points_offset, + lookup_opening_points, + lookup_opening_evals, transcript, ) } @@ -339,6 +343,8 @@ impl< pub fn memory_checking<'a>( points_offset: usize, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, table: &Box>, subtable_polys: &'a [&MultilinearPolynomial], dims: &'a [Poly], @@ -348,7 +354,7 @@ impl< gamma: &F, tau: &F, transcript: &mut impl FieldTranscriptWrite, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { let mut memory_checking = LassoProver::::prepare_memory_checking( points_offset, &table, @@ -361,16 +367,18 @@ impl< &tau, ); - let (mem_check_opening_points, mem_check_opening_evals) = memory_checking + memory_checking .iter_mut() - .map(|memory_checking| memory_checking.prove(transcript)) - .collect::>, Vec>)>, Error>>()? 
- .into_iter() - .unzip::<_, _, Vec<_>, Vec<_>>(); - Ok(( - mem_check_opening_points.concat(), - mem_check_opening_evals.concat(), - )) + .map(|memory_checking| { + memory_checking.prove( + points_offset, + lookup_opening_points, + lookup_opening_evals, + transcript, + ) + }) + .collect::, Error>>()?; + Ok(()) } pub fn commit( diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index ade0fc8..9655b74 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -135,8 +135,10 @@ impl< r: &[F], num_vars: usize, points_offset: usize, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, transcript: &mut impl TranscriptWrite, F>, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { let claimed_sum = Self::sum_check_claim(&r, &table, &e_polys); assert_eq!(claimed_sum, input_poly.evaluate(r)); @@ -158,7 +160,7 @@ impl< transcript, )?; - let points = vec![r.to_vec(), x]; + lookup_opening_points.extend_from_slice(&[r.to_vec(), x]); let pcs_query = Self::pcs_query(&expression, 0); let evals = pcs_query .into_iter() @@ -178,8 +180,9 @@ impl< claimed_sum, )]) .collect_vec(); + lookup_opening_evals.extend_from_slice(&evals); - Ok((points, evals)) + Ok(()) } pub fn sum_check_claim( diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index f4c489e..20e9aae 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -84,14 +84,14 @@ impl DecomposableTable for AndTable { memory_index } - fn memory_to_subtable_index(&self, memory_index: usize) -> usize { + fn memory_to_subtable_index(&self, _memory_index: usize) -> usize { 0 } } #[cfg(test)] mod test { - use std::{array, iter}; + use std::array; use super::AndTable; use crate::{ @@ -124,35 +124,23 @@ mod test { 
ff::PrimeField, grumpkin, }; - use itertools::Itertools; use num_integer::Integer; use rand::RngCore; - fn rand_vanilla_plonk_with_lasso_lookup_circuit( + fn rand_lasso_lookup_circuit( num_vars: usize, table: Box>, mut preprocess_rng: impl RngCore, mut witness_rng: impl RngCore, ) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { let size = 1 << num_vars; - let mut polys = [(); 13].map(|_| vec![F::ZERO; size]); - - let [t_l, t_r, t_o] = [(); 3].map(|_| { - iter::empty() - .chain([F::ZERO, F::ZERO]) - .chain(iter::repeat_with(|| F::random(&mut preprocess_rng))) - .take(size) - .collect_vec() - }); - polys[7] = t_l; - polys[8] = t_r; - polys[9] = t_o; + let mut polys = [(); 9].map(|_| vec![F::ZERO; size]); let instances = rand_vec(num_vars, &mut witness_rng); polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); let mut permutation = Permutation::default(); - for poly in [10, 11, 12] { + for poly in [6, 7, 8] { permutation.copy((poly, 1), (poly, 1)); } for idx in 0..size - 1 { @@ -160,18 +148,17 @@ mod test { let [w_l, w_r, w_o] = if use_copy { let [l_copy_idx, r_copy_idx] = [(); 2].map(|_| { ( - rand_idx(10..13, &mut preprocess_rng), + rand_idx(6..9, &mut preprocess_rng), rand_idx(1..idx, &mut preprocess_rng), ) }); - permutation.copy(l_copy_idx, (10, idx)); - permutation.copy(r_copy_idx, (11, idx)); + permutation.copy(l_copy_idx, (6, idx)); + permutation.copy(r_copy_idx, (7, idx)); let w_l = polys[l_copy_idx.0][l_copy_idx.1]; let w_r = polys[r_copy_idx.0][r_copy_idx.1]; - let w_o = F::from( - (usize_from_bits_le(&fe_to_bits_le(w_l)) - & usize_from_bits_le(&fe_to_bits_le(w_r))) as u64, - ); + let w_o = usize_from_bits_le(&fe_to_bits_le(w_l)) + & usize_from_bits_le(&fe_to_bits_le(w_r)); + let w_o = F::from(w_o as u64); [w_l, w_r, w_o] } else { let [w_l, w_r] = [(); 2].map(|_| witness_rng.next_u64()); @@ -179,16 +166,36 @@ mod test { [F::from(w_l), F::from(w_r), F::from(w_o)] }; - let values = vec![(10, w_l), (11, w_r), (12, w_o)]; + let q_c = 
F::random(&mut preprocess_rng); + let values = if preprocess_rng.next_u32().is_even() { + vec![ + (1, F::ONE), + (2, F::ONE), + (4, -F::ONE), + (5, q_c), + (6, w_l), + (7, w_r), + (8, w_o), + ] + } else { + vec![ + (3, F::ONE), + (4, -F::ONE), + (5, q_c), + (6, w_l), + (7, w_r), + (8, w_o), + ] + }; for (poly, value) in values { polys[poly][idx] = value; } } - let [_, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = polys; - let circuit_info = vanilla_plonk_with_lasso_lookup_circuit_info( + let [_, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = polys; + let circuit_info = lasso_lookup_circuit_info( num_vars, instances.len(), - [q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o], + [q_l, q_r, q_m, q_o, q_c], table, permutation.into_cycles(), ); @@ -198,14 +205,14 @@ mod test { ) } - fn vanilla_plonk_with_lasso_lookup_circuit_info( + fn lasso_lookup_circuit_info( num_vars: usize, num_instances: usize, - preprocess_polys: [Vec; 9], + preprocess_polys: [Vec; 5], table: Box>, permutations: Vec>, ) -> PlonkishCircuitInfo { - let [pi, q_l, q_r, q_m, q_o, q_c, q_lookup, t_l, t_r, t_o, w_l, w_r, w_o] = + let [_, _, _, _, _, _, w_l, w_r, w_o] = &array::from_fn(|poly| Query::new(poly, Rotation::cur())) .map(Expression::::Polynomial); let lasso_lookup_input = w_o.clone(); @@ -236,7 +243,7 @@ mod test { fn [<$name _hyperplonk_vanilla_plonk_with_lasso_lookup>]() { run_plonkish_backend::<_, HyperPlonk<$pcs>, Keccak256Transcript<_>, _>($num_vars_range, |num_vars| { let table = Box::new(AndTable::<$f>::new()); - rand_vanilla_plonk_with_lasso_lookup_circuit(num_vars, table, seeded_std_rng(), seeded_std_rng()) + rand_lasso_lookup_circuit(num_vars, table, seeded_std_rng(), seeded_std_rng()) }); } } diff --git a/plonkish_backend/src/backend/lookup/lasso/test/range.rs b/plonkish_backend/src/backend/lookup/lasso/test/range.rs index af6c918..974a5db 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/range.rs +++ 
b/plonkish_backend/src/backend/lookup/lasso/test/range.rs @@ -148,25 +148,36 @@ mod test { permutation.copy((poly, 1), (poly, 1)); } for idx in 0..size - 1 { - let [w_l, w_r] = if preprocess_rng.next_u32().is_even() && idx > 1 { - let [l_copy_idx, r_copy_idx] = [(); 2].map(|_| { - ( - rand_idx(6..9, &mut preprocess_rng), - rand_idx(1..idx, &mut preprocess_rng), - ) - }); + let w_l = if preprocess_rng.next_u32().is_even() && idx > 1 { + let l_copy_idx = (6, rand_idx(1..idx, &mut preprocess_rng)); permutation.copy(l_copy_idx, (6, idx)); - permutation.copy(r_copy_idx, (7, idx)); - [ - polys[l_copy_idx.0][l_copy_idx.1], - polys[r_copy_idx.0][r_copy_idx.1], + polys[l_copy_idx.0][l_copy_idx.1] + } else { + let value = witness_rng.next_u64() as usize; + F::from_u128(value.pow(2) as u128) + }; + let w_r = F::from(witness_rng.next_u64()); + let q_c = F::random(&mut preprocess_rng); + let values = if preprocess_rng.next_u32().is_even() { + vec![ + (1, F::ONE), + (2, F::ONE), + (4, -F::ONE), + (5, q_c), + (6, w_l), + (7, w_r), + (8, w_l + w_r + q_c + polys[0][idx]), ] } else { - let a = witness_rng.next_u64() as usize; - let b = witness_rng.next_u64(); - [F::from_u128(a.pow(2) as u128), F::from(b)] + vec![ + (3, F::ONE), + (4, -F::ONE), + (5, q_c), + (6, w_l), + (7, w_r), + (8, w_l * w_r + q_c + polys[0][idx]), + ] }; - let values = [(6, w_l), (7, w_r)]; for (poly, value) in values { polys[poly][idx] = value; } @@ -205,7 +216,7 @@ mod test { preprocess_polys: preprocess_polys.to_vec(), num_witness_polys: vec![3], num_challenges: vec![0], - constraints: vec![], + constraints: vec![q_l * w_l + q_r * w_r + q_m * w_l * w_r + q_o * w_o + q_c + pi], lookups: vec![vec![]], lasso_lookup: Some((lasso_lookup_input, lasso_lookup_indices, table)), permutations, diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 9efe52e..de3eb5a 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ 
b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -59,9 +59,11 @@ impl< num_vars: usize, polys_offset: usize, points_offset: usize, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, r: &[F], transcript: &mut impl FieldTranscriptRead, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { let expression = Surge::::sum_check_expression(&table); let claim = transcript.read_field_element()?; let (_, x) = ClassicSumCheck::>::verify( @@ -71,7 +73,8 @@ impl< claim, transcript, )?; - let points = vec![r.to_vec(), x]; + lookup_opening_points.extend_from_slice(&[r.to_vec(), x]); + let pcs_query = Surge::::pcs_query(&expression, 0); let e_polys_offset = polys_offset + 1 + table.chunk_bits().len() * 3; let evals = pcs_query @@ -82,8 +85,8 @@ impl< }) .chain([Evaluation::new(polys_offset, points_offset, claim)]) .collect_vec(); - - Ok((points, evals)) + lookup_opening_evals.extend_from_slice(&evals); + Ok(()) } fn chunks(table: &Box>) -> Vec> { @@ -141,10 +144,7 @@ impl< chunk_map .into_iter() .enumerate() - .map(|(index, (_, chunks))| { - let points_offset = points_offset + 2 + 2 * index; - MemoryCheckingVerifier::new(points_offset, chunks) - }) + .map(|(_, (_, chunks))| MemoryCheckingVerifier::new(chunks)) .collect_vec() } @@ -152,30 +152,30 @@ impl< num_reads: usize, polys_offset: usize, points_offset: usize, + lookup_opening_points: &mut Vec>, + lookup_opening_evals: &mut Vec>, table: &Box>, gamma: &F, tau: &F, transcript: &mut impl FieldTranscriptRead, - ) -> Result<(Vec>, Vec>), Error> { + ) -> Result<(), Error> { let memory_checking = Self::prepare_memory_checking(points_offset, table); - let (mem_check_opening_points, mem_check_opening_evals) = memory_checking + memory_checking .iter() .map(|memory_checking| { memory_checking.verify( table.chunk_bits().len(), num_reads, polys_offset, + points_offset, &gamma, &tau, + lookup_opening_points, + lookup_opening_evals, transcript, ) }) - .collect::>, Vec>)>, Error>>()? 
- .into_iter() - .unzip::<_, _, Vec<_>, Vec<_>>(); - Ok(( - mem_check_opening_points.concat(), - mem_check_opening_evals.concat(), - )) + .collect::, Error>>()?; + Ok(()) } } From a4c1c5ac9213a82eb97919d9b578c0fb9283c967 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Fri, 10 Nov 2023 14:14:46 +0900 Subject: [PATCH 13/27] Add assertion on `index_bits` --- plonkish_backend/src/backend/lookup/lasso/prover/surge.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 9655b74..728b601 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -66,6 +66,7 @@ impl< .map(|i| { let mut index_bits = fe_to_bits_le(nz_poly[i]); index_bits.truncate(table.chunk_bits().iter().sum()); + assert_eq!(usize_from_bits_le(&fe_to_bits_le(nz_poly[i])), usize_from_bits_le(&index_bits)); let mut chunked_index = repeat(0).take(num_chunks).collect_vec(); let chunked_index_bits = table.subtable_indices(index_bits); From ac2b24d1d3c0add576c1ab9072d8b08009eab591 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Tue, 14 Nov 2023 06:09:57 +0900 Subject: [PATCH 14/27] Fix misleading variable name --- .../src/backend/lookup/lasso/memory_checking/prover.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs index 94f201d..ebe97ed 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs @@ -167,7 +167,7 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { ); let x_offset = points_offset + lookup_opening_points.len(); let y_offset = x_offset + 1; - let (dim_xs, read_ts_poly_xs, final_cts_poly_xs, e_poly_xs) = self + let (dim_xs, read_ts_poly_xs, 
final_cts_poly_ys, e_poly_xs) = self .chunks .iter() .map(|chunk| { @@ -200,7 +200,7 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { let opening_evals = chain!( dim_xs, read_ts_poly_xs, - final_cts_poly_xs, + final_cts_poly_ys, e_poly_xs.concat() ) .collect_vec(); From 2e213d276f581c2a44065cfbefa0ad568fd812ee Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Wed, 15 Nov 2023 22:35:15 +0900 Subject: [PATCH 15/27] Remove unnecessary things --- benchmark/benches/proof_system.rs | 2 +- plonkish_backend/src/backend.rs | 2 +- plonkish_backend/src/backend/hyperplonk.rs | 2 - .../src/backend/hyperplonk/preprocessor.rs | 46 +-- .../src/backend/hyperplonk/verifier.rs | 5 +- .../src/backend/lookup/lasso/verifier/mod.rs | 3 +- plonkish_backend/src/backend/lookup/logup.rs | 292 ------------------ plonkish_backend/src/backend/lookup/mod.rs | 1 - plonkish_backend/src/frontend/halo2/lookup.rs | 27 +- plonkish_backend/src/lib.rs | 3 - 10 files changed, 7 insertions(+), 376 deletions(-) delete mode 100644 plonkish_backend/src/backend/lookup/logup.rs diff --git a/benchmark/benches/proof_system.rs b/benchmark/benches/proof_system.rs index 8c5c384..99000df 100644 --- a/benchmark/benches/proof_system.rs +++ b/benchmark/benches/proof_system.rs @@ -15,7 +15,7 @@ use halo2_proofs::{ }; use itertools::Itertools; use plonkish_backend::{ - backend::{self, lookup::logup, PlonkishBackend, PlonkishCircuit}, + backend::{self, PlonkishBackend, PlonkishCircuit}, frontend::halo2::{circuit::VanillaPlonk, CircuitExt, Halo2Circuit}, halo2_curves::bn256::{Bn256, Fr}, pcs::multilinear, diff --git a/plonkish_backend/src/backend.rs b/plonkish_backend/src/backend.rs index 9e39f94..0359fe9 100644 --- a/plonkish_backend/src/backend.rs +++ b/plonkish_backend/src/backend.rs @@ -4,7 +4,7 @@ use crate::{ arithmetic::Field, expression::Expression, transcript::{TranscriptRead, TranscriptWrite}, - Deserialize, DeserializeOwned, Itertools, Serialize, + Itertools, }, Error, }; diff --git 
a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index cf810ac..7e37c0f 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -46,7 +46,6 @@ where pub(crate) num_instances: Vec, pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, - pub(crate) lookups: Vec, Expression)>>, /// assume we have at most Just One Lookup Table pub(crate) lasso_lookup: Option<(Expression, Expression, Box>)>, pub(crate) lookup_polys_offset: usize, @@ -166,7 +165,6 @@ where num_instances: circuit_info.num_instances.clone(), num_witness_polys: circuit_info.num_witness_polys.clone(), num_challenges: circuit_info.num_challenges.clone(), - lookups: circuit_info.lookups.clone(), lasso_lookup: circuit_info.lasso_lookup.clone(), lookup_polys_offset, lookup_points_offset, diff --git a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs index 08d53f5..e8329fa 100644 --- a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs +++ b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs @@ -17,7 +17,7 @@ pub(super) fn batch_size(circuit_info: &PlonkishCircuitInfo) - circuit_info.num_witness_polys.clone(), [div_ceil( num_permutation_polys, - max_degree(circuit_info, None) - 1 + max_degree(circuit_info) - 1 )], ] .sum() @@ -30,11 +30,7 @@ pub(super) fn compose( let [beta, gamma, alpha] = &array::from_fn(|idx| Expression::::Challenge(challenge_offset + idx)); - // To use Lasso as lookup argument, we will run Sumcheck for Lasso seperately - // Will generalize this function later - // let (lookup_constraints, lookup_zero_checks) = lookup_constraints(circuit_info, beta, gamma); - - let max_degree = max_degree(circuit_info, None); + let max_degree = max_degree(circuit_info); let (num_permutation_z_polys, permutation_constraints) = permutation_constraints(circuit_info, max_degree, beta, gamma, 0); @@ -53,12 +49,7 @@ pub(super) fn compose( 
pub(super) fn max_degree( circuit_info: &PlonkishCircuitInfo, - lookup_constraints: Option<&[Expression]>, ) -> usize { - // let lookup_constraints = lookup_constraints.map(Cow::Borrowed).unwrap_or_else(|| { - // let dummy_challenge = Expression::zero(); - // Cow::Owned(self::lookup_constraints(circuit_info, &dummy_challenge, &dummy_challenge).0) - // }); iter::empty() .chain(circuit_info.constraints.iter().map(Expression::degree)) .chain(circuit_info.max_degree) @@ -67,39 +58,6 @@ pub(super) fn max_degree( .unwrap() } -// LogUp lookup_constraints -pub(super) fn lookup_constraints( - circuit_info: &PlonkishCircuitInfo, - beta: &Expression, - gamma: &Expression, -) -> (Vec>, Vec>) { - let m_offset = circuit_info.num_poly() + circuit_info.permutation_polys().len(); - let h_offset = m_offset + circuit_info.lookups.len(); - let constraints = circuit_info - .lookups - .iter() - .zip(m_offset..) - .zip(h_offset..) - .flat_map(|((lookup, m), h)| { - let [m, h] = &[m, h] - .map(|poly| Query::new(poly, Rotation::cur())) - .map(Expression::::Polynomial); - let (inputs, tables) = lookup - .iter() - .map(|(input, table)| (input, table)) - .unzip::<_, _, Vec<_>, Vec<_>>(); - let input = &Expression::distribute_powers(inputs, beta); - let table = &Expression::distribute_powers(tables, beta); - [h * (input + gamma) * (table + gamma) - (table + gamma) + m * (input + gamma)] - }) - .collect_vec(); - let sum_check = (h_offset..) 
- .take(circuit_info.lookups.len()) - .map(|h| Query::new(h, Rotation::cur()).into()) - .collect_vec(); - (constraints, sum_check) -} - pub(crate) fn permutation_constraints( circuit_info: &PlonkishCircuitInfo, max_degree: usize, diff --git a/plonkish_backend/src/backend/hyperplonk/verifier.rs b/plonkish_backend/src/backend/hyperplonk/verifier.rs index 2a414c2..95d57f9 100644 --- a/plonkish_backend/src/backend/hyperplonk/verifier.rs +++ b/plonkish_backend/src/backend/hyperplonk/verifier.rs @@ -14,10 +14,7 @@ use crate::{ }, Error, }; -use std::{ - collections::{BTreeSet, HashMap}, - iter, -}; +use std::collections::{BTreeSet, HashMap}; use super::HyperPlonkVerifierParam; diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index de3eb5a..703b5db 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -121,7 +121,6 @@ impl< } fn prepare_memory_checking( - points_offset: usize, table: &Box>, ) -> Vec> { let chunks = Self::chunks(table); @@ -159,7 +158,7 @@ impl< tau: &F, transcript: &mut impl FieldTranscriptRead, ) -> Result<(), Error> { - let memory_checking = Self::prepare_memory_checking(points_offset, table); + let memory_checking = Self::prepare_memory_checking(table); memory_checking .iter() .map(|memory_checking| { diff --git a/plonkish_backend/src/backend/lookup/logup.rs b/plonkish_backend/src/backend/lookup/logup.rs deleted file mode 100644 index 14a0745..0000000 --- a/plonkish_backend/src/backend/lookup/logup.rs +++ /dev/null @@ -1,292 +0,0 @@ -use std::{ - collections::{HashMap, HashSet}, - hash::Hash, - iter, - marker::PhantomData, -}; - -use halo2_curves::ff::{BatchInvert, Field, PrimeField}; -use itertools::Itertools; - -use crate::{ - pcs::{CommitmentChunk, PolynomialCommitmentScheme}, - poly::{multilinear::MultilinearPolynomial, Polynomial}, - util::{ - arithmetic::{div_ceil, powers, sum, 
BooleanHypercube}, - end_timer, - expression::{CommonPolynomial, Expression}, - parallel::{num_threads, par_map_collect, parallelize, parallelize_iter}, - start_timer, - transcript::TranscriptWrite, - }, - Error, -}; - -use super::{MVLookupStrategy, MVLookupStrategyOutput}; - -#[derive(Clone, Debug)] -pub struct LogUp>(PhantomData, PhantomData); - -impl> LogUp { - pub fn lookup_compressed_polys( - lookups: &[Vec<(Expression, Expression)>], - polys: &[&MultilinearPolynomial], - challenges: &[F], - betas: &[F], - ) -> Vec<[MultilinearPolynomial; 2]> { - if lookups.is_empty() { - return Default::default(); - } - - let num_vars = polys[0].num_vars(); - let expression = lookups - .iter() - .flat_map(|lookup| lookup.iter().map(|(input, table)| (input + table))) - .sum::>(); - let lagranges = { - let bh = BooleanHypercube::new(num_vars).iter().collect_vec(); - expression - .used_langrange() - .into_iter() - .map(|i| (i, bh[i.rem_euclid(1 << num_vars) as usize])) - .collect::>() - }; - lookups - .iter() - .map(|lookup| { - Self::lookup_compressed_poly(lookup, &lagranges, polys, challenges, betas) - }) - .collect() - } - - pub fn lookup_compressed_poly( - lookup: &[(Expression, Expression)], - lagranges: &HashSet<(i32, usize)>, - polys: &[&MultilinearPolynomial], - challenges: &[F], - betas: &[F], - ) -> [MultilinearPolynomial; 2] { - let num_vars = polys[0].num_vars(); - let bh = BooleanHypercube::new(num_vars); - let compress = |expressions: &[&Expression]| { - betas - .iter() - .copied() - .zip(expressions.iter().map(|expression| { - let mut compressed = vec![F::ZERO; 1 << num_vars]; - parallelize(&mut compressed, |(compressed, start)| { - for (b, compressed) in (start..).zip(compressed) { - *compressed = expression.evaluate( - &|constant| constant, - &|common_poly| match common_poly { - CommonPolynomial::Identity => F::from(b as u64), - CommonPolynomial::Lagrange(i) => { - if lagranges.contains(&(i, b)) { - F::ONE - } else { - F::ZERO - } - } - CommonPolynomial::EqXY(_) 
=> unreachable!(), - }, - &|query| polys[query.poly()][bh.rotate(b, query.rotation())], - &|challenge| challenges[challenge], - &|value| -value, - &|lhs, rhs| lhs + &rhs, - &|lhs, rhs| lhs * &rhs, - &|value, scalar| value * &scalar, - ); - } - }); - MultilinearPolynomial::new(compressed) - })) - .sum::>() - }; - - let (inputs, tables) = lookup - .iter() - .map(|(input, table)| (input, table)) - .unzip::<_, _, Vec<_>, Vec<_>>(); - - let timer = start_timer(|| "compressed_input_poly"); - let compressed_input_poly = compress(&inputs); - end_timer(timer); - - let timer = start_timer(|| "compressed_table_poly"); - let compressed_table_poly = compress(&tables); - end_timer(timer); - - [compressed_input_poly, compressed_table_poly] - } -} - -impl> LogUp { - pub(crate) fn lookup_m_polys( - compressed_polys: &[[MultilinearPolynomial; 2]], - ) -> Result>, Error> { - compressed_polys - .iter() - .map(|compressed_polys| Self::lookup_m_poly(compressed_polys)) - .try_collect() - } - - pub(super) fn lookup_m_poly( - compressed_polys: &[MultilinearPolynomial; 2], - ) -> Result, Error> { - let [input, table] = compressed_polys; - - let counts = { - let indice_map = table.iter().zip(0..).collect::>(); - - let chunk_size = div_ceil(input.evals().len(), num_threads()); - let num_chunks = div_ceil(input.evals().len(), chunk_size); - let mut counts = vec![HashMap::new(); num_chunks]; - let mut valids = vec![true; num_chunks]; - parallelize_iter( - counts - .iter_mut() - .zip(valids.iter_mut()) - .zip((0..).step_by(chunk_size)), - |((count, valid), start)| { - for input in input[start..].iter().take(chunk_size) { - if let Some(idx) = indice_map.get(input) { - count - .entry(*idx) - .and_modify(|count| *count += 1) - .or_insert(1); - } else { - *valid = false; - break; - } - } - }, - ); - if valids.iter().any(|valid| !valid) { - return Err(Error::InvalidSnark("Invalid lookup input".to_string())); - } - counts - }; - - let mut m = vec![0; 1 << input.num_vars()]; - for (idx, count) in 
counts.into_iter().flatten() { - m[idx] += count; - } - let m = par_map_collect(m, |count| match count { - 0 => F::ZERO, - 1 => F::ONE, - count => F::from(count), - }); - Ok(MultilinearPolynomial::new(m)) - } - - pub(super) fn lookup_h_polys( - compressed_polys: &[[MultilinearPolynomial; 2]], - m_polys: &[MultilinearPolynomial], - gamma: &F, - ) -> Vec> { - compressed_polys - .iter() - .zip(m_polys.iter()) - .map(|(compressed_polys, m_poly)| Self::lookup_h_poly(compressed_polys, m_poly, gamma)) - .collect() - } - - pub(super) fn lookup_h_poly( - compressed_polys: &[MultilinearPolynomial; 2], - m_poly: &MultilinearPolynomial, - gamma: &F, - ) -> MultilinearPolynomial { - let [input, table] = compressed_polys; - let mut h_input = vec![F::ZERO; 1 << input.num_vars()]; - let mut h_table = vec![F::ZERO; 1 << input.num_vars()]; - - parallelize(&mut h_input, |(h_input, start)| { - for (h_input, input) in h_input.iter_mut().zip(input[start..].iter()) { - *h_input = *gamma + input; - } - }); - parallelize(&mut h_table, |(h_table, start)| { - for (h_table, table) in h_table.iter_mut().zip(table[start..].iter()) { - *h_table = *gamma + table; - } - }); - - let chunk_size = div_ceil(2 * h_input.len(), num_threads()); - parallelize_iter( - iter::empty() - .chain(h_input.chunks_mut(chunk_size)) - .chain(h_table.chunks_mut(chunk_size)), - |h| { - h.iter_mut().batch_invert(); - }, - ); - - parallelize(&mut h_input, |(h_input, start)| { - for (h_input, (h_table, m)) in h_input - .iter_mut() - .zip(h_table[start..].iter().zip(m_poly[start..].iter())) - { - *h_input -= *h_table * m; - } - }); - - if cfg!(feature = "sanity-check") { - assert_eq!(sum::(&h_input), F::ZERO); - } - - MultilinearPolynomial::new(h_input) - } -} - -impl< - F: Field + PrimeField + Hash, - Pcs: PolynomialCommitmentScheme>, - > MVLookupStrategy for LogUp -{ - type Pcs = Pcs; - - fn preprocess( - lookups: &[Vec<(Expression, Expression)>], - polys: &[&MultilinearPolynomial], - challenges: &mut Vec, - ) -> Result; 
2]>, Error> { - let timer = start_timer(|| format!("lookup_compressed_polys-{}", lookups.len())); - let lookup_compressed_polys = { - let beta = challenges.last().unwrap(); - let max_lookup_width = lookups.iter().map(Vec::len).max().unwrap_or_default(); - let betas = powers(*beta).take(max_lookup_width).collect_vec(); - Self::lookup_compressed_polys(lookups, &polys, &challenges, &betas) - }; - end_timer(timer); - Ok(lookup_compressed_polys) - } - - fn commit( - pp: &Pcs::ProverParam, - lookup_compressed_polys: &[[MultilinearPolynomial; 2]], - challenges: &mut Vec, - transcript: &mut impl TranscriptWrite, F>, - ) -> Result, crate::Error> { - let timer = start_timer(|| format!("lookup_m_polys-{}", lookup_compressed_polys.len())); - let lookup_m_polys = Self::lookup_m_polys(&lookup_compressed_polys)?; - end_timer(timer); - - let lookup_m_comms = Pcs::batch_commit_and_write(&pp, &lookup_m_polys, transcript)?; - - let gamma = transcript.squeeze_challenge(); - challenges.extend([gamma]); - - let timer = start_timer(|| format!("lookup_h_polys-{}", lookup_compressed_polys.len())); - let lookup_h_polys = - Self::lookup_h_polys(&lookup_compressed_polys, &lookup_m_polys, &gamma); - end_timer(timer); - - let lookup_h_comms = Pcs::batch_commit_and_write(&pp, &lookup_h_polys, transcript)?; - - let mut polys = Vec::with_capacity(2 * lookup_compressed_polys.len()); - polys.extend([lookup_m_polys, lookup_h_polys]); - let mut comms = Vec::with_capacity(lookup_m_comms.len() + lookup_h_comms.len()); - comms.extend([lookup_m_comms, lookup_h_comms]); - Ok(MVLookupStrategyOutput { polys, comms }) - } -} diff --git a/plonkish_backend/src/backend/lookup/mod.rs b/plonkish_backend/src/backend/lookup/mod.rs index 0a0bdcb..8c4791e 100644 --- a/plonkish_backend/src/backend/lookup/mod.rs +++ b/plonkish_backend/src/backend/lookup/mod.rs @@ -10,7 +10,6 @@ use crate::{ }; pub mod lasso; -pub mod logup; pub struct MVLookupStrategyOutput< F: Field, diff --git 
a/plonkish_backend/src/frontend/halo2/lookup.rs b/plonkish_backend/src/frontend/halo2/lookup.rs index dad76a5..f115c78 100644 --- a/plonkish_backend/src/frontend/halo2/lookup.rs +++ b/plonkish_backend/src/frontend/halo2/lookup.rs @@ -1,9 +1,7 @@ -use std::fmt::Debug; - use halo2_curves::ff::PrimeField; use halo2_proofs::plonk::ConstraintSystem; -use crate::{poly::multilinear::MultilinearPolynomial, util::expression::Expression}; +use crate::util::expression::Expression; /// This is a trait that decomposable tables provide implementations for. /// This will be converted into `DecomposableTable` @@ -22,26 +20,3 @@ pub trait SubtableStrategy< /// The `g` function that computes T[r] = g(T_1[r_1], ..., T_k[r_1], T_{k+1}[r_2], ..., T_{\alpha}[r_c]) fn combine_lookups(&self, config: Self::Config) -> Expression; } - -/// This is a trait that contains information about decomposable table to which -/// backend prover and verifier can ask -pub trait DecomposableTable: Clone + Debug + Sync { - const NUM_CHUNKS: usize; - const NUM_MEMORIES: usize; - - /// Returns multilinear extension polynomials of each subtable - fn subtable_polys(&self) -> Vec>; - - fn combine_lookup_expressions(&self, expressions: &[&Expression]) -> Expression; - - /// The `g` function that computes T[r] = g(T_1[r_1], ..., T_k[r_1], T_{k+1}[r_2], ..., T_{\alpha}[r_c]) - fn combine_lookups(&self, operands: &[F]) -> F; - - /// Returns the size of bits for each chunk. - /// Each chunk can have different bits. 
- fn chunk_bits(&self) -> [usize; Self::NUM_CHUNKS]; - - fn memory_to_subtable_index(&self, memory_index: usize) -> usize; - - fn memory_to_chunk_index(&self, memory_index: usize) -> usize; -} diff --git a/plonkish_backend/src/lib.rs b/plonkish_backend/src/lib.rs index 9cae148..054488d 100644 --- a/plonkish_backend/src/lib.rs +++ b/plonkish_backend/src/lib.rs @@ -1,7 +1,4 @@ #![allow(clippy::op_ref)] -#![feature(generic_const_exprs)] -#![feature(map_first_last)] -#![feature(int_log)] pub mod backend; pub mod frontend; From 71b3d7fff4895f6a0c8798cf4d7c6a4304a887cf Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Mon, 5 Feb 2024 17:07:26 +0900 Subject: [PATCH 16/27] Use debug message --- .../src/backend/lookup/lasso/memory_checking/prover.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs index ebe97ed..d9b6327 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/prover.rs @@ -122,9 +122,10 @@ impl<'a, F: PrimeField> MemoryCheckingProver<'a, F> { let claimed_final_read_0 = final_read.iter().product(); // sanity check - assert_eq!( + debug_assert_eq!( claimed_init_0 * claimed_write_0, - claimed_read_0 * claimed_final_read_0 + claimed_read_0 * claimed_final_read_0, + "Multiset hashes don't match", ); ( Some(claimed_read_0), From 5af95819f39ef8a1064719f9ac03be064531caaa Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Fri, 9 Feb 2024 02:43:10 +0900 Subject: [PATCH 17/27] Add missing consistency check at the last step of Lasso sumcheck --- .../src/backend/lookup/lasso/verifier/mod.rs | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 703b5db..a7388ee 100644 --- 
a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -6,8 +6,7 @@ use itertools::Itertools; use crate::{ pcs::{Evaluation, PolynomialCommitmentScheme}, piop::sum_check::{ - classic::{ClassicSumCheck, EvaluationsProver}, - SumCheck, + classic::{ClassicSumCheck, EvaluationsProver}, evaluate, SumCheck }, poly::multilinear::MultilinearPolynomial, util::transcript::{FieldTranscriptRead, TranscriptRead}, @@ -66,21 +65,33 @@ impl< ) -> Result<(), Error> { let expression = Surge::::sum_check_expression(&table); let claim = transcript.read_field_element()?; - let (_, x) = ClassicSumCheck::>::verify( + let (x_eval, x) = ClassicSumCheck::>::verify( &(), num_vars, expression.degree(), claim, transcript, )?; - lookup_opening_points.extend_from_slice(&[r.to_vec(), x]); + lookup_opening_points.extend_from_slice(&[r.to_vec(), x.clone()]); let pcs_query = Surge::::pcs_query(&expression, 0); - let e_polys_offset = polys_offset + 1 + table.chunk_bits().len() * 3; let evals = pcs_query - .iter() + .into_iter() .map(|query| { let value = transcript.read_field_element().unwrap(); + (query, value) + }) + .collect(); + if evaluate(&expression, num_vars, &evals, &[], &[r], &x) != x_eval { + return Err(Error::InvalidSnark( + "Unmatched between Lasso sum_check output and query evaluation".to_string(), + )); + } + let e_polys_offset = polys_offset + 1 + table.chunk_bits().len() * 3; + let evals = evals + .into_iter() + .sorted_by(|a, b| Ord::cmp(&a.0, &b.0)) + .map(|(query, value)| { Evaluation::new(e_polys_offset + query.poly(), points_offset + 1, value) }) .chain([Evaluation::new(polys_offset, points_offset, claim)]) From 8b64d04c6b29ac3f24db455ea3edb948156bf003 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Fri, 9 Feb 2024 02:46:55 +0900 Subject: [PATCH 18/27] Cleanup --- .../src/backend/lookup/lasso/prover/surge.rs | 10 ++-------- .../src/backend/lookup/lasso/verifier/mod.rs | 5 +++-- 2 files changed, 5 
insertions(+), 10 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 728b601..c1b20b2 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -162,8 +162,8 @@ impl< )?; lookup_opening_points.extend_from_slice(&[r.to_vec(), x]); - let pcs_query = Self::pcs_query(&expression, 0); - let evals = pcs_query + let evals = expression + .used_query() .into_iter() .map(|query| { transcript @@ -219,10 +219,4 @@ impl< let eq_xy = Expression::::eq_xy(0); eq_xy * exprs } - - pub fn pcs_query(expression: &Expression, offset: usize) -> BTreeSet { - let mut used_query = expression.used_query(); - used_query.retain(|query| query.poly() >= offset); - used_query - } } diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index a7388ee..17fe573 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -6,7 +6,8 @@ use itertools::Itertools; use crate::{ pcs::{Evaluation, PolynomialCommitmentScheme}, piop::sum_check::{ - classic::{ClassicSumCheck, EvaluationsProver}, evaluate, SumCheck + classic::{ClassicSumCheck, EvaluationsProver}, + evaluate, SumCheck, }, poly::multilinear::MultilinearPolynomial, util::transcript::{FieldTranscriptRead, TranscriptRead}, @@ -74,7 +75,7 @@ impl< )?; lookup_opening_points.extend_from_slice(&[r.to_vec(), x.clone()]); - let pcs_query = Surge::::pcs_query(&expression, 0); + let pcs_query = expression.used_query(); let evals = pcs_query .into_iter() .map(|query| { From 0c283227a9e265ab6a428267718757f690ed8d82 Mon Sep 17 00:00:00 2001 From: jeong0982 Date: Fri, 9 Feb 2024 17:34:59 +0900 Subject: [PATCH 19/27] Add new polynomial form for subtable polynomials --- .../src/backend/hyperplonk/preprocessor.rs | 4 +- 
plonkish_backend/src/backend/lookup/lasso.rs | 4 +- .../lookup/lasso/memory_checking/verifier.rs | 12 +++- .../src/backend/lookup/lasso/prover/surge.rs | 5 +- .../src/backend/lookup/lasso/test/and.rs | 19 ++++++- .../src/backend/lookup/lasso/test/range.rs | 32 ++++++++++- .../src/backend/lookup/lasso/verifier/mod.rs | 4 +- plonkish_backend/src/poly/multilinear.rs | 57 +++++++++++++++++++ 8 files changed, 126 insertions(+), 11 deletions(-) diff --git a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs index e8329fa..299d1d2 100644 --- a/plonkish_backend/src/backend/hyperplonk/preprocessor.rs +++ b/plonkish_backend/src/backend/hyperplonk/preprocessor.rs @@ -47,9 +47,7 @@ pub(super) fn compose( (num_permutation_z_polys, expression) } -pub(super) fn max_degree( - circuit_info: &PlonkishCircuitInfo, -) -> usize { +pub(super) fn max_degree(circuit_info: &PlonkishCircuitInfo) -> usize { iter::empty() .chain(circuit_info.constraints.iter().map(Expression::degree)) .chain(circuit_info.max_degree) diff --git a/plonkish_backend/src/backend/lookup/lasso.rs b/plonkish_backend/src/backend/lookup/lasso.rs index e3f1189..518e943 100644 --- a/plonkish_backend/src/backend/lookup/lasso.rs +++ b/plonkish_backend/src/backend/lookup/lasso.rs @@ -3,7 +3,8 @@ use std::{fmt::Debug, marker::PhantomData}; use halo2_curves::ff::{Field, PrimeField}; use crate::{ - pcs::PolynomialCommitmentScheme, poly::multilinear::MultilinearPolynomial, + pcs::PolynomialCommitmentScheme, + poly::multilinear::{MultilinearPolynomial, MultilinearPolynomialTerms}, util::expression::Expression, }; @@ -23,6 +24,7 @@ pub trait DecomposableTable: Debug + Sync + DecomposableTableClon /// Returns multilinear extension polynomials of each subtable fn subtable_polys(&self) -> Vec>; + fn subtable_polys_terms(&self) -> Vec>; fn combine_lookup_expressions(&self, expressions: Vec>) -> Expression; diff --git 
a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index a7aae10..f3e6eb1 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -6,7 +6,7 @@ use itertools::{chain, Itertools}; use crate::{ pcs::Evaluation, piop::gkr::verify_grand_product, - poly::multilinear::MultilinearPolynomial, + poly::multilinear::{MultilinearPolynomial, MultilinearPolynomialTerms}, util::{arithmetic::inner_product, transcript::FieldTranscriptRead}, Error, }; @@ -84,7 +84,7 @@ impl Chunk { write_xs[i], hash(&dim_x, &e_poly_xs[i], &(read_ts_poly_x + F::ONE)) ); - let subtable_poly_y = memory.subtable_poly.evaluate(y); + let subtable_poly_y = memory.subtable_poly_new.evaluate(y); assert_eq!(init_ys[i], hash(&id_poly_y, &subtable_poly_y, &F::ZERO)); assert_eq!( final_read_ys[i], @@ -99,13 +99,19 @@ impl Chunk { pub(in crate::backend::lookup::lasso) struct Memory { memory_index: usize, subtable_poly: MultilinearPolynomial, + subtable_poly_new: MultilinearPolynomialTerms, } impl Memory { - pub fn new(memory_index: usize, subtable_poly: MultilinearPolynomial) -> Self { + pub fn new( + memory_index: usize, + subtable_poly: MultilinearPolynomial, + s: MultilinearPolynomialTerms, + ) -> Self { Self { memory_index, subtable_poly, + subtable_poly_new: s, } } } diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index c1b20b2..519b5f3 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -66,7 +66,10 @@ impl< .map(|i| { let mut index_bits = fe_to_bits_le(nz_poly[i]); index_bits.truncate(table.chunk_bits().iter().sum()); - assert_eq!(usize_from_bits_le(&fe_to_bits_le(nz_poly[i])), usize_from_bits_le(&index_bits)); + assert_eq!( + 
usize_from_bits_le(&fe_to_bits_le(nz_poly[i])), + usize_from_bits_le(&index_bits) + ); let mut chunked_index = repeat(0).take(num_chunks).collect_vec(); let chunked_index_bits = table.subtable_indices(index_bits); diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index 20e9aae..538facc 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -5,7 +5,7 @@ use itertools::{izip, Itertools}; use crate::{ backend::lookup::lasso::DecomposableTable, - poly::multilinear::MultilinearPolynomial, + poly::multilinear::{MultilinearPolynomial, MultilinearPolynomialTerms, PolyExpr::*}, util::{ arithmetic::{inner_product, split_bits, split_by_chunk_bits}, expression::Expression, @@ -29,6 +29,7 @@ impl DecomposableTable for AndTable { fn subtable_polys(&self) -> Vec> { let memory_size = 1 << 16; + println!("{}", self.num_memories()); let mut evals = vec![]; (0..memory_size).for_each(|i| { let (lhs, rhs) = split_bits(i, 8); @@ -38,6 +39,22 @@ impl DecomposableTable for AndTable { vec![MultilinearPolynomial::new(evals)] } + fn subtable_polys_terms(&self) -> Vec> { + let init = Prod(vec![Var(0), Var(self.num_memories())]); + let mut terms = vec![init]; + (1..16).for_each(|i| { + let coeff = Pow(Box::new(Const(F::from(2))), i as u32); + let x = Var(i); + let y = Var(i + self.num_memories()); + let term = Prod(vec![coeff, x, y]); + terms.push(term); + }); + vec![MultilinearPolynomialTerms::new( + 16, + Sum(terms), + )] + } + fn chunk_bits(&self) -> Vec { vec![16; 8] } diff --git a/plonkish_backend/src/backend/lookup/lasso/test/range.rs b/plonkish_backend/src/backend/lookup/lasso/test/range.rs index 974a5db..f572bd5 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/range.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/range.rs @@ -5,7 +5,7 @@ use itertools::Itertools; use crate::{ backend::lookup::lasso::DecomposableTable, - 
poly::multilinear::MultilinearPolynomial, + poly::multilinear::{MultilinearPolynomial, MultilinearPolynomialTerms, PolyExpr::*}, util::{ arithmetic::{div_ceil, inner_product}, expression::Expression, @@ -81,6 +81,36 @@ impl DecomposableT } } + fn subtable_polys_terms(&self) -> Vec> { + let limb_init = Var(0); + let mut limb_terms = vec![limb_init]; + (1..LIMB_BITS).for_each(|i| { + let coeff = Pow(Box::new(Const(F::from(2))), i as u32); + let x = Var(i); + let term = Prod(vec![coeff, x]); + limb_terms.push(term); + }); + let limb_subtable_poly = MultilinearPolynomialTerms::new( + LIMB_BITS, + Sum(limb_terms), + ); + if NUM_BITS % LIMB_BITS == 0 { + vec![limb_subtable_poly] + } else { + let remainder = NUM_BITS % LIMB_BITS; + let rem_init = Var(0); + let mut rem_terms = vec![rem_init]; + (1..remainder).for_each(|i| { + let coeff = Pow(Box::new(Const(F::from(2))), i as u32); + let x = Var(i); + let term = Prod(vec![coeff, x]); + rem_terms.push(term); + }); + vec![limb_subtable_poly, MultilinearPolynomialTerms::new(remainder, Sum(rem_terms))] + } + + } + fn memory_to_chunk_index(&self, memory_index: usize) -> usize { memory_index } diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 17fe573..9b67ba8 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -105,13 +105,15 @@ impl< let num_memories = table.num_memories(); let chunk_bits = table.chunk_bits(); let subtable_polys = table.subtable_polys(); + let s_new = table.subtable_polys_terms(); // key: chunk index, value: chunk let mut chunk_map: HashMap> = HashMap::new(); (0..num_memories).for_each(|memory_index| { let chunk_index = table.memory_to_chunk_index(memory_index); let chunk_bits = chunk_bits[chunk_index]; let subtable_poly = &subtable_polys[table.memory_to_subtable_index(memory_index)]; - let memory = Memory::new(memory_index, 
subtable_poly.clone()); + let s = &s_new[table.memory_to_subtable_index(memory_index)]; + let memory = Memory::new(memory_index, subtable_poly.clone(), s.clone()); if chunk_map.get(&chunk_index).is_some() { chunk_map.entry(chunk_index).and_modify(|chunk| { chunk.add_memory(memory); diff --git a/plonkish_backend/src/poly/multilinear.rs b/plonkish_backend/src/poly/multilinear.rs index 428a3a1..1c6d679 100644 --- a/plonkish_backend/src/poly/multilinear.rs +++ b/plonkish_backend/src/poly/multilinear.rs @@ -11,6 +11,7 @@ use crate::{ use halo2_curves::ff::PrimeField; use num_integer::Integer; use rand::RngCore; +use rayon::prelude::{IntoParallelRefIterator, ParallelIterator}; use std::{ borrow::Cow, iter::{self, Sum}, @@ -18,6 +19,62 @@ use std::{ ops::{Add, AddAssign, Mul, MulAssign, Sub, SubAssign}, }; +/// Multilinear polynomials are represented as expressions +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub struct MultilinearPolynomialTerms { + num_vars: usize, + expression: PolyExpr, +} + +impl MultilinearPolynomialTerms { + pub fn new(num_vars: usize, expression: PolyExpr) -> Self { + Self { + num_vars, + expression, + } + } +} + +impl MultilinearPolynomialTerms { + pub fn evaluate(&self, x: &[F]) -> F { + assert_eq!(x.len(), self.num_vars); + self.expression.evaluate(x) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] +pub enum PolyExpr { + Const(F), + Var(usize), + Sum(Vec>), + Prod(Vec>), + Pow(Box>, u32), +} + +impl PolyExpr { + fn evaluate(&self, x: &[F]) -> F { + match self { + PolyExpr::Const(c) => c.clone(), + PolyExpr::Var(i) => x[*i], + PolyExpr::Sum(v) => { + v.par_iter().map(|t| { + t.evaluate(x) + }).reduce(|| F::ZERO, |acc, f| acc + f) + } + PolyExpr::Prod(v) => { + v.par_iter().map(|t| { + t.evaluate(x) + }).reduce(|| F::ONE, |acc, f| acc * f) + } + PolyExpr::Pow(inner, e) => { + let res = inner.evaluate(x); + let exp = [*e as u64]; + res.pow(exp) + } + } + } +} + 
#[derive(Clone, Debug, Serialize, Deserialize)] pub struct MultilinearPolynomial { evals: Vec, From 28228f20160407507fb3d6d02cb8db08f24375f8 Mon Sep 17 00:00:00 2001 From: jeong0982 Date: Fri, 9 Feb 2024 18:28:55 +0900 Subject: [PATCH 20/27] Fix minor error --- .../src/backend/lookup/lasso/test/and.rs | 9 +++---- .../src/backend/lookup/lasso/test/range.rs | 11 ++++----- plonkish_backend/src/poly/multilinear.rs | 24 +++++++------------ 3 files changed, 17 insertions(+), 27 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index 538facc..75d115d 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -40,19 +40,16 @@ impl DecomposableTable for AndTable { } fn subtable_polys_terms(&self) -> Vec> { - let init = Prod(vec![Var(0), Var(self.num_memories())]); + let init = Prod(vec![Var(0), Var(16)]); let mut terms = vec![init]; (1..16).for_each(|i| { let coeff = Pow(Box::new(Const(F::from(2))), i as u32); let x = Var(i); - let y = Var(i + self.num_memories()); + let y = Var(i + 16); let term = Prod(vec![coeff, x, y]); terms.push(term); }); - vec![MultilinearPolynomialTerms::new( - 16, - Sum(terms), - )] + vec![MultilinearPolynomialTerms::new(16, Sum(terms))] } fn chunk_bits(&self) -> Vec { diff --git a/plonkish_backend/src/backend/lookup/lasso/test/range.rs b/plonkish_backend/src/backend/lookup/lasso/test/range.rs index f572bd5..ef2cfe6 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/range.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/range.rs @@ -90,10 +90,7 @@ impl DecomposableT let term = Prod(vec![coeff, x]); limb_terms.push(term); }); - let limb_subtable_poly = MultilinearPolynomialTerms::new( - LIMB_BITS, - Sum(limb_terms), - ); + let limb_subtable_poly = MultilinearPolynomialTerms::new(LIMB_BITS, Sum(limb_terms)); if NUM_BITS % LIMB_BITS == 0 { vec![limb_subtable_poly] } else { @@ 
-106,9 +103,11 @@ impl DecomposableT let term = Prod(vec![coeff, x]); rem_terms.push(term); }); - vec![limb_subtable_poly, MultilinearPolynomialTerms::new(remainder, Sum(rem_terms))] + vec![ + limb_subtable_poly, + MultilinearPolynomialTerms::new(remainder, Sum(rem_terms)), + ] } - } fn memory_to_chunk_index(&self, memory_index: usize) -> usize { diff --git a/plonkish_backend/src/poly/multilinear.rs b/plonkish_backend/src/poly/multilinear.rs index 1c6d679..a7e3052 100644 --- a/plonkish_backend/src/poly/multilinear.rs +++ b/plonkish_backend/src/poly/multilinear.rs @@ -56,21 +56,15 @@ impl PolyExpr { match self { PolyExpr::Const(c) => c.clone(), PolyExpr::Var(i) => x[*i], - PolyExpr::Sum(v) => { - v.par_iter().map(|t| { - t.evaluate(x) - }).reduce(|| F::ZERO, |acc, f| acc + f) - } - PolyExpr::Prod(v) => { - v.par_iter().map(|t| { - t.evaluate(x) - }).reduce(|| F::ONE, |acc, f| acc * f) - } - PolyExpr::Pow(inner, e) => { - let res = inner.evaluate(x); - let exp = [*e as u64]; - res.pow(exp) - } + PolyExpr::Sum(v) => v + .par_iter() + .map(|t| t.evaluate(x)) + .reduce(|| F::ZERO, |acc, f| acc + f), + PolyExpr::Prod(v) => v + .par_iter() + .map(|t| t.evaluate(x)) + .reduce(|| F::ONE, |acc, f| acc * f), + PolyExpr::Pow(inner, e) => inner.evaluate(x).pow([*e as u64]), } } } From e9c515e1c38afc8c8e3289d23e26a7692c0a2ee4 Mon Sep 17 00:00:00 2001 From: jeong0982 Date: Sat, 10 Feb 2024 17:18:05 +0900 Subject: [PATCH 21/27] Fix errors --- .../backend/lookup/lasso/memory_checking/verifier.rs | 12 +++--------- .../src/backend/lookup/lasso/test/and.rs | 9 ++++----- .../src/backend/lookup/lasso/verifier/mod.rs | 4 +--- 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index f3e6eb1..469943b 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ 
b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -6,7 +6,7 @@ use itertools::{chain, Itertools}; use crate::{ pcs::Evaluation, piop::gkr::verify_grand_product, - poly::multilinear::{MultilinearPolynomial, MultilinearPolynomialTerms}, + poly::multilinear::MultilinearPolynomialTerms, util::{arithmetic::inner_product, transcript::FieldTranscriptRead}, Error, }; @@ -98,20 +98,14 @@ impl Chunk { #[derive(Clone, Debug)] pub(in crate::backend::lookup::lasso) struct Memory { memory_index: usize, - subtable_poly: MultilinearPolynomial, subtable_poly_new: MultilinearPolynomialTerms, } impl Memory { - pub fn new( - memory_index: usize, - subtable_poly: MultilinearPolynomial, - s: MultilinearPolynomialTerms, - ) -> Self { + pub fn new(memory_index: usize, subtable_poly: MultilinearPolynomialTerms) -> Self { Self { memory_index, - subtable_poly, - subtable_poly_new: s, + subtable_poly_new: subtable_poly, } } } diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index 75d115d..78191ac 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -29,7 +29,6 @@ impl DecomposableTable for AndTable { fn subtable_polys(&self) -> Vec> { let memory_size = 1 << 16; - println!("{}", self.num_memories()); let mut evals = vec![]; (0..memory_size).for_each(|i| { let (lhs, rhs) = split_bits(i, 8); @@ -40,12 +39,12 @@ impl DecomposableTable for AndTable { } fn subtable_polys_terms(&self) -> Vec> { - let init = Prod(vec![Var(0), Var(16)]); + let init = Prod(vec![Var(0), Var(8)]); let mut terms = vec![init]; - (1..16).for_each(|i| { - let coeff = Pow(Box::new(Const(F::from(2))), i as u32); + (1..8).for_each(|i| { + let coeff = Const(F::from(1 << i)); let x = Var(i); - let y = Var(i + 16); + let y = Var(i + 8); let term = Prod(vec![coeff, x, y]); terms.push(term); }); diff --git 
a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 9b67ba8..405718c 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -104,16 +104,14 @@ impl< fn chunks(table: &Box>) -> Vec> { let num_memories = table.num_memories(); let chunk_bits = table.chunk_bits(); - let subtable_polys = table.subtable_polys(); let s_new = table.subtable_polys_terms(); // key: chunk index, value: chunk let mut chunk_map: HashMap> = HashMap::new(); (0..num_memories).for_each(|memory_index| { let chunk_index = table.memory_to_chunk_index(memory_index); let chunk_bits = chunk_bits[chunk_index]; - let subtable_poly = &subtable_polys[table.memory_to_subtable_index(memory_index)]; let s = &s_new[table.memory_to_subtable_index(memory_index)]; - let memory = Memory::new(memory_index, subtable_poly.clone(), s.clone()); + let memory = Memory::new(memory_index, s.clone()); if chunk_map.get(&chunk_index).is_some() { chunk_map.entry(chunk_index).and_modify(|chunk| { chunk.add_memory(memory); From d24e4944fa8326c1ee9ea784ad7ad8cf7f9900c9 Mon Sep 17 00:00:00 2001 From: jeong0982 Date: Sat, 10 Feb 2024 17:28:27 +0900 Subject: [PATCH 22/27] Fix naming of `subtable_poly_new` --- .../src/backend/lookup/lasso/memory_checking/verifier.rs | 6 +++--- plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs index 469943b..0c9bd35 100644 --- a/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs +++ b/plonkish_backend/src/backend/lookup/lasso/memory_checking/verifier.rs @@ -84,7 +84,7 @@ impl Chunk { write_xs[i], hash(&dim_x, &e_poly_xs[i], &(read_ts_poly_x + F::ONE)) ); - let subtable_poly_y = 
memory.subtable_poly_new.evaluate(y); + let subtable_poly_y = memory.subtable_poly.evaluate(y); assert_eq!(init_ys[i], hash(&id_poly_y, &subtable_poly_y, &F::ZERO)); assert_eq!( final_read_ys[i], @@ -98,14 +98,14 @@ impl Chunk { #[derive(Clone, Debug)] pub(in crate::backend::lookup::lasso) struct Memory { memory_index: usize, - subtable_poly_new: MultilinearPolynomialTerms, + subtable_poly: MultilinearPolynomialTerms, } impl Memory { pub fn new(memory_index: usize, subtable_poly: MultilinearPolynomialTerms) -> Self { Self { memory_index, - subtable_poly_new: subtable_poly, + subtable_poly, } } } diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index 405718c..f8416d4 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -104,14 +104,14 @@ impl< fn chunks(table: &Box>) -> Vec> { let num_memories = table.num_memories(); let chunk_bits = table.chunk_bits(); - let s_new = table.subtable_polys_terms(); + let subtable_polys = table.subtable_polys_terms(); // key: chunk index, value: chunk let mut chunk_map: HashMap> = HashMap::new(); (0..num_memories).for_each(|memory_index| { let chunk_index = table.memory_to_chunk_index(memory_index); let chunk_bits = chunk_bits[chunk_index]; - let s = &s_new[table.memory_to_subtable_index(memory_index)]; - let memory = Memory::new(memory_index, s.clone()); + let subtable_poly = &subtable_polys[table.memory_to_subtable_index(memory_index)]; + let memory = Memory::new(memory_index, subtable_poly.clone()); if chunk_map.get(&chunk_index).is_some() { chunk_map.entry(chunk_index).and_modify(|chunk| { chunk.add_memory(memory); From 681c185edf62790c0eeb3e95ef5654fba0333ade Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Sat, 10 Feb 2024 18:09:44 +0900 Subject: [PATCH 23/27] Update variable names to be more clear --- plonkish_backend/src/backend.rs | 2 +- 
plonkish_backend/src/backend/hyperplonk.rs | 2 +- .../src/backend/hyperplonk/prover.rs | 12 +++---- .../src/backend/lookup/lasso/prover/mod.rs | 36 +++++++++---------- .../src/backend/lookup/lasso/prover/surge.rs | 16 ++++----- .../src/backend/lookup/lasso/test/and.rs | 4 +-- .../src/backend/lookup/lasso/test/range.rs | 4 +-- .../src/backend/lookup/lasso/verifier/mod.rs | 6 ++-- 8 files changed, 41 insertions(+), 41 deletions(-) diff --git a/plonkish_backend/src/backend.rs b/plonkish_backend/src/backend.rs index 0359fe9..953f47d 100644 --- a/plonkish_backend/src/backend.rs +++ b/plonkish_backend/src/backend.rs @@ -67,7 +67,7 @@ pub struct PlonkishCircuitInfo { /// which contains vector of tuples representing the input and table /// respectively. pub lookups: Vec, Expression)>>, - /// Represents Lasso lookup argument, which contains input, indices, and table + /// Represents Lasso lookup argument, which contains index expression, output expression and table info pub lasso_lookup: Option<(Expression, Expression, Box>)>, /// Each item inside outer vector repesents an closed permutation cycle, /// which contains vetor of tuples representing the polynomial index and diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 7e37c0f..4077b67 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -46,7 +46,7 @@ where pub(crate) num_instances: Vec, pub(crate) num_witness_polys: Vec, pub(crate) num_challenges: Vec, - /// assume we have at most Just One Lookup Table + /// (index expression, output expression, table info) pub(crate) lasso_lookup: Option<(Expression, Expression, Box>)>, pub(crate) lookup_polys_offset: usize, pub(crate) lookup_points_offset: usize, diff --git a/plonkish_backend/src/backend/hyperplonk/prover.rs b/plonkish_backend/src/backend/hyperplonk/prover.rs index 2429255..e7f36bc 100644 --- a/plonkish_backend/src/backend/hyperplonk/prover.rs +++ 
b/plonkish_backend/src/backend/hyperplonk/prover.rs @@ -220,9 +220,9 @@ pub(super) fn prove_lasso_lookup< } let lasso_lookup = pp.lasso_lookup.as_ref().unwrap(); let (lookup, table) = ((&lasso_lookup.0, &lasso_lookup.1), &lasso_lookup.2); - let (lookup_input_poly, lookup_nz_poly) = LassoProver::::lookup_poly(&lookup, &polys); + let (lookup_index_poly, lookup_output_poly) = LassoProver::::lookup_poly(&lookup, &polys); - let num_vars = lookup_input_poly.num_vars(); + let num_vars = lookup_output_poly.num_vars(); // get subtable_polys let subtable_polys = table.subtable_polys(); @@ -234,8 +234,8 @@ pub(super) fn prove_lasso_lookup< pp.lookup_polys_offset, &table, subtable_polys, - lookup_input_poly, - &lookup_nz_poly, + lookup_output_poly, + &lookup_index_poly, transcript, )?; @@ -243,7 +243,7 @@ pub(super) fn prove_lasso_lookup< // squeeze `r` let r = transcript.squeeze_challenges(num_vars); - let (input_poly, dims, read_ts_polys, final_cts_polys, e_polys) = ( + let (lookup_output_poly, dims, read_ts_polys, final_cts_polys, e_polys) = ( &lookup_polys[0][0], &lookup_polys[1], &lookup_polys[2], @@ -256,7 +256,7 @@ pub(super) fn prove_lasso_lookup< lookup_opening_points, lookup_opening_evals, &table, - input_poly, + lookup_output_poly, &e_polys.iter().collect_vec(), &r, num_vars, diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs index 93efb05..54c7482 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/mod.rs @@ -210,19 +210,19 @@ impl< fn e_polys( table: &Box>, subtable_polys: &[&MultilinearPolynomial], - nz: &Vec<&[usize]>, + indices: &Vec<&[usize]>, ) -> Vec> { let num_chunks = table.chunk_bits().len(); let num_memories = table.num_memories(); - assert_eq!(nz.len(), num_chunks); - let num_reads = nz[0].len(); + assert_eq!(indices.len(), num_chunks); + let num_reads = indices[0].len(); (0..num_memories) .map(|i| { let mut 
e_poly = Vec::with_capacity(num_reads); let subtable_poly = subtable_polys[table.memory_to_subtable_index(i)]; - let nz = nz[table.memory_to_chunk_index(i)]; + let index = indices[table.memory_to_chunk_index(i)]; (0..num_reads).for_each(|j| { - e_poly.push(subtable_poly[nz[j]]); + e_poly.push(subtable_poly[index[j]]); }); MultilinearPolynomial::new(e_poly) }) @@ -278,7 +278,7 @@ impl< lookup_opening_points: &mut Vec>, lookup_opening_evals: &mut Vec>, table: &Box>, - input_poly: &Poly, + lookup_output_poly: &Poly, e_polys: &[&Poly], r: &[F], num_vars: usize, @@ -286,7 +286,7 @@ impl< ) -> Result<(), Error> { Surge::::prove_sum_check( table, - input_poly, + lookup_output_poly, &e_polys, r, num_vars, @@ -386,26 +386,26 @@ impl< lookup_polys_offset: usize, table: &Box>, subtable_polys: &[&MultilinearPolynomial], - lookup_input_poly: MultilinearPolynomial, - lookup_nz_poly: &MultilinearPolynomial, + lookup_output_poly: MultilinearPolynomial, + lookup_index_poly: &MultilinearPolynomial, transcript: &mut impl TranscriptWrite, ) -> Result<(Vec>>, Vec>), Error> { let num_chunks = table.chunk_bits().len(); - // commit to input_poly - let lookup_input_comm = Pcs::commit_and_write(&pp, &lookup_input_poly, transcript)?; + // commit to lookup_output_poly + let lookup_output_comm = Pcs::commit_and_write(&pp, &lookup_output_poly, transcript)?; // get surge and dims let mut surge = Surge::::new(); // commit to dims - let dims = surge.commit(&table, lookup_nz_poly); + let dims = surge.commit(&table, lookup_index_poly); let dim_comms = Pcs::batch_commit_and_write(pp, &dims, transcript)?; // get e_polys & read_ts_polys & final_cts_polys let e_polys = { - let nz = surge.nz(); - LassoProver::::e_polys(&table, subtable_polys, &nz) + let indices = surge.indices(); + LassoProver::::e_polys(&table, subtable_polys, &indices) }; let (read_ts_polys, final_cts_polys) = surge.counter_polys(&table); @@ -414,9 +414,9 @@ impl< let final_cts_comms = Pcs::batch_commit_and_write(&pp, &final_cts_polys, 
transcript)?; let e_comms = Pcs::batch_commit_and_write(&pp, e_polys.as_slice(), transcript)?; - let lookup_input_poly = Poly { + let lookup_output_poly = Poly { offset: lookup_polys_offset, - poly: lookup_input_poly, + poly: lookup_output_poly, }; let dims = dims @@ -457,14 +457,14 @@ impl< Ok(( vec![ - vec![lookup_input_poly], + vec![lookup_output_poly], dims, read_ts_polys, final_cts_polys, e_polys, ], vec![ - vec![lookup_input_comm], + vec![lookup_output_comm], dim_comms, read_ts_comms, final_cts_comms, diff --git a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs index 519b5f3..d23b1b5 100644 --- a/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs +++ b/plonkish_backend/src/backend/lookup/lasso/prover/surge.rs @@ -46,7 +46,7 @@ impl< } } - pub fn nz(&'_ self) -> Vec<&[usize]> { + pub fn indices(&'_ self) -> Vec<&[usize]> { self.lookup_indices .iter() .map(|lookup_indices| lookup_indices.as_slice()) @@ -57,17 +57,17 @@ impl< pub fn commit( &mut self, table: &Box>, - nz_poly: &MultilinearPolynomial, + index_poly: &MultilinearPolynomial, ) -> Vec> { - let num_rows: usize = 1 << nz_poly.num_vars(); + let num_rows: usize = 1 << index_poly.num_vars(); let num_chunks = table.chunk_bits().len(); // get indices of non-zero columns of all rows where each index is chunked let indices = (0..num_rows) .map(|i| { - let mut index_bits = fe_to_bits_le(nz_poly[i]); + let mut index_bits = fe_to_bits_le(index_poly[i]); index_bits.truncate(table.chunk_bits().iter().sum()); assert_eq!( - usize_from_bits_le(&fe_to_bits_le(nz_poly[i])), + usize_from_bits_le(&fe_to_bits_le(index_poly[i])), usize_from_bits_le(&index_bits) ); @@ -134,7 +134,7 @@ impl< pub fn prove_sum_check( table: &Box>, - input_poly: &Poly, + lookup_output_poly: &Poly, e_polys: &[&Poly], r: &[F], num_vars: usize, @@ -144,7 +144,7 @@ impl< transcript: &mut impl TranscriptWrite, F>, ) -> Result<(), Error> { let claimed_sum = 
Self::sum_check_claim(&r, &table, &e_polys); - assert_eq!(claimed_sum, input_poly.evaluate(r)); + assert_eq!(claimed_sum, lookup_output_poly.evaluate(r)); transcript.write_field_element(&claimed_sum)?; @@ -179,7 +179,7 @@ impl< ) }) .chain([Evaluation::new( - input_poly.offset, + lookup_output_poly.offset, points_offset, claimed_sum, )]) diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index 78191ac..e64e798 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -228,11 +228,11 @@ mod test { let [_, _, _, _, _, _, w_l, w_r, w_o] = &array::from_fn(|poly| Query::new(poly, Rotation::cur())) .map(Expression::::Polynomial); - let lasso_lookup_input = w_o.clone(); let lasso_lookup_indices = Expression::DistributePowers( vec![w_l.clone(), w_r.clone()], Box::new(Expression::Constant(F::from_u128(1 << 64))), ); + let lasso_lookup_output = w_o.clone(); let chunk_bits = table.chunk_bits(); let num_vars = chunk_bits.iter().chain([&num_vars]).max().unwrap(); PlonkishCircuitInfo { @@ -243,7 +243,7 @@ mod test { num_challenges: vec![0], constraints: vec![], lookups: vec![vec![]], - lasso_lookup: Some((lasso_lookup_input, lasso_lookup_indices, table)), + lasso_lookup: Some((lasso_lookup_indices, lasso_lookup_output, table)), permutations, max_degree: Some(4), } diff --git a/plonkish_backend/src/backend/lookup/lasso/test/range.rs b/plonkish_backend/src/backend/lookup/lasso/test/range.rs index ef2cfe6..4d7b8f2 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/range.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/range.rs @@ -235,8 +235,8 @@ mod test { let [pi, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = &array::from_fn(|poly| Query::new(poly, Rotation::cur())) .map(Expression::::Polynomial); - let lasso_lookup_input = w_l.clone(); let lasso_lookup_indices = w_l.clone(); + let lasso_lookup_output = w_l.clone(); let 
chunk_bits = table.chunk_bits(); let num_vars = chunk_bits.iter().chain([&num_vars]).max().unwrap(); PlonkishCircuitInfo { @@ -247,7 +247,7 @@ mod test { num_challenges: vec![0], constraints: vec![q_l * w_l + q_r * w_r + q_m * w_l * w_r + q_o * w_o + q_c + pi], lookups: vec![vec![]], - lasso_lookup: Some((lasso_lookup_input, lasso_lookup_indices, table)), + lasso_lookup: Some((lasso_lookup_output, lasso_lookup_indices, table)), permutations, max_degree: Some(4), } diff --git a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs index f8416d4..c8b2824 100644 --- a/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs +++ b/plonkish_backend/src/backend/lookup/lasso/verifier/mod.rs @@ -35,10 +35,10 @@ impl< table: &Box>, transcript: &mut impl TranscriptRead, ) -> Result, Error> { - // read input_comm, dim_comms + // read output_comm, dim_comms let num_chunks = table.chunk_bits().len(); let num_memories = table.num_memories(); - let input_comm = Pcs::read_commitment(vp, transcript)?; + let output_comm = Pcs::read_commitment(vp, transcript)?; let dim_comms = Pcs::read_commitments(vp, num_chunks, transcript)?; // read read_ts_comms & final_cts_comms & e_comms @@ -46,7 +46,7 @@ impl< let final_cts_comms = Pcs::read_commitments(vp, num_chunks, transcript)?; let e_comms = Pcs::read_commitments(vp, num_memories, transcript)?; Ok(iter::empty() - .chain(vec![input_comm]) + .chain(vec![output_comm]) .chain(dim_comms) .chain(read_ts_comms) .chain(final_cts_comms) From 01d1877e21baa192488e7bc6c11ce7d4638d6887 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Sat, 10 Feb 2024 19:24:17 +0900 Subject: [PATCH 24/27] Add comment related to shortcut fix --- plonkish_backend/src/backend.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plonkish_backend/src/backend.rs b/plonkish_backend/src/backend.rs index 953f47d..6478e55 100644 --- a/plonkish_backend/src/backend.rs +++ b/plonkish_backend/src/backend.rs @@ -18,6 
+18,8 @@ pub mod lookup; pub trait PlonkishBackend: Clone + Debug { type Pcs: PolynomialCommitmentScheme; + // FIXME : Add Serialize + DeserializeOwned later, currently removed as a shortcut + // to skip implementing those traits on Lasso related type type ProverParam: Clone + Debug; type VerifierParam: Clone + Debug; From 70a30eca0096d657c4e2253da8606a47e8461381 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Sat, 10 Feb 2024 19:30:57 +0900 Subject: [PATCH 25/27] Add some documentation --- plonkish_backend/src/backend/hyperplonk.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plonkish_backend/src/backend/hyperplonk.rs b/plonkish_backend/src/backend/hyperplonk.rs index 4077b67..1aec679 100644 --- a/plonkish_backend/src/backend/hyperplonk.rs +++ b/plonkish_backend/src/backend/hyperplonk.rs @@ -48,7 +48,10 @@ where pub(crate) num_challenges: Vec, /// (index expression, output expression, table info) pub(crate) lasso_lookup: Option<(Expression, Expression, Box>)>, + /// offset of polynomials related to Lasso lookup in batch opening + /// Lasso polynomials are tracked separately since Lasso invokes separate sumcheck pub(crate) lookup_polys_offset: usize, + /// offset of points at which polynomials related to Lasso lookup opened in batch opening pub(crate) lookup_points_offset: usize, pub(crate) num_permutation_z_polys: usize, pub(crate) num_vars: usize, From 91cccd22ace9041de7af6387de6da51b30a4a1b3 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Sat, 10 Feb 2024 19:36:44 +0900 Subject: [PATCH 26/27] Remove obsolete file --- plonkish_backend/src/frontend/halo2.rs | 1 - plonkish_backend/src/frontend/halo2/lookup.rs | 22 ------------------- 2 files changed, 23 deletions(-) delete mode 100644 plonkish_backend/src/frontend/halo2/lookup.rs diff --git a/plonkish_backend/src/frontend/halo2.rs b/plonkish_backend/src/frontend/halo2.rs index 13d529e..ad3325a 100644 --- a/plonkish_backend/src/frontend/halo2.rs +++ b/plonkish_backend/src/frontend/halo2.rs @@ -21,7 +21,6 @@ 
use std::{ #[cfg(any(test, feature = "benchmark"))] pub mod circuit; -pub mod lookup; #[cfg(test)] mod test; diff --git a/plonkish_backend/src/frontend/halo2/lookup.rs b/plonkish_backend/src/frontend/halo2/lookup.rs deleted file mode 100644 index f115c78..0000000 --- a/plonkish_backend/src/frontend/halo2/lookup.rs +++ /dev/null @@ -1,22 +0,0 @@ -use halo2_curves::ff::PrimeField; -use halo2_proofs::plonk::ConstraintSystem; - -use crate::util::expression::Expression; - -/// This is a trait that decomposable tables provide implementations for. -/// This will be converted into `DecomposableTable` -pub trait SubtableStrategy< - F: PrimeField, - const TABLE_SIZE: usize, - const NUM_CHUNKS: usize, - const NUM_MEMORIES: usize, -> -{ - /// This is a configuration object that stores subtables - type Config: Clone; - - fn configure(meta: &mut ConstraintSystem) -> Self::Config; - - /// The `g` function that computes T[r] = g(T_1[r_1], ..., T_k[r_1], T_{k+1}[r_2], ..., T_{\alpha}[r_c]) - fn combine_lookups(&self, config: Self::Config) -> Expression; -} From 90adcbf703ca5cd7da4cf9792d015daad6f55609 Mon Sep 17 00:00:00 2001 From: DoHoonKim Date: Sun, 3 Mar 2024 15:53:09 +0900 Subject: [PATCH 27/27] Update examples to consider selector --- .../src/backend/lookup/lasso/test/and.rs | 48 +++++++------------ .../src/backend/lookup/lasso/test/range.rs | 36 +++++++------- 2 files changed, 37 insertions(+), 47 deletions(-) diff --git a/plonkish_backend/src/backend/lookup/lasso/test/and.rs b/plonkish_backend/src/backend/lookup/lasso/test/and.rs index e64e798..325d9b8 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/and.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/and.rs @@ -147,13 +147,13 @@ mod test { mut witness_rng: impl RngCore, ) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { let size = 1 << num_vars; - let mut polys = [(); 9].map(|_| vec![F::ZERO; size]); + let mut polys = [(); 5].map(|_| vec![F::ZERO; size]); let instances = rand_vec(num_vars, &mut 
witness_rng); polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); let mut permutation = Permutation::default(); - for poly in [6, 7, 8] { + for poly in [2, 3, 4] { permutation.copy((poly, 1), (poly, 1)); } for idx in 0..size - 1 { @@ -161,12 +161,12 @@ mod test { let [w_l, w_r, w_o] = if use_copy { let [l_copy_idx, r_copy_idx] = [(); 2].map(|_| { ( - rand_idx(6..9, &mut preprocess_rng), + rand_idx(2..5, &mut preprocess_rng), rand_idx(1..idx, &mut preprocess_rng), ) }); - permutation.copy(l_copy_idx, (6, idx)); - permutation.copy(r_copy_idx, (7, idx)); + permutation.copy(l_copy_idx, (2, idx)); + permutation.copy(r_copy_idx, (3, idx)); let w_l = polys[l_copy_idx.0][l_copy_idx.1]; let w_r = polys[r_copy_idx.0][r_copy_idx.1]; let w_o = usize_from_bits_le(&fe_to_bits_le(w_l)) @@ -179,36 +179,22 @@ mod test { [F::from(w_l), F::from(w_r), F::from(w_o)] }; - let q_c = F::random(&mut preprocess_rng); - let values = if preprocess_rng.next_u32().is_even() { - vec![ - (1, F::ONE), - (2, F::ONE), - (4, -F::ONE), - (5, q_c), - (6, w_l), - (7, w_r), - (8, w_o), - ] - } else { - vec![ - (3, F::ONE), - (4, -F::ONE), - (5, q_c), - (6, w_l), - (7, w_r), - (8, w_o), - ] - }; + let q_and = F::ONE; + let values = vec![ + (1, q_and), + (2, w_l), + (3, w_r), + (4, w_o), + ]; for (poly, value) in values { polys[poly][idx] = value; } } - let [_, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = polys; + let [_, q_and, w_l, w_r, w_o] = polys; let circuit_info = lasso_lookup_circuit_info( num_vars, instances.len(), - [q_l, q_r, q_m, q_o, q_c], + [q_and], table, permutation.into_cycles(), ); @@ -221,11 +207,11 @@ mod test { fn lasso_lookup_circuit_info( num_vars: usize, num_instances: usize, - preprocess_polys: [Vec; 5], + preprocess_polys: [Vec; 1], table: Box>, permutations: Vec>, ) -> PlonkishCircuitInfo { - let [_, _, _, _, _, _, w_l, w_r, w_o] = + let [_, q_and, w_l, w_r, w_o] = &array::from_fn(|poly| Query::new(poly, Rotation::cur())) .map(Expression::::Polynomial); let 
lasso_lookup_indices = Expression::DistributePowers( @@ -243,7 +229,7 @@ mod test { num_challenges: vec![0], constraints: vec![], lookups: vec![vec![]], - lasso_lookup: Some((lasso_lookup_indices, lasso_lookup_output, table)), + lasso_lookup: Some((q_and * lasso_lookup_indices, q_and * lasso_lookup_output, table)), permutations, max_degree: Some(4), } diff --git a/plonkish_backend/src/backend/lookup/lasso/test/range.rs b/plonkish_backend/src/backend/lookup/lasso/test/range.rs index 4d7b8f2..9cda40c 100644 --- a/plonkish_backend/src/backend/lookup/lasso/test/range.rs +++ b/plonkish_backend/src/backend/lookup/lasso/test/range.rs @@ -167,55 +167,59 @@ mod test { mut witness_rng: impl RngCore, ) -> (PlonkishCircuitInfo, impl PlonkishCircuit) { let size = 1 << num_vars; - let mut polys = [(); 9].map(|_| vec![F::ZERO; size]); + let mut polys = [(); 10].map(|_| vec![F::ZERO; size]); let instances = rand_vec(num_vars, &mut witness_rng); polys[0] = instance_polys(num_vars, [&instances])[0].evals().to_vec(); let mut permutation = Permutation::default(); - for poly in [6, 7, 8] { + for poly in [7, 8, 9] { permutation.copy((poly, 1), (poly, 1)); } for idx in 0..size - 1 { let w_l = if preprocess_rng.next_u32().is_even() && idx > 1 { - let l_copy_idx = (6, rand_idx(1..idx, &mut preprocess_rng)); - permutation.copy(l_copy_idx, (6, idx)); + let l_copy_idx = (7, rand_idx(1..idx, &mut preprocess_rng)); + permutation.copy(l_copy_idx, (7, idx)); polys[l_copy_idx.0][l_copy_idx.1] } else { let value = witness_rng.next_u64() as usize; - F::from_u128(value.pow(2) as u128) + F::from_u128(value.pow(2) as u128); + F::from(value as u64).square() }; let w_r = F::from(witness_rng.next_u64()); let q_c = F::random(&mut preprocess_rng); + let q_range = F::ONE; let values = if preprocess_rng.next_u32().is_even() { vec![ (1, F::ONE), (2, F::ONE), (4, -F::ONE), (5, q_c), - (6, w_l), - (7, w_r), - (8, w_l + w_r + q_c + polys[0][idx]), + (6, q_range), + (7, w_l), + (8, w_r), + (9, w_l + w_r + q_c + 
polys[0][idx]), ] } else { vec![ (3, F::ONE), (4, -F::ONE), (5, q_c), - (6, w_l), - (7, w_r), - (8, w_l * w_r + q_c + polys[0][idx]), + (6, q_range), + (7, w_l), + (8, w_r), + (9, w_l * w_r + q_c + polys[0][idx]), ] }; for (poly, value) in values { polys[poly][idx] = value; } } - let [_, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = polys; + let [_, q_l, q_r, q_m, q_o, q_c, q_range, w_l, w_r, w_o] = polys; let circuit_info = vanilla_plonk_with_lasso_lookup_circuit_info( num_vars, instances.len(), - [q_l, q_r, q_m, q_o, q_c], + [q_l, q_r, q_m, q_o, q_c, q_range], table, permutation.into_cycles(), ); @@ -228,11 +232,11 @@ mod test { fn vanilla_plonk_with_lasso_lookup_circuit_info( num_vars: usize, num_instances: usize, - preprocess_polys: [Vec; 5], + preprocess_polys: [Vec; 6], table: Box>, permutations: Vec>, ) -> PlonkishCircuitInfo { - let [pi, q_l, q_r, q_m, q_o, q_c, w_l, w_r, w_o] = + let [pi, q_l, q_r, q_m, q_o, q_c, q_range, w_l, w_r, w_o] = &array::from_fn(|poly| Query::new(poly, Rotation::cur())) .map(Expression::::Polynomial); let lasso_lookup_indices = w_l.clone(); @@ -247,7 +251,7 @@ mod test { num_challenges: vec![0], constraints: vec![q_l * w_l + q_r * w_r + q_m * w_l * w_r + q_o * w_o + q_c + pi], lookups: vec![vec![]], - lasso_lookup: Some((lasso_lookup_output, lasso_lookup_indices, table)), + lasso_lookup: Some((q_range * lasso_lookup_output, q_range * lasso_lookup_indices, table)), permutations, max_degree: Some(4), }