From 26a84ab10903953c72eaf2f701db8f3d08cf299e Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 2 May 2024 09:36:59 -0700 Subject: [PATCH 01/17] add kzg --- jolt-core/src/poly/commitment/kzg.rs | 238 +++++++++++++++++++++ jolt-core/src/poly/commitment/mod.rs | 2 + jolt-core/src/poly/commitment/zeromorph.rs | 1 + jolt-core/src/poly/field.rs | 5 + jolt-core/src/poly/unipoly.rs | 43 ++++ 5 files changed, 289 insertions(+) create mode 100644 jolt-core/src/poly/commitment/kzg.rs create mode 100644 jolt-core/src/poly/commitment/zeromorph.rs diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs new file mode 100644 index 000000000..10b72e7b4 --- /dev/null +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -0,0 +1,238 @@ +use ark_bn254::g1; +use ark_ec::scalar_mul::fixed_base::FixedBase; +use std::{borrow::Borrow, marker::PhantomData}; + +use crate::msm::VariableBaseMSM; +use crate::poly; +use crate::poly::unipoly::UniPoly; +use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; +use ark_ff::PrimeField; +use ark_std::One; +use ark_std::UniformRand; +use rand_chacha::rand_core::RngCore; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum KZGError { + #[error("Length Error: SRS Length: {0}, Key Length: {0}")] + KeyLengthError(usize, usize), + #[error("Length Error: Commitment Key Length: {0}, Polynomial Degree {0}")] + CommitLengthError(usize, usize), + #[error("Failed to compute quotient polynomial due to polynomial division")] + PolyDivisionError, +} + +#[derive(Debug, Clone, Default)] +pub struct UniversalKzgSrs { + pub g1_powers: Vec, + pub g2_powers: Vec, +} + +#[derive(Clone, Debug)] +pub struct KZGProverKey { + /// generators + pub g1_powers: Vec, +} + +pub struct KZGVerifierKey { + /// The generator of G1. + pub g1: P::G1Affine, + /// The generator of G2. + pub g2: P::G2Affine, + /// tau times the above generator of G2. + pub tau_2: P::G2Affine, +} + +impl UniversalKzgSrs
<P: Pairing>
{ + pub fn setup(max_degree: usize, rng: &mut R) -> UniversalKzgSrs
<P>
{ + let tau = P::ScalarField::rand(rng); + let g1 = P::G1::rand(rng); + let g2 = P::G2::rand(rng); + + let tau_powers: Vec<_> = (0..=max_degree) + .scan(tau, |state, _| { + let val = *state; + *state *= τ + Some(val) + }) + .collect(); + + let window_size = FixedBase::get_mul_window_size(max_degree); + let scalar_bits = P::ScalarField::MODULUS_BIT_SIZE as usize; + + //TODO: gate with rayon + let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); + let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); + let g1_powers_projective = FixedBase::msm(scalar_bits, window_size, &g1_table, &tau_powers); + let g2_powers_projective = FixedBase::msm(scalar_bits, window_size, &g2_table, &tau_powers); + let g1_powers = P::G1::normalize_batch(&g1_powers_projective); + let g2_powers = P::G2::normalize_batch(&g2_powers_projective); + + UniversalKzgSrs { + g1_powers, + g2_powers, + } + } + + pub fn get_prover_key(&self, key_size: usize) -> Result, KZGError> { + if self.g1_powers.len() < key_size { + return Err(KZGError::KeyLengthError(self.g1_powers.len(), key_size)); + } + Ok(self.g1_powers[..=key_size].to_vec()) + } + + pub fn get_verifier_key(&self, key_size: usize) -> Result, KZGError> { + if self.g1_powers.len() < key_size { + return Err(KZGError::KeyLengthError(self.g1_powers.len(), key_size)); + } + Ok(KZGVerifierKey { + g1: self.g1_powers[0], + g2: self.g2_powers[0], + tau_2: self.g2_powers[1], + }) + } + + pub fn trim(&self, key_size: usize) -> Result<(Vec, KZGVerifierKey
<P>
), KZGError> { + if self.g1_powers.len() < key_size { + return Err(KZGError::KeyLengthError(self.g1_powers.len(), key_size)); + } + let g1_powers = self.g1_powers[..=key_size].to_vec(); + + let pk = g1_powers; + let vk = KZGVerifierKey { + g1: self.g1_powers[0], + g2: self.g2_powers[0], + tau_2: self.g2_powers[1], + }; + Ok((pk, vk)) + } +} + +pub struct UnivariateKZG
<P: Pairing>
{ + phantom: PhantomData
<P>
, +} + +impl UnivariateKZG
<P: Pairing>
+where +
<P as Pairing>
::ScalarField: poly::field::JoltField, +{ + pub fn commit_offset( + g1_powers: &Vec, + poly: &UniPoly, + offset: usize, + ) -> Result { + if poly.degree() > g1_powers.len() { + return Err(KZGError::CommitLengthError(poly.degree(), g1_powers.len())); + } + + let scalars = &poly.as_vec(); + let bases = g1_powers.as_slice(); + + let com = ::msm( + &bases[offset..scalars.len()], + &poly.as_vec()[offset..], + ) + .unwrap(); + + Ok(com.into_affine()) + } + + pub fn commit( + g1_powers: &Vec, + poly: &UniPoly, + ) -> Result { + if poly.degree() > g1_powers.len() { + return Err(KZGError::CommitLengthError(poly.degree(), g1_powers.len())); + } + let com = ::msm( + &g1_powers.as_slice()[..poly.as_vec().len()], + &poly.as_vec(), + ) + .unwrap(); + Ok(com.into_affine()) + } + + pub fn open( + g1_powers: impl Borrow>, + polynomial: &UniPoly, + point: &P::ScalarField, + ) -> Result<(P::G1Affine, P::ScalarField), KZGError> { + let g1_powers = g1_powers.borrow(); + let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); + let witness_polynomial = polynomial + .divide_with_q_and_r(&divisor) + .map(|(q, _r)| q) + .ok_or(KZGError::PolyDivisionError)?; + let proof = ::msm( + &g1_powers.as_slice()[..witness_polynomial.as_vec().len()], + &witness_polynomial.as_vec(), + ) + .unwrap(); + let evaluation = polynomial.evaluate(point); + + Ok((proof.into_affine(), evaluation)) + } + + fn verify( + vk: impl Borrow>, + commitment: &P::G1Affine, + point: &P::ScalarField, + proof: &P::G1Affine, + evaluation: &P::ScalarField, + ) -> Result { + let vk = vk.borrow(); + + let lhs = P::pairing( + commitment.into_group() - vk.g1.into_group() * evaluation, + vk.g2, + ); + let rhs = P::pairing(proof, vk.tau_2.into_group() - (vk.g2 * point)); + Ok(lhs == rhs) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::poly::unipoly::UniPoly; + use ark_bn254::{Bn254, Fr}; + + use ark_std::{ + rand::{Rng, SeedableRng}, + UniformRand, + }; + use rand_chacha::{rand_core::RngCore, ChaCha20Rng}; + + fn random(degree: usize, mut rng: &mut R) -> UniPoly + where +
<P as Pairing>
::ScalarField: poly::field::JoltField, + { + let coeffs = (0..=degree) + .map(|_| P::ScalarField::rand(&mut rng)) + .collect::>(); + UniPoly::from_coeff(coeffs) + } + + #[test] + fn commit_prove_verify() -> Result<(), KZGError> { + let seed = b"11111111111111111111111111111111"; + for _ in 0..100 { + let mut rng = &mut ChaCha20Rng::from_seed(*seed); + let degree = rng.gen_range(2..20); + + let pp = UniversalKzgSrs::::setup(degree, &mut rng); + let (ck, vk) = pp.trim(degree).unwrap(); + let p = random::(degree, rng); + let comm = UnivariateKZG::::commit(&ck, &p)?; + let point = Fr::rand(rng); + let (proof, value) = UnivariateKZG::::open(&ck, &p, &point)?; + assert!( + UnivariateKZG::::verify(&vk, &comm, &point, &proof, &value)?, + "proof was incorrect for max_degree = {}, polynomial_degree = {}", + degree, + p.degree(), + ); + } + Ok(()) + } +} diff --git a/jolt-core/src/poly/commitment/mod.rs b/jolt-core/src/poly/commitment/mod.rs index 1081322a8..61a62504e 100644 --- a/jolt-core/src/poly/commitment/mod.rs +++ b/jolt-core/src/poly/commitment/mod.rs @@ -1,3 +1,5 @@ pub mod commitment_scheme; pub mod hyrax; +pub mod kzg; pub mod pedersen; +pub mod zeromorph; diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -0,0 +1 @@ + diff --git a/jolt-core/src/poly/field.rs b/jolt-core/src/poly/field.rs index a97021abc..94dba6338 100644 --- a/jolt-core/src/poly/field.rs +++ b/jolt-core/src/poly/field.rs @@ -52,6 +52,7 @@ pub trait JoltField: fn double(&self) -> Self; fn square(&self) -> Self; fn from_bytes(bytes: &[u8]) -> Self; + fn inverse(&self) -> Option; #[inline(always)] fn mul_0_optimized(self, other: Self) -> Self { if self.is_zero() || other.is_zero() { @@ -119,6 +120,10 @@ impl JoltField for ark_bn254::Fr { ::square(self) } + fn inverse(&self) -> Option { + ::inverse(&self) + } + fn from_bytes(bytes: &[u8]) -> Self { assert_eq!(bytes.len(), Self::NUM_BYTES); ark_bn254::Fr::from_le_bytes_mod_order(bytes) diff --git a/jolt-core/src/poly/unipoly.rs b/jolt-core/src/poly/unipoly.rs index 563082498..3f44091a1 100644 --- a/jolt-core/src/poly/unipoly.rs +++ b/jolt-core/src/poly/unipoly.rs @@ -50,6 +50,49 @@ impl UniPoly { gaussian_elimination(&mut vandermonde) } + /// Divide self by another polynomial, and returns the + /// quotient and remainder. + pub fn divide_with_q_and_r(&self, divisor: &Self) -> Option<(Self, Self)> { + if self.is_zero() { + Some((Self::zero(), Self::zero())) + } else if divisor.is_zero() { + None + } else if self.degree() < divisor.degree() { + Some((Self::zero(), self.clone())) + } else { + // Now we know that self.degree() >= divisor.degree(); + let mut quotient = vec![F::zero(); self.degree() - divisor.degree() + 1]; + let mut remainder: Self = self.clone(); + // Can unwrap here because we know self is not zero. 
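+ // In a field the (nonzero) divisor's leading coefficient is invertible; it is inverted once here and reused to cancel the remainder's leading term on every pass of the long-division loop below.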
+ let divisor_leading_inv = divisor.leading_coefficient().unwrap().inverse().unwrap(); + while !remainder.is_zero() && remainder.degree() >= divisor.degree() { + let cur_q_coeff = *remainder.leading_coefficient().unwrap() * divisor_leading_inv; + let cur_q_degree = remainder.degree() - divisor.degree(); + quotient[cur_q_degree] = cur_q_coeff; + + for (i, div_coeff) in divisor.coeffs.iter().enumerate() { + remainder.coeffs[cur_q_degree + i] -= &(cur_q_coeff * div_coeff); + } + while let Some(true) = remainder.coeffs.last().map(|c| c == &F::zero()) { + remainder.coeffs.pop(); + } + } + Some((Self::from_coeff(quotient), remainder)) + } + } + + fn is_zero(&self) -> bool { + self.coeffs.is_empty() || self.coeffs.iter().all(|c| c == &F::zero()) + } + + fn leading_coefficient(&self) -> Option<&F> { + self.coeffs.last() + } + + fn zero() -> Self { + Self::from_coeff(Vec::new()) + } + pub fn degree(&self) -> usize { self.coeffs.len() - 1 } From 372d770a02ba3ba4327d4b1472639cea1849d4b6 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 2 May 2024 22:04:48 -0700 Subject: [PATCH 02/17] port quotient construction methods and unipoly methods --- jolt-core/src/poly/commitment/kzg.rs | 1 - jolt-core/src/poly/commitment/zeromorph.rs | 710 +++++++++++++++++++++ jolt-core/src/poly/dense_mlpoly.rs | 2 +- jolt-core/src/poly/unipoly.rs | 53 ++ 4 files changed, 764 insertions(+), 2 deletions(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index 10b72e7b4..a6faa4b3a 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -1,4 +1,3 @@ -use ark_bn254::g1; use ark_ec::scalar_mul::fixed_base::FixedBase; use std::{borrow::Borrow, marker::PhantomData}; diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index 8b1378917..ab4130a9c 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -1 +1,711 @@ +#![allow(clippy::too_many_arguments)] +#![allow(clippy::type_complexity)] +use std::{iter, marker::PhantomData}; + +use crate::poly::unipoly::UniPoly; +use crate::poly::{self, dense_mlpoly::DensePolynomial}; +use crate::utils::errors::ProofVerifyError; +use crate::utils::transcript::ProofTranscript; +use ark_bn254::Bn254; +use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; +use ark_ff::{batch_inversion, Field}; +use ark_std::{iterable::Iterable, ops::Neg, One, Zero}; +use itertools::Itertools; +use lazy_static::lazy_static; +use merlin::Transcript; +use rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaCha20Rng, +}; +use serde::{Deserialize, Serialize}; +use std::sync::{Arc, Mutex}; +use thiserror::Error; + +#[cfg(feature = "ark-msm")] +use ark_ec::VariableBaseMSM; + +#[cfg(feature = "multicore")] +use rayon::prelude::*; + +use super::{ + commitment_scheme::CommitmentScheme, + kzg::{UnivariateKZG, UniversalKzgSrs}, +}; + +const MAX_VARS: usize = 17; + +lazy_static! { + pub static ref ZEROMORPH_SRS: Arc>> = + Arc::new(Mutex::new(ZeromorphSRS::setup( + 1 << (MAX_VARS + 1), + &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME") + ))); +} + +#[derive(Debug, Clone, Default)] +pub struct ZeromorphSRS(UniversalKzgSrs
<P>
); + +impl ZeromorphSRS
<P: Pairing>
{ + pub fn setup(max_degree: usize, rng: &mut R) -> ZeromorphSRS
<P>
{ + ZeromorphSRS(UniversalKzgSrs::
<P>
::setup(max_degree, rng)) + } + + pub fn trim( + &self, + max_degree: usize, + ) -> Result<(ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
), ZeromorphError> { + let offset = self.0.g1_powers.len() - max_degree; + if self.0.g1_powers.len() >= max_degree + offset { + return Err(ZeromorphError::KeyLengthError( + max_degree, + self.0.g1_powers.len(), + )); + } + let offset = self.0.g1_powers.len() - max_degree; + let offset_g1_powers = self.0.g1_powers[offset..(offset + max_degree)].to_vec(); + Ok(( + ZeromorphProverKey { + g1_powers: self.0.g1_powers.clone(), + offset_g1_powers, + }, + ZeromorphVerifierKey { + g1: self.0.g1_powers[0], + g2: self.0.g2_powers[0], + tau_2: self.0.g2_powers[1], + tau_N_max_sub_2_N: self.0.g2_powers[offset], + }, + )) + } +} + +#[derive(Clone, Debug)] +pub struct ZeromorphProverKey { + pub g1_powers: Vec, + pub offset_g1_powers: Vec, +} + +#[derive(Copy, Clone, Debug)] +pub struct ZeromorphVerifierKey { + pub g1: P::G1Affine, + pub g2: P::G2Affine, + pub tau_2: P::G2Affine, + pub tau_N_max_sub_2_N: P::G2Affine, +} + +#[derive(Clone, Serialize, Deserialize, Debug)] +pub struct ZeromorphProof { + pub pi: P::G1Affine, + pub q_hat_com: P::G1Affine, + pub q_k_com: Vec, +} + +#[derive(Error, Debug)] +pub enum ZeromorphError { + #[error("Length Error: SRS Length: {0}, Key Length: {0}")] + KeyLengthError(usize, usize), +} + +fn compute_multilinear_quotients( + poly: &DensePolynomial, + u_challenge: &[P::ScalarField], +) -> (Vec>, P::ScalarField) +where +
<P as Pairing>
::ScalarField: poly::field::JoltField, +{ + assert_eq!(poly.get_num_vars(), u_challenge.len()); + + let mut g = poly.Z.to_vec(); + let mut quotients: Vec<_> = u_challenge + .iter() + .enumerate() + .map(|(i, x_i)| { + let (g_lo, g_hi) = g.split_at_mut(1 << (poly.get_num_vars() - 1 - i)); + let mut quotient = vec![P::ScalarField::zero(); g_lo.len()]; + + #[cfg(feature = "multicore")] + let quotient_iter = quotient.par_iter_mut(); + + #[cfg(not(feature = "multicore"))] + let quotient_iter = quotient.iter_mut(); + + quotient_iter + .zip_eq(&*g_lo) + .zip_eq(&*g_hi) + .for_each(|((q, g_lo), g_hi)| { + *q = *g_hi - *g_lo; + }); + + #[cfg(feature = "multicore")] + let g_lo_iter = g_lo.par_iter_mut(); + + #[cfg(not(feature = "multicore"))] + let g_lo_iter = g_lo.iter_mut(); + g_lo_iter.zip_eq(g_hi).for_each(|(g_lo, g_hi)| { + *g_lo += (*g_hi - g_lo as &_) * x_i; + }); + + g.truncate(1 << (poly.get_num_vars() - 1 - i)); + + UniPoly::from_coeff(quotient) + }) + .collect(); + quotients.reverse(); + (quotients, g[0]) +} + +fn compute_batched_lifted_degree_quotient( + n: usize, + quotients: &Vec>, + y_challenge: &P::ScalarField, +) -> UniPoly +where +
<P as Pairing>
::ScalarField: poly::field::JoltField, +{ + // Compute \hat{q} = \sum_k y^k * X^{N - d_k - 1} * q_k + let mut scalar = P::ScalarField::one(); // y^k + // Rather than explicitly computing the shifts of q_k by N - d_k - 1 (i.e. multiplying q_k by X^{N - d_k - 1}) + // then accumulating them, we simply accumulate y^k*q_k into \hat{q} at the index offset N - d_k - 1 + let q_hat = quotients.iter().enumerate().fold( + vec![P::ScalarField::zero(); n], + |mut q_hat, (idx, q)| { + #[cfg(feature = "multicore")] + let q_hat_iter = q_hat[n - (1 << idx)..].par_iter_mut(); + + #[cfg(not(feature = "multicore"))] + let q_hat_iter = q_hat[n - (1 << idx)..].iter_mut(); + q_hat_iter.zip(&q.as_vec()).for_each(|(q_hat, q)| { + *q_hat += scalar * q; + }); + scalar *= y_challenge; + q_hat + }, + ); + + UniPoly::from_coeff(q_hat) +} + +fn eval_and_quotient_scalars( + y_challenge: P::ScalarField, + x_challenge: P::ScalarField, + z_challenge: P::ScalarField, + challenges: &[P::ScalarField], +) -> (P::ScalarField, (Vec, Vec)) +where +
<P as Pairing>
::ScalarField: poly::field::JoltField, +{ + let num_vars = challenges.len(); + + // squares of x = [x, x^2, .. x^{2^k}, .. x^{2^num_vars}] + let squares_of_x: Vec<_> = iter::successors(Some(x_challenge), |&x| Some(x.square())) + .take(num_vars + 1) + .collect(); + + // offsets of x = + let offsets_of_x = { + let mut offsets_of_x = squares_of_x + .iter() + .rev() + .skip(1) + .scan(P::ScalarField::one(), |acc, pow_x| { + *acc *= pow_x; + Some(*acc) + }) + .collect::>(); + offsets_of_x.reverse(); + offsets_of_x + }; + + let vs = { + let v_numer = squares_of_x[num_vars] - P::ScalarField::one(); + let mut v_denoms = squares_of_x + .iter() + .map(|squares_of_x| *squares_of_x - P::ScalarField::one()) + .collect::>(); + batch_inversion(&mut v_denoms); + v_denoms + .iter() + .map(|v_denom| v_numer * v_denom) + .collect::>() + }; + + let q_scalars = iter::successors(Some(P::ScalarField::one()), |acc| Some(*acc * y_challenge)) + .take(num_vars) + .zip_eq(offsets_of_x) + .zip(squares_of_x) + .zip(&vs) + .zip_eq(&vs[1..]) + .zip_eq(challenges.iter().rev()) + .map( + |(((((power_of_y, offset_of_x), square_of_x), v_i), v_j), u_i)| { + ( + -(power_of_y * offset_of_x), + -(z_challenge * (square_of_x * v_j - *u_i * v_i)), + ) + }, + ) + .unzip(); + + // -vs[0] * z = -z * (x^(2^num_vars) - 1) / (x - 1) = -z ฮฆ_n(x) + (-vs[0] * z_challenge, q_scalars) +} + +/* +#[derive(Clone)] +pub struct Zeromorph { + _phantom: PhantomData
<P>
, + } + +impl CommitmentScheme for Zeromorph
<P>
+where +
<P as Pairing>
::ScalarField: poly::field::JoltField, +{ + type Field = P::ScalarField; + type Setup = ZeromorphSRS
<P>
; + type Commitment = P::G1Affine; + type Proof = ZeromorphProof
<P>
; + type BatchedProof = ZeromorphProof
<P>
; + + fn setup(shapes: &[CommitShape]) -> Self::Setup { + + } + + fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { + + } + + fn batch_commit( + evals: &[&[Self::Field]], + gens: &Self::Setup, + batch_type: BatchType, + ) -> Vec { + + } + + fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { + + } + + fn prove( + poly: &DensePolynomial, + opening_point: &[Self::Field], // point at which the polynomial is evaluated + transcript: &mut ProofTranscript, + ) -> Self::Proof; + fn batch_prove( + polynomials: &[&DensePolynomial], + opening_point: &[Self::Field], + openings: &[Self::Field], + batch_type: BatchType, + transcript: &mut ProofTranscript, + ) -> Self::BatchedProof { + + } + + fn verify( + proof: &Self::Proof, + setup: &Self::Setup, + transcript: &mut ProofTranscript, + opening_point: &[Self::Field], // point at which the polynomial is evaluated + opening: &Self::Field, // evaluation \widetilde{Z}(r) + commitment: &Self::Commitment, + ) -> Result<(), ProofVerifyError> { + + } + + fn batch_verify( + batch_proof: &Self::BatchedProof, + setup: &Self::Setup, + opening_point: &[Self::Field], + openings: &[Self::Field], + commitments: &[&Self::Commitment], + transcript: &mut ProofTranscript, + ) -> Result<(), ProofVerifyError> { + + } + + fn protocol_name() -> &'static [u8] { + b"zeromorph" + } + + fn batch_commit_polys( + polys: &[DensePolynomial], + setup: &Self::Setup, + batch_type: super::commitment_scheme::BatchType, + ) -> Vec { + let slices: Vec<&[Self::Field]> = polys.iter().map(|poly| poly.evals_ref()).collect(); + Self::batch_commit(&slices, setup, batch_type) + } + + fn batch_commit_polys_ref( + polys: &[&DensePolynomial], + setup: &Self::Setup, + batch_type: super::commitment_scheme::BatchType, + ) -> Vec { + let slices: Vec<&[Self::Field]> = polys.iter().map(|poly| poly.evals_ref()).collect(); + Self::batch_commit(&slices, setup, batch_type) + } +} +*/ + +#[cfg(test)] +mod test { + + use super::*; + use crate::utils::math::Math; + use ark_bn254::{Bn254, Fr}; + use ark_ff::{BigInt, Zero}; + use ark_std::{test_rng, UniformRand}; + + // Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula + fn phi(challenge: &P::ScalarField, subscript: usize) -> P::ScalarField { + let len = (1 << subscript) as u64; + (0..len) + .into_iter() + .fold(P::ScalarField::zero(), |mut acc, i| { + //Note this is ridiculous DevX + acc += challenge.pow(BigInt::<1>::from(i)); + acc + }) + } + + /* + #[test] + fn prove_verify_single() { + let max_vars = 8; + let mut rng = test_rng(); + let srs = ZEROMORPH_SRS.lock().unwrap(); + + for num_vars in 3..max_vars { + // Setup + let (pk, vk) = { + let poly_size = 1 << (num_vars + 1); + srs.trim(poly_size - 1).unwrap() + }; + let polys = DensePolynomial::new( + (0..(1 << num_vars)) + .map(|_| Fr::rand(&mut rng)) + .collect::>(), + ); + let challenges = (0..num_vars) + .map(|_| Fr::rand(&mut rng)) + .collect::>(); + let evals = polys.evaluate(&challenges); + + // Commit and open + let commitments = Zeromorph::::commit(&[polys.clone()], &pk.g1_powers).unwrap(); + + let mut prover_transcript = Transcript::new(b"example"); + let proof = Zeromorph::::prove( + &[polys], + &[evals], + &challenges, + &pk, + &mut prover_transcript, + ) + .unwrap(); + + let mut verifier_transcript = Transcript::new(b"example"); + Zeromorph::::verify( + &commitments, + &[evals], + &challenges, + &vk, + &mut verifier_transcript, + proof, + ) + .unwrap(); + + //TODO: check both random oracles are synced + } + } + + #[test] + 
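// Batched variant of the single-polynomial test above: commits to `num_polys` random multilinear polynomials and opens them all at the same challenge point.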
fn prove_verify_batched() { + let max_vars = 16; + let mut rng = test_rng(); + let num_polys = 8; + let srs = ZEROMORPH_SRS.lock().unwrap(); + + for num_vars in 3..max_vars { + // Setup + let (pk, vk) = { + let poly_size = 1 << (num_vars + 1); + srs.trim(poly_size - 1).unwrap() + }; + let polys: Vec> = (0..num_polys) + .map(|_| { + DensePolynomial::new( + (0..(1 << num_vars)) + .map(|_| Fr::rand(&mut rng)) + .collect::>(), + ) + }) + .collect::>(); + let challenges = (0..num_vars) + .into_iter() + .map(|_| Fr::rand(&mut rng)) + .collect::>(); + let evals = polys + .clone() + .into_iter() + .map(|poly| poly.evaluate(&challenges)) + .collect::>(); + + // Commit and open + let commitments = Zeromorph::::commit(&polys, &pk.g1_powers).unwrap(); + + let mut prover_transcript = Transcript::new(b"example"); + let proof = + Zeromorph::::prove(&polys, &evals, &challenges, &pk, &mut prover_transcript).unwrap(); + + let mut verifier_transcript = Transcript::new(b"example"); + Zeromorph::::verify( + &commitments, + &evals, + &challenges, + &vk, + &mut verifier_transcript, + proof, + ) + .unwrap(); + + //TODO: check both random oracles are synced + } + } + */ + + /// Test for computing qk given multilinear f + /// Given ๐‘“(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚™โ‚‹โ‚), and `(๐‘ข, ๐‘ฃ)` such that \f(\u) = \v, compute `qโ‚–(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚–โ‚‹โ‚)` + /// such that the following identity holds: + /// + /// `๐‘“(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚™โ‚‹โ‚) โˆ’ ๐‘ฃ = โˆ‘โ‚–โ‚Œโ‚€โฟโปยน (๐‘‹โ‚– โˆ’ ๐‘ขโ‚–) qโ‚–(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚–โ‚‹โ‚)` + #[test] + fn quotient_construction() { + // Define size params + let num_vars = 4; + let n: u64 = 1 << num_vars; + + // Construct a random multilinear polynomial f, and (u,v) such that f(u) = v + let mut rng = test_rng(); + let multilinear_f = + DensePolynomial::new((0..n).map(|_| Fr::rand(&mut rng)).collect::>()); + let u_challenge = (0..num_vars) + .into_iter() + .map(|_| Fr::rand(&mut rng)) + .collect::>(); + let v_evaluation = multilinear_f.evaluate(&u_challenge); + + // Compute multilinear quotients `qโ‚–(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚–โ‚‹โ‚)` + let (quotients, constant_term) = + compute_multilinear_quotients::(&multilinear_f, &u_challenge); + + // Assert the constant term is equal to v_evaluation + assert_eq!( + constant_term, v_evaluation, + "The constant term equal to the evaluation of the polynomial at challenge point." + ); + + //To demonstrate that q_k was properly constructd we show that the identity holds at a random multilinear challenge + // i.e. ๐‘“(๐‘ง) โˆ’ ๐‘ฃ โˆ’ โˆ‘โ‚–โ‚Œโ‚€แตˆโปยน (๐‘งโ‚– โˆ’ ๐‘ขโ‚–)๐‘žโ‚–(๐‘ง) = 0 + let z_challenge = (0..num_vars) + .map(|_| Fr::rand(&mut rng)) + .collect::>(); + + let mut res = multilinear_f.evaluate(&z_challenge); + res -= v_evaluation; + + for (k, q_k_uni) in quotients.iter().enumerate() { + let z_partial = &z_challenge[&z_challenge.len() - k..]; + //This is a weird consequence of how things are done.. the univariate polys are of the multilinear commitment in lagrange basis. 
Therefore we evaluate as multilinear + let q_k = DensePolynomial::new(q_k_uni.as_vec()); + let q_k_eval = q_k.evaluate(z_partial); + + res -= (z_challenge[z_challenge.len() - k - 1] + - u_challenge[z_challenge.len() - k - 1]) + * q_k_eval; + } + assert!(res.is_zero()); + } + + /// Test for construction of batched lifted degree quotient: + /// ฬ‚q = โˆ‘โ‚–โ‚Œโ‚€โฟโปยน yแต Xแตโปแตˆแตโปยน ฬ‚qโ‚–, ๐‘‘โ‚– = deg(ฬ‚q), ๐‘š = ๐‘ + #[test] + fn batched_lifted_degree_quotient() { + let num_vars = 3; + let n = 1 << num_vars; + + // Define mock qโ‚– with deg(qโ‚–) = 2แตโปยน + let q_0 = UniPoly::from_coeff(vec![Fr::one()]); + let q_1 = UniPoly::from_coeff(vec![Fr::from(2u64), Fr::from(3u64)]); + let q_2 = UniPoly::from_coeff(vec![ + Fr::from(4u64), + Fr::from(5u64), + Fr::from(6u64), + Fr::from(7u64), + ]); + let quotients = vec![q_0, q_1, q_2]; + + let mut rng = test_rng(); + let y_challenge = Fr::rand(&mut rng); + + //Compute batched quptient ฬ‚q + let batched_quotient = + compute_batched_lifted_degree_quotient::(n, "ients, &y_challenge); + + //Explicitly define q_k_lifted = X^{N-2^k} * q_k and compute the expected batched result + let q_0_lifted = UniPoly::from_coeff(vec![ + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::one(), + ]); + let q_1_lifted = UniPoly::from_coeff(vec![ + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::from(2u64), + Fr::from(3u64), + ]); + let q_2_lifted = UniPoly::from_coeff(vec![ + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::zero(), + Fr::from(4u64), + Fr::from(5u64), + Fr::from(6u64), + Fr::from(7u64), + ]); + + //Explicitly compute ฬ‚q i.e. RLC of lifted polys + let mut batched_quotient_expected = UniPoly::from_coeff(vec![Fr::zero(); n]); + + batched_quotient_expected += &q_0_lifted; + batched_quotient_expected += &(q_1_lifted * y_challenge); + batched_quotient_expected += &(q_2_lifted * (y_challenge * y_challenge)); + assert_eq!(batched_quotient, batched_quotient_expected); + } + + /// evaluated quotient \zeta_x + /// + /// ๐œ = ๐‘“ โˆ’ โˆ‘โ‚–โ‚Œโ‚€โฟโปยน๐‘ฆแต๐‘ฅสทหขโปสทโบยน๐‘“โ‚– = ๐‘“ โˆ’ โˆ‘_{d โˆˆ {dโ‚€, ..., dโ‚™โ‚‹โ‚}} X^{d* - d + 1} โˆ’ โˆ‘{kโˆถ dโ‚–=d} yแต fโ‚– , where d* = lifted degree + /// + /// ๐œ = ฬ‚q - โˆ‘โ‚–โ‚Œโ‚€โฟโปยน yแต Xแตโปแตˆแตโปยน ฬ‚qโ‚–, m = N + #[test] + fn partially_evaluated_quotient_zeta() { + let num_vars = 3; + let n: u64 = 1 << num_vars; + + let mut rng = test_rng(); + let x_challenge = Fr::rand(&mut rng); + let y_challenge = Fr::rand(&mut rng); + + let challenges: Vec<_> = (0..num_vars).map(|_| Fr::rand(&mut rng)).collect(); + let z_challenge = Fr::rand(&mut rng); + + let (_, (zeta_x_scalars, _)) = + eval_and_quotient_scalars::(y_challenge, x_challenge, z_challenge, &challenges); + + // To verify we manually compute zeta using the computed powers and expected + // ๐œ = ฬ‚q - โˆ‘โ‚–โ‚Œโ‚€โฟโปยน yแต Xแตโปแตˆแตโปยน ฬ‚qโ‚–, m = N + assert_eq!( + zeta_x_scalars[0], + -x_challenge.pow(BigInt::<1>::from((n - 1) as u64)) + ); + + assert_eq!( + zeta_x_scalars[1], + -y_challenge * x_challenge.pow(BigInt::<1>::from((n - 1 - 1) as u64)) + ); + + assert_eq!( + zeta_x_scalars[2], + -y_challenge * y_challenge * x_challenge.pow(BigInt::<1>::from((n - 3 - 1) as u64)) + ); + } + + /// Test efficiently computing ๐›ทโ‚–(x) = โˆ‘แตขโ‚Œโ‚€แตโปยนxโฑ + /// ๐›ทโ‚–(๐‘ฅ) = โˆ‘แตขโ‚Œโ‚€แตโปยน๐‘ฅโฑ = (๐‘ฅยฒ^แต โˆ’ 1) / (๐‘ฅ โˆ’ 1) + #[test] + fn phi_n_x_evaluation() { + const N: u64 = 8u64; + let log_N = (N as 
usize).log_2(); + + // ๐›ทโ‚–(๐‘ฅ) + let mut rng = test_rng(); + let x_challenge = Fr::rand(&mut rng); + + let efficient = (x_challenge.pow(BigInt::<1>::from((1 << log_N) as u64)) - Fr::one()) + / (x_challenge - Fr::one()); + let expected: Fr = phi::(&x_challenge, log_N); + assert_eq!(efficient, expected); + } + + /// Test efficiently computing ๐›ทโ‚–(x) = โˆ‘แตขโ‚Œโ‚€แตโปยนxโฑ + /// ๐›ทโ‚™โ‚‹โ‚–โ‚‹โ‚(๐‘ฅยฒ^แตโบยน) = (๐‘ฅยฒ^โฟ โˆ’ 1) / (๐‘ฅยฒ^แตโบยน โˆ’ 1) + #[test] + fn phi_n_k_1_x_evaluation() { + const N: u64 = 8u64; + let log_N = (N as usize).log_2(); + + // ๐›ทโ‚–(๐‘ฅ) + let mut rng = test_rng(); + let x_challenge = Fr::rand(&mut rng); + let k = 2; + + //๐‘ฅยฒ^แตโบยน + let x_pow = x_challenge.pow(BigInt::<1>::from((1 << (k + 1)) as u64)); + + //(๐‘ฅยฒ^โฟ โˆ’ 1) / (๐‘ฅยฒ^แตโบยน โˆ’ 1) + let efficient = (x_challenge.pow(BigInt::<1>::from((1 << log_N) as u64)) - Fr::one()) + / (x_pow - Fr::one()); + let expected: Fr = phi::(&x_challenge, log_N - k - 1); + assert_eq!(efficient, expected); + } + + /// Test construction of ๐‘โ‚“ + /// ๐‘โ‚“ = ฬ‚๐‘“ โˆ’ ๐‘ฃ โˆ‘โ‚–โ‚Œโ‚€โฟโปยน(๐‘ฅยฒ^แต๐›ทโ‚™โ‚‹โ‚–โ‚‹โ‚(๐‘ฅแตโบยน)โˆ’ ๐‘ขโ‚–๐›ทโ‚™โ‚‹โ‚–(๐‘ฅยฒ^แต)) ฬ‚qโ‚– + #[test] + fn partially_evaluated_quotient_z_x() { + let num_vars = 3; + + // Construct a random multilinear polynomial f, and (u,v) such that f(u) = v. + let mut rng = test_rng(); + let challenges: Vec<_> = (0..num_vars) + .into_iter() + .map(|_| Fr::rand(&mut rng)) + .collect(); + + let u_rev = { + let mut res = challenges.clone(); + res.reverse(); + res + }; + + let x_challenge = Fr::rand(&mut rng); + let y_challenge = Fr::rand(&mut rng); + let z_challenge = Fr::rand(&mut rng); + + // Construct Z_x scalars + let (_, (_, z_x_scalars)) = + eval_and_quotient_scalars::(y_challenge, x_challenge, z_challenge, &challenges); + + for k in 0..num_vars { + let x_pow_2k = x_challenge.pow(BigInt::<1>::from((1 << k) as u64)); // x^{2^k} + let x_pow_2kp1 = x_challenge.pow(BigInt::<1>::from((1 << (k + 1)) as u64)); // x^{2^{k+1}} + // x^{2^k} * \Phi_{n-k-1}(x^{2^{k+1}}) - u_k * \Phi_{n-k}(x^{2^k}) + let mut scalar = x_pow_2k * &phi::(&x_pow_2kp1, num_vars - k - 1) + - u_rev[k] * &phi::(&x_pow_2k, num_vars - k); + scalar *= z_challenge; + scalar *= Fr::from(-1); + assert_eq!(z_x_scalars[k], scalar); + } + } +} diff --git a/jolt-core/src/poly/dense_mlpoly.rs b/jolt-core/src/poly/dense_mlpoly.rs index 699485622..ddae1349e 100644 --- a/jolt-core/src/poly/dense_mlpoly.rs +++ b/jolt-core/src/poly/dense_mlpoly.rs @@ -13,7 +13,7 @@ use std::ops::AddAssign; pub struct DensePolynomial { num_vars: usize, // the number of variables in the multilinear polynomial len: usize, - Z: Vec, // evaluations of the polynomial in all the 2^num_vars Boolean inputs + pub Z: Vec, // evaluations of the polynomial in all the 2^num_vars Boolean inputs } impl DensePolynomial { diff --git a/jolt-core/src/poly/unipoly.rs b/jolt-core/src/poly/unipoly.rs index 3f44091a1..acd27871b 100644 --- a/jolt-core/src/poly/unipoly.rs +++ b/jolt-core/src/poly/unipoly.rs @@ -1,8 +1,13 @@ #![allow(dead_code)] +use std::cmp::Ordering; +use std::ops::{AddAssign, Mul}; + +use crate::jolt::vm::Jolt; use crate::poly::field::JoltField; use crate::utils::gaussian_elimination::gaussian_elimination; use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; use ark_serialize::*; +use rayon::iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; // ax^2 + bx + c stored as vec![c,b,a] // ax^3 + bx^2 + cx + d stored as vec![d,c,b,a] @@ 
-128,6 +133,54 @@ impl UniPoly { } } +impl AddAssign<&F> for UniPoly { + fn add_assign(&mut self, rhs: &F) { + #[cfg(feature = "multicore")] + let iter = self.coeffs.par_iter_mut(); + #[cfg(not(feature = "multicore"))] + let iter = self.coeffs.iter_mut(); + iter.for_each(|c| *c += rhs); + } +} + +impl AddAssign<&Self> for UniPoly { + fn add_assign(&mut self, rhs: &Self) { + let ordering = self.coeffs.len().cmp(&rhs.coeffs.len()); + #[allow(clippy::disallowed_methods)] + for (lhs, rhs) in self.coeffs.iter_mut().zip(&rhs.coeffs) { + *lhs += rhs; + } + if matches!(ordering, Ordering::Less) { + self.coeffs + .extend(rhs.coeffs[self.coeffs.len()..].iter().cloned()); + } + } +} + +impl Mul for UniPoly { + type Output = Self; + + fn mul(self, rhs: F) -> Self { + #[cfg(feature = "multicore")] + let iter = self.coeffs.into_par_iter(); + #[cfg(not(feature = "multicore"))] + let iter = self.coeffs.iter(); + Self::from_coeff(iter.map(|c| c * rhs).collect::>()) + } +} + +impl Mul<&F> for UniPoly { + type Output = Self; + + fn mul(self, rhs: &F) -> Self { + #[cfg(feature = "multicore")] + let iter = self.coeffs.into_par_iter(); + #[cfg(not(feature = "multicore"))] + let iter = self.coeffs.iter(); + Self::from_coeff(iter.map(|c| c * rhs).collect::>()) + } +} + impl CompressedUniPoly { // we require eval(0) + eval(1) = hint, so we can solve for the linear term as: // linear_term = hint - 2 * constant_term - deg2 term - deg3 term From 20d12d6bebd831944b85660fd1a96a0a1effc095 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Fri, 3 May 2024 11:58:03 -0700 Subject: [PATCH 03/17] zm skeleton --- jolt-core/src/poly/commitment/zeromorph.rs | 62 +++++++++++----------- 1 file changed, 30 insertions(+), 32 deletions(-) diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index ab4130a9c..f95112a83 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -6,10 +6,11 @@ use std::{iter, marker::PhantomData}; use crate::poly::unipoly::UniPoly; use crate::poly::{self, dense_mlpoly::DensePolynomial}; use crate::utils::errors::ProofVerifyError; -use crate::utils::transcript::ProofTranscript; +use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; use ark_bn254::Bn254; use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::{batch_inversion, Field}; +use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::{iterable::Iterable, ops::Neg, One, Zero}; use itertools::Itertools; use lazy_static::lazy_static; @@ -28,6 +29,7 @@ use ark_ec::VariableBaseMSM; #[cfg(feature = "multicore")] use rayon::prelude::*; +use super::commitment_scheme::{BatchType, CommitShape}; use super::{ commitment_scheme::CommitmentScheme, kzg::{UnivariateKZG, UniversalKzgSrs}, @@ -79,6 +81,17 @@ impl ZeromorphSRS
<P>
{ } } +pub struct ZeromorphCommitment(P::G1); + +impl AppendToTranscript for ZeromorphCommitment
<P>
+where + Self: CurveGroup +{ + fn append_to_transcript(&self, label: &'static [u8], transcript: &mut ProofTranscript) { + transcript.append_point(b"poly_commitment_share", self); + } +} + #[derive(Clone, Debug)] pub struct ZeromorphProverKey { pub g1_powers: Vec, @@ -93,7 +106,8 @@ pub struct ZeromorphVerifierKey { pub tau_N_max_sub_2_N: P::G2Affine, } -#[derive(Clone, Serialize, Deserialize, Debug)] +#[derive(Clone, CanonicalSerialize, + CanonicalDeserialize, Debug)] pub struct ZeromorphProof { pub pi: P::G1Affine, pub q_hat_com: P::G1Affine, @@ -132,7 +146,7 @@ where quotient_iter .zip_eq(&*g_lo) .zip_eq(&*g_hi) - .for_each(|((q, g_lo), g_hi)| { + .for_each(|((mut q, g_lo), g_hi)| { *q = *g_hi - *g_lo; }); @@ -250,7 +264,6 @@ where (-vs[0] * z_challenge, q_scalars) } -/* #[derive(Clone)] pub struct Zeromorph { _phantom: PhantomData
<P>
, @@ -259,19 +272,20 @@ pub struct Zeromorph { impl CommitmentScheme for Zeromorph
<P>
where
<P as Pairing>
::ScalarField: poly::field::JoltField, + ZeromorphCommitment
<P>
: CurveGroup { type Field = P::ScalarField; type Setup = ZeromorphSRS
<P>
; - type Commitment = P::G1Affine; + type Commitment = ZeromorphCommitment
<P>
; type Proof = ZeromorphProof
<P>
; type BatchedProof = ZeromorphProof
<P>
; fn setup(shapes: &[CommitShape]) -> Self::Setup { - + todo!() } fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { - + todo!() } fn batch_commit( @@ -279,18 +293,21 @@ where gens: &Self::Setup, batch_type: BatchType, ) -> Vec { - + todo!() } fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { - + todo!() } fn prove( poly: &DensePolynomial, opening_point: &[Self::Field], // point at which the polynomial is evaluated transcript: &mut ProofTranscript, - ) -> Self::Proof; + ) -> Self::Proof { + todo!() + } + fn batch_prove( polynomials: &[&DensePolynomial], opening_point: &[Self::Field], @@ -298,7 +315,7 @@ where batch_type: BatchType, transcript: &mut ProofTranscript, ) -> Self::BatchedProof { - + todo!() } fn verify( @@ -309,7 +326,7 @@ where opening: &Self::Field, // evaluation \widetilde{Z}(r) commitment: &Self::Commitment, ) -> Result<(), ProofVerifyError> { - + todo!() } fn batch_verify( @@ -320,32 +337,13 @@ where commitments: &[&Self::Commitment], transcript: &mut ProofTranscript, ) -> Result<(), ProofVerifyError> { - + todo!() } fn protocol_name() -> &'static [u8] { b"zeromorph" } - - fn batch_commit_polys( - polys: &[DensePolynomial], - setup: &Self::Setup, - batch_type: super::commitment_scheme::BatchType, - ) -> Vec { - let slices: Vec<&[Self::Field]> = polys.iter().map(|poly| poly.evals_ref()).collect(); - Self::batch_commit(&slices, setup, batch_type) - } - - fn batch_commit_polys_ref( - polys: &[&DensePolynomial], - setup: &Self::Setup, - batch_type: super::commitment_scheme::BatchType, - ) -> Vec { - let slices: Vec<&[Self::Field]> = polys.iter().map(|poly| poly.evals_ref()).collect(); - Self::batch_commit(&slices, setup, batch_type) - } } -*/ #[cfg(test)] mod test { From a4033dbc3688d78e29021460f88c23571189eacf Mon Sep 17 00:00:00 2001 From: PatStiles Date: Sun, 12 May 2024 23:16:13 -0300 Subject: [PATCH 04/17] IT VERIFIES! --- jolt-core/src/poly/commitment/kzg.rs | 10 +- jolt-core/src/poly/commitment/zeromorph.rs | 530 +++++++++++++++++---- jolt-core/src/poly/dense_mlpoly.rs | 7 + jolt-core/src/poly/unipoly.rs | 73 ++- 4 files changed, 515 insertions(+), 105 deletions(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index a6faa4b3a..a8e8d5d67 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -33,6 +33,8 @@ pub struct KZGProverKey { pub g1_powers: Vec, } +#[derive(Clone, Debug)] +pub struct KZGCommitment(P::G1Affine); pub struct KZGVerifierKey { /// The generator of G1. pub g1: P::G1Affine, @@ -139,7 +141,7 @@ where pub fn commit( g1_powers: &Vec, poly: &UniPoly, - ) -> Result { + ) -> Result, KZGError> { if poly.degree() > g1_powers.len() { return Err(KZGError::CommitLengthError(poly.degree(), g1_powers.len())); } @@ -148,7 +150,7 @@ where &poly.as_vec(), ) .unwrap(); - Ok(com.into_affine()) + Ok(KZGCommitment(com.into_affine())) } pub fn open( @@ -174,7 +176,7 @@ where fn verify( vk: impl Borrow>, - commitment: &P::G1Affine, + commitment: &KZGCommitment
<P>
, point: &P::ScalarField, proof: &P::G1Affine, evaluation: &P::ScalarField, @@ -182,7 +184,7 @@ where let vk = vk.borrow(); let lhs = P::pairing( - commitment.into_group() - vk.g1.into_group() * evaluation, + commitment.0.into_group() - vk.g1.into_group() * evaluation, vk.g2, ); let rhs = P::pairing(proof, vk.tau_2.into_group() - (vk.g2 * point)); diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index f95112a83..920815d39 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -5,107 +5,237 @@ use std::{iter, marker::PhantomData}; use crate::poly::unipoly::UniPoly; use crate::poly::{self, dense_mlpoly::DensePolynomial}; -use crate::utils::errors::ProofVerifyError; use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; +use crate::msm::VariableBaseMSM; use ark_bn254::Bn254; -use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; -use ark_ff::{batch_inversion, Field}; +use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::{pairing::Pairing, CurveGroup, AffineRepr}; +use ark_ff::{PrimeField, batch_inversion, Field}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::{iterable::Iterable, ops::Neg, One, Zero}; +use ark_std::{UniformRand, iterable::Iterable, One, Zero}; use itertools::Itertools; use lazy_static::lazy_static; -use merlin::Transcript; use rand_chacha::{ - rand_core::{RngCore, SeedableRng}, + rand_core::SeedableRng, ChaCha20Rng, }; -use serde::{Deserialize, Serialize}; -use std::sync::{Arc, Mutex}; +use rand_core::{CryptoRng, RngCore}; +use std::sync::Arc; use thiserror::Error; #[cfg(feature = "ark-msm")] use ark_ec::VariableBaseMSM; -#[cfg(feature = "multicore")] use rayon::prelude::*; -use super::commitment_scheme::{BatchType, CommitShape}; -use super::{ - commitment_scheme::CommitmentScheme, - kzg::{UnivariateKZG, UniversalKzgSrs}, -}; +//use super::commitment_scheme::{ BatchType, CommitShape, CommitmentScheme}; -const MAX_VARS: usize = 17; +#[derive(Clone, Debug)] +pub struct SRS { + pub g1_powers: Vec, + pub g2_powers: Vec +} + +impl SRS
<P: Pairing>
{ + pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { + let beta = P::ScalarField::rand(&mut rng); + let g1 = P::G1::rand(&mut rng); + let g2 = P::G2::rand(&mut rng); + + let beta_powers: Vec = (0..=max_degree).scan(beta, |acc, _| { + let val = *acc; + *acc *= beta; + Some(val) + }).collect(); + + let window_size = FixedBase::get_mul_window_size(max_degree); + let scalar_bits = P::ScalarField::MODULUS_BIT_SIZE as usize; + + //TODO: gate with rayon + let (g1_powers_projective, g2_powers_projective) = rayon::join( + || { + let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); + FixedBase::msm(scalar_bits, window_size, &g1_table, &beta_powers) + }, + || { + let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); + FixedBase::msm(scalar_bits, window_size, &g2_table, &beta_powers) + } + ); + + let (g1_powers, g2_powers) = rayon::join( + || { + P::G1::normalize_batch(&g1_powers_projective) + + }, + || { + P::G2::normalize_batch(&g2_powers_projective) + }); + + Self { g1_powers, g2_powers } + } + + pub fn trim(params: Arc, supported_size: usize) -> (KZGProverKey
<P>
, KZGVerifierKey
<P>
) { + assert!(params.g1_powers.len() > 0, "max_degree is 0"); + let g1 = params.g1_powers[0]; + let g2 = params.g2_powers[0]; + let beta_g2 = params.g2_powers[1]; + let pk = KZGProverKey::new(params, 0, supported_size + 1); + let vk = KZGVerifierKey {g1, g2, beta_g2}; + (pk, vk) + } -lazy_static! { - pub static ref ZEROMORPH_SRS: Arc>> = - Arc::new(Mutex::new(ZeromorphSRS::setup( - 1 << (MAX_VARS + 1), - &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME") - ))); } -#[derive(Debug, Clone, Default)] -pub struct ZeromorphSRS(UniversalKzgSrs

); +// Abstraction around SRS preventing copying. Arc of SRS +#[derive(Clone, Debug)] +pub struct KZGProverKey { + srs: Arc>, + // offset to read into SRS + offset: usize, + // max size of srs + supported_size: usize, +} -impl ZeromorphSRS

{ - pub fn setup(max_degree: usize, rng: &mut R) -> ZeromorphSRS

{ - ZeromorphSRS(UniversalKzgSrs::

::setup(max_degree, rng)) +impl KZGProverKey

{ + pub fn new( + srs: Arc>, + offset: usize, + supported_size: usize, + ) -> Self { + assert!( + srs.g1_powers.len() >= offset + supported_size, + "not enough powers (req: {} from offset {}) in the SRS (length: {})", + supported_size, + offset, + srs.g1_powers.len() + ); + Self { + srs, + offset, + supported_size, } + } - pub fn trim( - &self, - max_degree: usize, - ) -> Result<(ZeromorphProverKey

, ZeromorphVerifierKey

), ZeromorphError> { - let offset = self.0.g1_powers.len() - max_degree; - if self.0.g1_powers.len() >= max_degree + offset { - return Err(ZeromorphError::KeyLengthError( - max_degree, - self.0.g1_powers.len(), - )); - } - let offset = self.0.g1_powers.len() - max_degree; - let offset_g1_powers = self.0.g1_powers[offset..(offset + max_degree)].to_vec(); - Ok(( - ZeromorphProverKey { - g1_powers: self.0.g1_powers.clone(), - offset_g1_powers, - }, - ZeromorphVerifierKey { - g1: self.0.g1_powers[0], - g2: self.0.g2_powers[0], - tau_2: self.0.g2_powers[1], - tau_N_max_sub_2_N: self.0.g2_powers[offset], - }, - )) - } + pub fn g1_powers(&self) -> &[P::G1Affine] { + &self.srs.g1_powers[self.offset..self.offset + self.supported_size] + } } -pub struct ZeromorphCommitment(P::G1); +// Abstraction around SRS preventing copying. Arc of SRS +#[derive(Clone, Copy, Debug)] +pub struct KZGVerifierKey { + pub g1: P::G1Affine, + pub g2: P::G2Affine, + pub beta_g2: P::G2Affine +} -impl AppendToTranscript for ZeromorphCommitment

+#[derive(Debug, Clone, Eq, PartialEq, Default)] +pub struct UVKZGPCS { + _phantom: PhantomData

, +} + +impl UVKZGPCS

where - Self: CurveGroup +

::ScalarField: poly::field::JoltField, { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut ProofTranscript) { - transcript.append_point(b"poly_commitment_share", self); + fn commit_offset(pk: &KZGProverKey

, poly: &UniPoly, offset: usize) -> Result { + if poly.degree() > pk.g1_powers().len() { + return Err(ZeromorphError::KeyLengthError(poly.degree(), pk.g1_powers().len())) + } + + let scalars = poly.as_vec(); + let bases = pk.g1_powers(); + let c = ::msm(&bases[offset..scalars.len()], &poly.as_vec()[offset..]).unwrap(); + + Ok(c.into_affine()) + } + + pub fn commit(pk: &KZGProverKey

, poly: &UniPoly) -> Result { + if poly.degree() > pk.g1_powers().len() { + return Err(ZeromorphError::KeyLengthError(poly.degree(), pk.g1_powers().len())) + } + let c = ::msm( &pk.g1_powers()[..poly.as_vec().len()], &poly.as_vec().as_slice()).unwrap(); + Ok(c.into_affine()) + } + + fn open( + pk: &KZGProverKey

, + poly: &UniPoly, + point: &P::ScalarField + ) -> Result<(P::G1Affine, P::ScalarField), ZeromorphError> + where +

::ScalarField: poly::field::JoltField + { + let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); + let (witness_poly, _) = poly.divide_with_q_and_r(&divisor).unwrap(); + let proof = ::msm(&pk.g1_powers()[..witness_poly.as_vec().len()], &witness_poly.as_vec().as_slice()).unwrap(); + let evaluation = poly.evaluate(point); + Ok((proof.into_affine(), evaluation)) + } + +} + +const MAX_VARS: usize = 17; + +lazy_static! { + pub static ref ZEROMORPH_SRS: ZeromorphSRS = + ZeromorphSRS(Arc::new(SRS::setup( + &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), + 1 << (MAX_VARS + 1) + ))); +} + +pub struct ZeromorphSRS(Arc>); + +impl ZeromorphSRS

{ + pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { + Self(Arc::new(SRS::setup(rng, max_degree))) + } + + pub fn trim(self, max_degree: usize) -> (ZeromorphProverKey

, ZeromorphVerifierKey

) { + //TODO: remove into() + let (commit_pp, kzg_vk) = SRS::trim(self.0.clone(), max_degree); + let offset = self.0.g1_powers.len() - max_degree; + let tau_N_max_sub_2_N = self.0.g2_powers[offset]; + let open_pp = KZGProverKey::new(self.0, offset, max_degree); + ( + ZeromorphProverKey {commit_pp, open_pp}, + ZeromorphVerifierKey {kzg_vk, tau_N_max_sub_2_N} + ) } } +//TODO: adapt interface to have prover and verifier key #[derive(Clone, Debug)] pub struct ZeromorphProverKey { - pub g1_powers: Vec, - pub offset_g1_powers: Vec, + pub commit_pp: KZGProverKey

, + pub open_pp: KZGProverKey

, } #[derive(Copy, Clone, Debug)] pub struct ZeromorphVerifierKey { - pub g1: P::G1Affine, - pub g2: P::G2Affine, - pub tau_2: P::G2Affine, + pub kzg_vk: KZGVerifierKey

, pub tau_N_max_sub_2_N: P::G2Affine, } +#[derive(Error, Debug)] +pub enum ZeromorphError { + #[error("Length Error: SRS Length: {0}, Key Length: {0}")] + KeyLengthError(usize, usize), +} + +pub struct ZeromorphCommitment(P::G1Affine); + +impl AppendToTranscript for ZeromorphCommitment

+where + Self: CurveGroup +{ + fn append_to_transcript(&self, label: &'static [u8], transcript: &mut ProofTranscript) { + transcript.append_point(b"poly_commitment_share", self); + } +} + #[derive(Clone, CanonicalSerialize, CanonicalDeserialize, Debug)] pub struct ZeromorphProof { @@ -114,28 +244,23 @@ pub struct ZeromorphProof { pub q_k_com: Vec, } -#[derive(Error, Debug)] -pub enum ZeromorphError { - #[error("Length Error: SRS Length: {0}, Key Length: {0}")] - KeyLengthError(usize, usize), -} - fn compute_multilinear_quotients( poly: &DensePolynomial, - u_challenge: &[P::ScalarField], + point: &[P::ScalarField], ) -> (Vec>, P::ScalarField) where

::ScalarField: poly::field::JoltField, { - assert_eq!(poly.get_num_vars(), u_challenge.len()); + let num_var = poly.get_num_vars(); + assert_eq!(num_var, point.len()); - let mut g = poly.Z.to_vec(); - let mut quotients: Vec<_> = u_challenge + let mut remainder = poly.Z.to_vec(); + let mut quotients: Vec<_> = point .iter() .enumerate() .map(|(i, x_i)| { - let (g_lo, g_hi) = g.split_at_mut(1 << (poly.get_num_vars() - 1 - i)); - let mut quotient = vec![P::ScalarField::zero(); g_lo.len()]; + let (remainder_lo, remainder_hi) = remainder.split_at_mut(1 << (num_var - 1 - i)); + let mut quotient = vec![P::ScalarField::zero(); remainder_lo.len()]; #[cfg(feature = "multicore")] let quotient_iter = quotient.par_iter_mut(); @@ -144,50 +269,53 @@ where let quotient_iter = quotient.iter_mut(); quotient_iter - .zip_eq(&*g_lo) - .zip_eq(&*g_hi) - .for_each(|((mut q, g_lo), g_hi)| { - *q = *g_hi - *g_lo; + .zip_eq(&*remainder_lo) + .zip_eq(&*remainder_hi) + .for_each(|((mut q, r_lo), r_hi)| { + *q = *r_hi - *r_lo; }); #[cfg(feature = "multicore")] - let g_lo_iter = g_lo.par_iter_mut(); + let remainder_lo_iter = remainder_lo.par_iter_mut(); #[cfg(not(feature = "multicore"))] - let g_lo_iter = g_lo.iter_mut(); - g_lo_iter.zip_eq(g_hi).for_each(|(g_lo, g_hi)| { - *g_lo += (*g_hi - g_lo as &_) * x_i; + let remainder_lo_iter = remainder_lo.iter_mut(); + remainder_lo_iter.zip_eq(remainder_hi).for_each(|(r_lo, r_hi)| { + *r_lo += (*r_hi - r_lo as &_) * x_i; }); - g.truncate(1 << (poly.get_num_vars() - 1 - i)); + remainder.truncate(1 << (num_var - 1 - i)); UniPoly::from_coeff(quotient) }) .collect(); quotients.reverse(); - (quotients, g[0]) + (quotients, remainder[0]) } +// Compute the batched, lifted-degree quotient `\hat{q}` fn compute_batched_lifted_degree_quotient( - n: usize, - quotients: &Vec>, + quotients: &[UniPoly], y_challenge: &P::ScalarField, -) -> UniPoly +) -> (UniPoly, usize) where

::ScalarField: poly::field::JoltField, { + let num_vars = quotients.len(); + // Compute \hat{q} = \sum_k y^k * X^{N - d_k - 1} * q_k let mut scalar = P::ScalarField::one(); // y^k - // Rather than explicitly computing the shifts of q_k by N - d_k - 1 (i.e. multiplying q_k by X^{N - d_k - 1}) - // then accumulating them, we simply accumulate y^k*q_k into \hat{q} at the index offset N - d_k - 1 + + // Rather than explicitly computing the shifts of q_k by N - d_k - 1 (i.e. multiplying q_k by X^{N - d_k - 1}) + // then accumulating them, we simply accumulate y^k*q_k into \hat{q} at the index offset N - d_k - 1 let q_hat = quotients.iter().enumerate().fold( - vec![P::ScalarField::zero(); n], + vec![P::ScalarField::zero(); 1 << num_vars], |mut q_hat, (idx, q)| { #[cfg(feature = "multicore")] - let q_hat_iter = q_hat[n - (1 << idx)..].par_iter_mut(); + let q_hat_iter = q_hat[(1 << num_vars) - (1 << idx)..].par_iter_mut(); #[cfg(not(feature = "multicore"))] - let q_hat_iter = q_hat[n - (1 << idx)..].iter_mut(); + let q_hat_iter = q_hat[(1 << num_vars) - (1 << idx)..].iter_mut(); q_hat_iter.zip(&q.as_vec()).for_each(|(q_hat, q)| { *q_hat += scalar * q; }); @@ -196,7 +324,7 @@ where }, ); - UniPoly::from_coeff(q_hat) + (UniPoly::from_coeff(q_hat), 1 << (num_vars - 1)) } fn eval_and_quotient_scalars( @@ -269,22 +397,145 @@ pub struct Zeromorph { _phantom: PhantomData

, } +impl Zeromorph

+where +

::ScalarField: poly::field::JoltField, +{ + pub fn protocol_name() -> &'static [u8] { + b"Zeromorph" + } + + //IDEAS; + // - extra sampling from transcript??? -> no adding randomness has to due with information leakage not verification + + pub fn commit(pp: &ZeromorphProverKey

, poly: &DensePolynomial) -> Result { + if pp.commit_pp.g1_powers().len() < poly.Z.len() { + return Err(ZeromorphError::KeyLengthError(pp.commit_pp.g1_powers().len(), poly.Z.len())) + } + UVKZGPCS::commit(&pp.commit_pp, &UniPoly::from_coeff(poly.Z.clone())) + } + + pub fn open(pp: &ZeromorphProverKey

, comm: &P::G1Affine, poly: &DensePolynomial, point: &[P::ScalarField], eval: &P::ScalarField, transcript: &mut ProofTranscript) -> Result, ZeromorphError> { + transcript.append_protocol_name(Self::protocol_name()); + + if pp.commit_pp.g1_powers().len() < poly.Z.len() { + return Err(ZeromorphError::KeyLengthError(pp.commit_pp.g1_powers().len(), poly.Z.len())) + } + + assert_eq!(Self::commit(pp, poly).unwrap(), *comm); + assert_eq!(poly.evaluate(point), *eval); + + let (quotients, remainder): (Vec>, P::ScalarField) = compute_multilinear_quotients::

(poly, point); + assert_eq!(quotients.len(), poly.get_num_vars()); + assert_eq!(remainder, *eval); + + // Compute the multilinear quotients q_k = q_k(X_0, ..., X_{k-1}) + let q_k_com: Vec = quotients.par_iter().map(|q| UVKZGPCS::commit(&pp.commit_pp, q).unwrap()).collect(); + let q_comms: Vec = q_k_com.clone().into_iter().map(|c| c.into_group()).collect(); + //transcript.append_points(b"q_comms", &q_comms); + q_comms.iter().for_each(|c| transcript.append_point(b"quo", c)); + + // Sample challenge y + let y_challenge: P::ScalarField = transcript.challenge_scalar(b"y"); + + // Compute the batched, lifted-degree quotient `\hat{q}` + // qq_hat = โˆ‘_{i=0}^{num_vars-1} y^i * X^(2^num_vars - d_k - 1) * q_i(x) + let (q_hat, offset) = compute_batched_lifted_degree_quotient::

("ients, &y_challenge); + + // Compute and absorb the commitment C_q = [\hat{q}] + let q_hat_com = UVKZGPCS::commit_offset(&pp.commit_pp, &q_hat, offset)?; + transcript.append_point(b"q_hat", &q_hat_com.into_group()); + + // Get x and z challenges + let x_challenge = transcript.challenge_scalar(b"x"); + let z_challenge = transcript.challenge_scalar(b"z"); + + // Compute batched degree and ZM-identity quotient polynomial pi + let (eval_scalar, (degree_check_q_scalars, zmpoly_q_scalars)): (P::ScalarField, (Vec, Vec)) = eval_and_quotient_scalars::

(y_challenge, x_challenge, z_challenge, point); + // f = z * poly.Z + q_hat + (-z * ฮฆ_n(x) * e) + โˆ‘_k (q_scalars_k * q_k) + let mut f = UniPoly::from_coeff(poly.Z.clone()); + f *= &z_challenge; + f += &q_hat; + f[0] += eval_scalar * eval; + quotients + .into_iter() + .zip(degree_check_q_scalars) + .zip(zmpoly_q_scalars) + .for_each(|((mut q, degree_check_scalar), zm_poly_scalar)| { + q *= &(degree_check_scalar + zm_poly_scalar); + f += &q; + }); + debug_assert_eq!(f.evaluate(&x_challenge), P::ScalarField::zero()); + + // Compute and send proof commitment pi + let (pi, _) = UVKZGPCS::open(&pp.open_pp, &f, &x_challenge)?; + + Ok(ZeromorphProof { pi, q_hat_com, q_k_com }) + } + + pub fn verify(vk: &ZeromorphVerifierKey

, comm: &P::G1Affine, point: &[P::ScalarField], eval: &P::ScalarField, proof: &ZeromorphProof

, transcript: &mut ProofTranscript) -> Result { + transcript.append_protocol_name(Self::protocol_name()); + + // Receive commitments [q_k] + //TODO: remove clone + let q_comms: Vec = proof.q_k_com.clone().into_iter().map(|c| c.into_group()).collect(); + q_comms.iter().for_each(|c| transcript.append_point(b"quo", c)); + + // Challenge y + let y_challenge: P::ScalarField = transcript.challenge_scalar(b"y"); + + // Receive commitment C_q_hat + transcript.append_point(b"q_hat", &proof.q_hat_com.into_group()); + + // Get x and z challenges + let x_challenge = transcript.challenge_scalar(b"x"); + let z_challenge = transcript.challenge_scalar(b"z"); + + // Compute batched degree and ZM-identity quotient polynomial pi + let (eval_scalar, (mut q_scalars, zmpoly_q_scalars)): (P::ScalarField, (Vec, Vec)) = eval_and_quotient_scalars::

(y_challenge, x_challenge, z_challenge, point); + q_scalars + .iter_mut() + .zip_eq(zmpoly_q_scalars) + .for_each(|(scalar, zm_poly_q_scalar)| { + *scalar += zm_poly_q_scalar; + }); + let scalars = [vec![P::ScalarField::one(), z_challenge, eval_scalar * eval], q_scalars].concat(); + let bases = [ + vec![proof.q_hat_com, *comm, vk.kzg_vk.g1], + //TODO: eliminate + proof.q_k_com.clone() + ].concat(); + let c = ::msm(&bases, &scalars).unwrap().into_affine(); + + let pairing = P::multi_pairing( + &[c, proof.pi], + &[(-vk.tau_N_max_sub_2_N.into_group()).into_affine(), (vk.kzg_vk.beta_g2.into_group() - (vk.kzg_vk.g2 * x_challenge)).into()] + ); + Ok(pairing.0.is_one().into()) + + } +} + +/* impl CommitmentScheme for Zeromorph

where

::ScalarField: poly::field::JoltField, ZeromorphCommitment

: CurveGroup { type Field = P::ScalarField; - type Setup = ZeromorphSRS

; + type Setup = Vec<(ZeromorphProverKey

, ZeromorphVerifierKey

)>; type Commitment = ZeromorphCommitment

; type Proof = ZeromorphProof

; type BatchedProof = ZeromorphProof

; fn setup(shapes: &[CommitShape]) -> Self::Setup { + //TODO: Does using lazy_static! lead to large problems todo!() } fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { + // TODO: assert lengths are valid + //ZeromorphCommitment(UnivariateKZG::

::commit(setup, &UniPoly::from_coeff(poly.Z.clone())).unwrap()) todo!() } @@ -293,7 +544,14 @@ where gens: &Self::Setup, batch_type: BatchType, ) -> Vec { - todo!() + // TODO: assert lengths are valid + #[cfg(feature = "multicore")] + let iter = evals.par_iter(); + #[cfg(not(feature = "multicore"))] + let iter = evals.iter(); + iter + .map(|poly| ZeromorphCommitment(UnivariateKZG::
<P>
::commit(gens, &UniPoly::from_coeff(poly.Z.clone())).unwrap())) + .collect::>() } fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { @@ -344,15 +602,17 @@ where b"zeromorph" } } +*/ #[cfg(test)] mod test { - use super::*; use crate::utils::math::Math; use ark_bn254::{Bn254, Fr}; use ark_ff::{BigInt, Zero}; - use ark_std::{test_rng, UniformRand}; + use ark_std::{rand::Rng, test_rng, UniformRand}; + use ark_ec::AffineRepr; + use rand_core::SeedableRng; // Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula fn phi(challenge: &P::ScalarField, subscript: usize) -> P::ScalarField { @@ -547,8 +807,8 @@ mod test { let y_challenge = Fr::rand(&mut rng); //Compute batched quptient ฬ‚q - let batched_quotient = - compute_batched_lifted_degree_quotient::(n, "ients, &y_challenge); + let (batched_quotient, _) = + compute_batched_lifted_degree_quotient::("ients, &y_challenge); //Explicitly define q_k_lifted = X^{N-2^k} * q_k and compute the expected batched result let q_0_lifted = UniPoly::from_coeff(vec![ @@ -706,4 +966,76 @@ mod test { assert_eq!(z_x_scalars[k], scalar); } } + + fn kzg_verify( + vk: &KZGVerifierKey
<P>
, + commitment: &P::G1Affine, + point: &P::ScalarField, + proof: &P::G1Affine, + evaluation: &P::ScalarField, + ) -> Result { + + let lhs = P::pairing( + commitment.into_group() - vk.g1.into_group() * evaluation, + vk.g2, + ); + let rhs = P::pairing(proof, vk.beta_g2.into_group() - (vk.g2 * point)); + Ok(lhs == rhs) + } + + fn random(degree: usize, mut rng: &mut R) -> UniPoly + where +
<P as Pairing>
::ScalarField: poly::field::JoltField, + { + let coeffs = (0..=degree) + .map(|_| P::ScalarField::rand(&mut rng)) + .collect::>(); + UniPoly::from_coeff(coeffs) + } + + #[test] + fn kzg_commit_prove_verify() -> Result<(), ZeromorphError> { + let seed = b"11111111111111111111111111111111"; + for _ in 0..100 { + let mut rng = &mut ChaCha20Rng::from_seed(*seed); + let degree = rng.gen_range(2..20); + + let pp = Arc::new(SRS::::setup(&mut rng, degree)); + let (ck, vk) = SRS::trim(pp, degree); + let p = random::(degree, rng); + let comm = UVKZGPCS::::commit(&ck, &p)?; + let point = Fr::rand(rng); + let (proof, value) = UVKZGPCS::::open(&ck, &p, &point)?; + assert!( + kzg_verify(&vk, &comm, &point, &proof, &value)?, + "proof was incorrect for max_degree = {}, polynomial_degree = {}", + degree, + p.degree(), + ); + } + Ok(()) + } + + #[test] + fn zeromorph_commit_prove_verify() + { + for num_vars in [4, 5, 6] { + let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(num_vars as u64); + + let poly = DensePolynomial::random(num_vars, &mut rng); + let point: Vec<::ScalarField> = (0..num_vars).map(|_| ::ScalarField::rand(&mut rng)).collect(); + let eval = poly.evaluate(&point); + + let srs = ZeromorphSRS::::setup(&mut rng, 1 << num_vars); + let (pk, vk) = srs.trim(1 << num_vars); + let commitment = Zeromorph::::commit(&pk, &poly).unwrap(); + + let mut prover_transcript = ProofTranscript::new(b"TestEval"); + let proof = Zeromorph::::open(&pk, &commitment, &poly, &point, &eval, &mut prover_transcript).unwrap(); + + // Verify proof. + let mut verifier_transcript = ProofTranscript::new(b"TestEval"); + assert!(Zeromorph::::verify(&vk, &commitment, &point, &eval, &proof, &mut verifier_transcript).unwrap()) + } + } } diff --git a/jolt-core/src/poly/dense_mlpoly.rs b/jolt-core/src/poly/dense_mlpoly.rs index ddae1349e..723dc1723 100644 --- a/jolt-core/src/poly/dense_mlpoly.rs +++ b/jolt-core/src/poly/dense_mlpoly.rs @@ -6,6 +6,7 @@ use crate::utils::{self, compute_dotproduct, compute_dotproduct_low_optimized}; use crate::poly::field::JoltField; use crate::utils::math::Math; use core::ops::Index; +use rand_core::{CryptoRng, RngCore}; use rayon::prelude::*; use std::ops::AddAssign; @@ -272,6 +273,12 @@ impl DensePolynomial { .collect::>(), ) } + + pub fn random(num_vars: usize, mut rng: &mut R) -> Self { + Self::new(std::iter::from_fn(|| Some(F::random(&mut rng))).take(1 << num_vars).collect()) + + + } } impl Clone for DensePolynomial { diff --git a/jolt-core/src/poly/unipoly.rs b/jolt-core/src/poly/unipoly.rs index acd27871b..14d15098d 100644 --- a/jolt-core/src/poly/unipoly.rs +++ b/jolt-core/src/poly/unipoly.rs @@ -1,12 +1,13 @@ #![allow(dead_code)] use std::cmp::Ordering; -use std::ops::{AddAssign, Mul}; +use std::ops::{AddAssign, Index, IndexMut, Mul, MulAssign}; use crate::jolt::vm::Jolt; use crate::poly::field::JoltField; use crate::utils::gaussian_elimination::gaussian_elimination; use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; use ark_serialize::*; +use rand_core::{CryptoRng, RngCore}; use rayon::iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; // ax^2 + bx + c stored as vec![c,b,a] @@ -131,6 +132,21 @@ impl UniPoly { coeffs_except_linear_term, } } + + pub fn divide_minus_u(&self, u: F) -> Self { + let d = self.coeffs.len(); + + // Compute h(x) = f(x)/(x - u) + let mut h = vec![F::zero(); d]; + for i in (1..d).rev() { + h[i - 1] = self.coeffs[i] + h[i] * u; + } + Self::from_coeff(h) + } + + pub fn random(num_vars: usize, mut rng: &mut R) -> Self { + 
Self::from_coeff(std::iter::from_fn(|| Some(F::random(&mut rng))).take(num_vars).collect()) + } } impl AddAssign<&F> for UniPoly { @@ -181,6 +197,26 @@ impl Mul<&F> for UniPoly { } } +impl Index for UniPoly { + type Output = F; + + fn index(&self, index: usize) -> &Self::Output { + &self.coeffs[index] + } +} + +impl IndexMut for UniPoly { + fn index_mut(&mut self, index: usize) -> &mut Self::Output { + &mut self.coeffs[index] + } + } + +impl MulAssign<&F> for UniPoly { + fn mul_assign(&mut self, rhs: &F) { + self.coeffs.par_iter_mut().for_each(|c| *c *= rhs); + } +} + impl CompressedUniPoly { // we require eval(0) + eval(1) = hint, so we can solve for the linear term as: // linear_term = hint - 2 * constant_term - deg2 term - deg3 term @@ -210,9 +246,10 @@ impl AppendToTranscript for UniPoly { #[cfg(test)] mod tests { - use super::*; use ark_bn254::Fr; + use rand_chacha::ChaCha20Rng; + use rand_core::SeedableRng; #[test] fn test_from_evals_quad() { @@ -276,4 +313,36 @@ mod tests { let e4 = F::from_u64(109u64).unwrap(); assert_eq!(poly.evaluate(&F::from_u64(4u64).unwrap()), e4); } + + pub fn naive_mul(ours: &UniPoly, other: &UniPoly) -> UniPoly { + if ours.is_zero() || other.is_zero() { + UniPoly::zero() + } else { + let mut result = vec![F::zero(); ours.degree() + other.degree() + 1]; + for (i, self_coeff) in ours.coeffs.iter().enumerate() { + for (j, other_coeff) in other.coeffs.iter().enumerate() { + result[i + j] += &(*self_coeff * other_coeff); + } + } + UniPoly::from_coeff(result) + } + } + + #[test] + fn test_divide_poly() { + let rng = &mut ChaCha20Rng::from_seed([0u8; 32]); + + for a_degree in 0..50 { + for b_degree in 0..50 { + let dividend = UniPoly::::random(a_degree, rng); + let divisor = UniPoly::::random(b_degree, rng); + + if let Some((quotient, remainder)) = UniPoly::divide_with_q_and_r(÷nd, &divisor) { + let mut prod = naive_mul(&divisor, "ient); + prod += &remainder; + assert_eq!(dividend, prod) + } + } + } + } } From 74c6ac7858d2a0ad669156cc53301f0bc8dbfcd7 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 16 May 2024 10:10:45 -0300 Subject: [PATCH 05/17] batched commits work --- jolt-core/src/poly/commitment/kzg.rs | 4 +- jolt-core/src/poly/commitment/mod.rs | 1 - jolt-core/src/poly/commitment/zeromorph.rs | 1035 +++++++++++++------- jolt-core/src/poly/dense_mlpoly.rs | 23 +- jolt-core/src/poly/unipoly.rs | 55 +- jolt-core/src/utils/errors.rs | 2 + 6 files changed, 708 insertions(+), 412 deletions(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index a8e8d5d67..6045d10bb 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -174,7 +174,7 @@ where Ok((proof.into_affine(), evaluation)) } - fn verify( + fn _verify( vk: impl Borrow>, commitment: &KZGCommitment
<P>
, point: &P::ScalarField, @@ -228,7 +228,7 @@ mod tests { let point = Fr::rand(rng); let (proof, value) = UnivariateKZG::::open(&ck, &p, &point)?; assert!( - UnivariateKZG::::verify(&vk, &comm, &point, &proof, &value)?, + UnivariateKZG::::_verify(&vk, &comm, &point, &proof, &value)?, "proof was incorrect for max_degree = {}, polynomial_degree = {}", degree, p.degree(), diff --git a/jolt-core/src/poly/commitment/mod.rs b/jolt-core/src/poly/commitment/mod.rs index 61a62504e..eed9b953e 100644 --- a/jolt-core/src/poly/commitment/mod.rs +++ b/jolt-core/src/poly/commitment/mod.rs @@ -1,5 +1,4 @@ pub mod commitment_scheme; pub mod hyrax; -pub mod kzg; pub mod pedersen; pub mod zeromorph; diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index 920815d39..c0fca7f35 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -3,207 +3,228 @@ use std::{iter, marker::PhantomData}; +use crate::msm::VariableBaseMSM; use crate::poly::unipoly::UniPoly; use crate::poly::{self, dense_mlpoly::DensePolynomial}; +use crate::utils::errors::ProofVerifyError; use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; -use crate::msm::VariableBaseMSM; use ark_bn254::Bn254; use ark_ec::scalar_mul::fixed_base::FixedBase; -use ark_ec::{pairing::Pairing, CurveGroup, AffineRepr}; -use ark_ff::{PrimeField, batch_inversion, Field}; +use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; +use ark_ff::{batch_inversion, Field, PrimeField}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::{UniformRand, iterable::Iterable, One, Zero}; +use ark_std::{One, UniformRand, Zero}; use itertools::Itertools; use lazy_static::lazy_static; -use rand_chacha::{ - rand_core::SeedableRng, - ChaCha20Rng, -}; +use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use rand_core::{CryptoRng, RngCore}; use std::sync::Arc; -use thiserror::Error; #[cfg(feature = "ark-msm")] use ark_ec::VariableBaseMSM; use rayon::prelude::*; +use super::commitment_scheme::{BatchType, CommitShape, CommitmentScheme}; + //use super::commitment_scheme::{ BatchType, CommitShape, CommitmentScheme}; #[derive(Clone, Debug)] pub struct SRS { - pub g1_powers: Vec, - pub g2_powers: Vec + pub g1_powers: Vec, + pub g2_powers: Vec, } impl SRS
<P>
{ - pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { - let beta = P::ScalarField::rand(&mut rng); - let g1 = P::G1::rand(&mut rng); - let g2 = P::G2::rand(&mut rng); - - let beta_powers: Vec = (0..=max_degree).scan(beta, |acc, _| { - let val = *acc; - *acc *= beta; - Some(val) - }).collect(); - - let window_size = FixedBase::get_mul_window_size(max_degree); - let scalar_bits = P::ScalarField::MODULUS_BIT_SIZE as usize; - - //TODO: gate with rayon - let (g1_powers_projective, g2_powers_projective) = rayon::join( - || { - let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); - FixedBase::msm(scalar_bits, window_size, &g1_table, &beta_powers) - }, - || { - let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); - FixedBase::msm(scalar_bits, window_size, &g2_table, &beta_powers) - } - ); + pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { + let beta = P::ScalarField::rand(&mut rng); + let g1 = P::G1::rand(&mut rng); + let g2 = P::G2::rand(&mut rng); + + let beta_powers: Vec = (0..=max_degree) + .scan(beta, |acc, _| { + let val = *acc; + *acc *= beta; + Some(val) + }) + .collect(); - let (g1_powers, g2_powers) = rayon::join( - || { - P::G1::normalize_batch(&g1_powers_projective) + let window_size = FixedBase::get_mul_window_size(max_degree); + let scalar_bits = P::ScalarField::MODULUS_BIT_SIZE as usize; - }, - || { - P::G2::normalize_batch(&g2_powers_projective) - }); + //TODO: gate with rayon + let (g1_powers_projective, g2_powers_projective) = rayon::join( + || { + let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); + FixedBase::msm(scalar_bits, window_size, &g1_table, &beta_powers) + }, + || { + let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); + FixedBase::msm(scalar_bits, window_size, &g2_table, &beta_powers) + }, + ); - Self { g1_powers, g2_powers } - } + let (g1_powers, g2_powers) = rayon::join( + || P::G1::normalize_batch(&g1_powers_projective), + || P::G2::normalize_batch(&g2_powers_projective), + ); - pub fn trim(params: Arc, supported_size: usize) -> (KZGProverKey
<P>
, KZGVerifierKey
<P>
) { - assert!(params.g1_powers.len() > 0, "max_degree is 0"); - let g1 = params.g1_powers[0]; - let g2 = params.g2_powers[0]; - let beta_g2 = params.g2_powers[1]; - let pk = KZGProverKey::new(params, 0, supported_size + 1); - let vk = KZGVerifierKey {g1, g2, beta_g2}; - (pk, vk) - } + Self { + g1_powers, + g2_powers, + } + } + pub fn trim(params: Arc, supported_size: usize) -> (KZGProverKey
<P>
, KZGVerifierKey
<P>
) { + assert!(params.g1_powers.len() > 0, "max_degree is 0"); + let g1 = params.g1_powers[0]; + let g2 = params.g2_powers[0]; + let beta_g2 = params.g2_powers[1]; + let pk = KZGProverKey::new(params, 0, supported_size + 1); + let vk = KZGVerifierKey { g1, g2, beta_g2 }; + (pk, vk) + } } // Abstraction around SRS preventing copying. Arc of SRS #[derive(Clone, Debug)] pub struct KZGProverKey { - srs: Arc>, - // offset to read into SRS - offset: usize, - // max size of srs - supported_size: usize, -} - -impl KZGProverKey
<P>
{ - pub fn new( srs: Arc>, + // offset to read into SRS offset: usize, + // max size of srs supported_size: usize, - ) -> Self { - assert!( - srs.g1_powers.len() >= offset + supported_size, - "not enough powers (req: {} from offset {}) in the SRS (length: {})", - supported_size, - offset, - srs.g1_powers.len() - ); - Self { - srs, - offset, - supported_size, +} + +impl KZGProverKey
<P>
{ + pub fn new(srs: Arc>, offset: usize, supported_size: usize) -> Self { + assert!( + srs.g1_powers.len() >= offset + supported_size, + "not enough powers (req: {} from offset {}) in the SRS (length: {})", + supported_size, + offset, + srs.g1_powers.len() + ); + Self { + srs, + offset, + supported_size, + } } - } - pub fn g1_powers(&self) -> &[P::G1Affine] { - &self.srs.g1_powers[self.offset..self.offset + self.supported_size] - } + pub fn g1_powers(&self) -> &[P::G1Affine] { + &self.srs.g1_powers[self.offset..self.offset + self.supported_size] + } } // Abstraction around SRS preventing copying. Arc of SRS #[derive(Clone, Copy, Debug)] pub struct KZGVerifierKey { - pub g1: P::G1Affine, - pub g2: P::G2Affine, - pub beta_g2: P::G2Affine + pub g1: P::G1Affine, + pub g2: P::G2Affine, + pub beta_g2: P::G2Affine, } #[derive(Debug, Clone, Eq, PartialEq, Default)] pub struct UVKZGPCS { - _phantom: PhantomData
<P>
, + _phantom: PhantomData
<P>
, } -impl UVKZGPCS
<P>
+impl UVKZGPCS
<P>
where
<P as Pairing>
::ScalarField: poly::field::JoltField, { - fn commit_offset(pk: &KZGProverKey
<P>
, poly: &UniPoly, offset: usize) -> Result { - if poly.degree() > pk.g1_powers().len() { - return Err(ZeromorphError::KeyLengthError(poly.degree(), pk.g1_powers().len())) - } + fn commit_offset( + pk: &KZGProverKey
<P>
, + poly: &UniPoly, + offset: usize, + ) -> Result { + if poly.degree() > pk.g1_powers().len() { + return Err(ProofVerifyError::KeyLengthError( + poly.degree(), + pk.g1_powers().len(), + )); + } - let scalars = poly.as_vec(); - let bases = pk.g1_powers(); - let c = ::msm(&bases[offset..scalars.len()], &poly.as_vec()[offset..]).unwrap(); + let scalars = poly.as_vec(); + let bases = pk.g1_powers(); + let c = ::msm( + &bases[offset..scalars.len()], + &poly.as_vec()[offset..], + ) + .unwrap(); - Ok(c.into_affine()) - } + Ok(c.into_affine()) + } - pub fn commit(pk: &KZGProverKey
<P>
, poly: &UniPoly) -> Result { - if poly.degree() > pk.g1_powers().len() { - return Err(ZeromorphError::KeyLengthError(poly.degree(), pk.g1_powers().len())) + pub fn commit( + pk: &KZGProverKey
<P>
, + poly: &UniPoly, + ) -> Result { + if poly.degree() > pk.g1_powers().len() { + return Err(ProofVerifyError::KeyLengthError( + poly.degree(), + pk.g1_powers().len(), + )); + } + let c = ::msm( + &pk.g1_powers()[..poly.as_vec().len()], + &poly.as_vec().as_slice(), + ) + .unwrap(); + Ok(c.into_affine()) } - let c = ::msm( &pk.g1_powers()[..poly.as_vec().len()], &poly.as_vec().as_slice()).unwrap(); - Ok(c.into_affine()) - } - - fn open( - pk: &KZGProverKey
<P>
, - poly: &UniPoly, - point: &P::ScalarField - ) -> Result<(P::G1Affine, P::ScalarField), ZeromorphError> - where -
<P as Pairing>
::ScalarField: poly::field::JoltField - { - let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); - let (witness_poly, _) = poly.divide_with_q_and_r(&divisor).unwrap(); - let proof = ::msm(&pk.g1_powers()[..witness_poly.as_vec().len()], &witness_poly.as_vec().as_slice()).unwrap(); - let evaluation = poly.evaluate(point); - Ok((proof.into_affine(), evaluation)) - } + fn open( + pk: &KZGProverKey
<P>
, + poly: &UniPoly, + point: &P::ScalarField, + ) -> Result<(P::G1Affine, P::ScalarField), ProofVerifyError> + where +
<P as Pairing>
::ScalarField: poly::field::JoltField, + { + let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); + let (witness_poly, _) = poly.divide_with_q_and_r(&divisor).unwrap(); + let proof = ::msm( + &pk.g1_powers()[..witness_poly.as_vec().len()], + &witness_poly.as_vec().as_slice(), + ) + .unwrap(); + let evaluation = poly.evaluate(point); + Ok((proof.into_affine(), evaluation)) + } } const MAX_VARS: usize = 17; lazy_static! { - pub static ref ZEROMORPH_SRS: ZeromorphSRS = - ZeromorphSRS(Arc::new(SRS::setup( - &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), - 1 << (MAX_VARS + 1) - ))); + pub static ref ZEROMORPH_SRS: ZeromorphSRS = ZeromorphSRS(Arc::new(SRS::setup( + &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), + 1 << (MAX_VARS + 1) + ))); } pub struct ZeromorphSRS(Arc>); impl ZeromorphSRS
<P>
{ - pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { - Self(Arc::new(SRS::setup(rng, max_degree))) - } - - pub fn trim(self, max_degree: usize) -> (ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
) { - //TODO: remove into() - let (commit_pp, kzg_vk) = SRS::trim(self.0.clone(), max_degree); - let offset = self.0.g1_powers.len() - max_degree; - let tau_N_max_sub_2_N = self.0.g2_powers[offset]; - let open_pp = KZGProverKey::new(self.0, offset, max_degree); - ( - ZeromorphProverKey {commit_pp, open_pp}, - ZeromorphVerifierKey {kzg_vk, tau_N_max_sub_2_N} - ) - } + pub fn setup(rng: &mut R, max_degree: usize) -> Self { + Self(Arc::new(SRS::setup(rng, max_degree))) + } + + pub fn trim(self, max_degree: usize) -> (ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
) { + //TODO: remove into() + let (commit_pp, kzg_vk) = SRS::trim(self.0.clone(), max_degree); + let offset = self.0.g1_powers.len() - max_degree; + let tau_N_max_sub_2_N = self.0.g2_powers[offset]; + let open_pp = KZGProverKey::new(self.0, offset, max_degree); + ( + ZeromorphProverKey { commit_pp, open_pp }, + ZeromorphVerifierKey { + kzg_vk, + tau_N_max_sub_2_N, + }, + ) + } } //TODO: adapt interface to have prover and verifier key @@ -219,25 +240,19 @@ pub struct ZeromorphVerifierKey { pub tau_N_max_sub_2_N: P::G2Affine, } -#[derive(Error, Debug)] -pub enum ZeromorphError { - #[error("Length Error: SRS Length: {0}, Key Length: {0}")] - KeyLengthError(usize, usize), -} - +#[derive(Debug, PartialEq)] pub struct ZeromorphCommitment(P::G1Affine); -impl AppendToTranscript for ZeromorphCommitment
<P>
+impl AppendToTranscript for ZeromorphCommitment
<P>
where - Self: CurveGroup + Self: CurveGroup, { - fn append_to_transcript(&self, label: &'static [u8], transcript: &mut ProofTranscript) { - transcript.append_point(b"poly_commitment_share", self); - } + fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut ProofTranscript) { + transcript.append_point(b"poly_commitment_share", self); + } } -#[derive(Clone, CanonicalSerialize, - CanonicalDeserialize, Debug)] +#[derive(Clone, CanonicalSerialize, CanonicalDeserialize, Debug)] pub struct ZeromorphProof { pub pi: P::G1Affine, pub q_hat_com: P::G1Affine, @@ -271,7 +286,7 @@ where quotient_iter .zip_eq(&*remainder_lo) .zip_eq(&*remainder_hi) - .for_each(|((mut q, r_lo), r_hi)| { + .for_each(|((q, r_lo), r_hi)| { *q = *r_hi - *r_lo; }); @@ -280,9 +295,11 @@ where #[cfg(not(feature = "multicore"))] let remainder_lo_iter = remainder_lo.iter_mut(); - remainder_lo_iter.zip_eq(remainder_hi).for_each(|(r_lo, r_hi)| { - *r_lo += (*r_hi - r_lo as &_) * x_i; - }); + remainder_lo_iter + .zip_eq(remainder_hi) + .for_each(|(r_lo, r_hi)| { + *r_lo += (*r_hi - r_lo as &_) * x_i; + }); remainder.truncate(1 << (num_var - 1 - i)); @@ -395,175 +412,329 @@ where #[derive(Clone)] pub struct Zeromorph { _phantom: PhantomData
<P>
, - } +} -impl Zeromorph
<P>
+impl Zeromorph
<P>
where
<P as Pairing>
::ScalarField: poly::field::JoltField, { - pub fn protocol_name() -> &'static [u8] { - b"Zeromorph" - } + pub fn protocol_name() -> &'static [u8] { + b"Zeromorph" + } + + //IDEAS; + // - extra sampling from transcript??? -> no adding randomness has to due with information leakage not verification + + pub fn commit( + pp: &ZeromorphProverKey
<P>
, + poly: &DensePolynomial, + ) -> Result, ProofVerifyError> { + if pp.commit_pp.g1_powers().len() < poly.Z.len() { + return Err(ProofVerifyError::KeyLengthError( + pp.commit_pp.g1_powers().len(), + poly.Z.len(), + )); + } + Ok(ZeromorphCommitment( + UVKZGPCS::commit(&pp.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), + )) + } + + //TODO: change interface to create commitment for poly??? + pub fn open( + pp: &ZeromorphProverKey
<P>
, + poly: &DensePolynomial, + point: &[P::ScalarField], + // Can be calculated + eval: &P::ScalarField, + transcript: &mut ProofTranscript, + ) -> Result, ProofVerifyError> { + transcript.append_protocol_name(Self::protocol_name()); + + if pp.commit_pp.g1_powers().len() < poly.Z.len() { + return Err(ProofVerifyError::KeyLengthError( + pp.commit_pp.g1_powers().len(), + poly.Z.len(), + )); + } + + //assert_eq!(Self::commit(pp, poly).unwrap(), *comm); + assert_eq!(poly.evaluate(point), *eval); - //IDEAS; - // - extra sampling from transcript??? -> no adding randomness has to due with information leakage not verification + let (quotients, remainder): (Vec>, P::ScalarField) = + compute_multilinear_quotients::
<P>
(poly, point); + assert_eq!(quotients.len(), poly.get_num_vars()); + assert_eq!(remainder, *eval); - pub fn commit(pp: &ZeromorphProverKey
<P>
, poly: &DensePolynomial) -> Result { - if pp.commit_pp.g1_powers().len() < poly.Z.len() { - return Err(ZeromorphError::KeyLengthError(pp.commit_pp.g1_powers().len(), poly.Z.len())) + // Compute the multilinear quotients q_k = q_k(X_0, ..., X_{k-1}) + let q_k_com: Vec = quotients + .par_iter() + .map(|q| UVKZGPCS::commit(&pp.commit_pp, q).unwrap()) + .collect(); + let q_comms: Vec = q_k_com + .clone() + .into_iter() + .map(|c| c.into_group()) + .collect(); + //transcript.append_points(b"q_comms", &q_comms); + q_comms + .iter() + .for_each(|c| transcript.append_point(b"quo", c)); + + // Sample challenge y + let y_challenge: P::ScalarField = transcript.challenge_scalar(b"y"); + + // Compute the batched, lifted-degree quotient `\hat{q}` + // qq_hat = โˆ‘_{i=0}^{num_vars-1} y^i * X^(2^num_vars - d_k - 1) * q_i(x) + let (q_hat, offset) = compute_batched_lifted_degree_quotient::
<P>
("ients, &y_challenge); + + // Compute and absorb the commitment C_q = [\hat{q}] + let q_hat_com = UVKZGPCS::commit_offset(&pp.commit_pp, &q_hat, offset)?; + transcript.append_point(b"q_hat", &q_hat_com.into_group()); + + // Get x and z challenges + let x_challenge = transcript.challenge_scalar(b"x"); + let z_challenge = transcript.challenge_scalar(b"z"); + + // Compute batched degree and ZM-identity quotient polynomial pi + let (eval_scalar, (degree_check_q_scalars, zmpoly_q_scalars)): ( + P::ScalarField, + (Vec, Vec), + ) = eval_and_quotient_scalars::
<P>
(y_challenge, x_challenge, z_challenge, point); + // f = z * poly.Z + q_hat + (-z * ฮฆ_n(x) * e) + โˆ‘_k (q_scalars_k * q_k) + let mut f = UniPoly::from_coeff(poly.Z.clone()); + f *= &z_challenge; + f += &q_hat; + f[0] += eval_scalar * eval; + quotients + .into_iter() + .zip(degree_check_q_scalars) + .zip(zmpoly_q_scalars) + .for_each(|((mut q, degree_check_scalar), zm_poly_scalar)| { + q *= &(degree_check_scalar + zm_poly_scalar); + f += &q; + }); + debug_assert_eq!(f.evaluate(&x_challenge), P::ScalarField::zero()); + + // Compute and send proof commitment pi + let (pi, _) = UVKZGPCS::open(&pp.open_pp, &f, &x_challenge)?; + + Ok(ZeromorphProof { + pi, + q_hat_com, + q_k_com, + }) } - UVKZGPCS::commit(&pp.commit_pp, &UniPoly::from_coeff(poly.Z.clone())) - } - pub fn open(pp: &ZeromorphProverKey
<P>
, comm: &P::G1Affine, poly: &DensePolynomial, point: &[P::ScalarField], eval: &P::ScalarField, transcript: &mut ProofTranscript) -> Result, ZeromorphError> { - transcript.append_protocol_name(Self::protocol_name()); + //Batch together polynomials -> Then commit + // polys[0..m] + // commitments[0..m] + // evals[0..m] + fn batch_open( + pk: &ZeromorphProverKey
<P>
, + polynomials: &[DensePolynomial], + point: &[P::ScalarField], + evals: &[P::ScalarField], + transcript: &mut ProofTranscript, + ) -> ZeromorphProof
<P>
{ + let num_vars = point.len(); + let n = 1 << num_vars; + // Generate batching challenge \rho and powers 1,...,\rho^{m-1} + let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); + // Compute batching of unshifted polynomials f_i, and batched eval v_i: + let mut scalar = P::ScalarField::one(); + let (f_batched, batched_evaluation) = (0..polynomials.len()).fold( + ( + DensePolynomial::new(vec![P::ScalarField::zero(); n]), + P::ScalarField::zero(), + ), + |(mut f_batched, mut batched_evaluation), i| { + f_batched += &(polynomials[i].clone() * scalar); + batched_evaluation += scalar * evals[i]; + scalar *= rho; + (f_batched, batched_evaluation) + }, + ); + let pi_poly = DensePolynomial::new(f_batched.Z.clone()); + Zeromorph::
<P>
::open(&pk, &pi_poly, &point, &batched_evaluation, transcript).unwrap() + } - if pp.commit_pp.g1_powers().len() < poly.Z.len() { - return Err(ZeromorphError::KeyLengthError(pp.commit_pp.g1_powers().len(), poly.Z.len())) + fn batch_verify( + vk: &ZeromorphVerifierKey
<P>
, + commitments: &[ZeromorphCommitment
<P>
], + point: &[P::ScalarField], + evals: &[P::ScalarField], + batch_proof: &ZeromorphProof
<P>
, + transcript: &mut ProofTranscript, + ) -> Result { + // Compute batching of unshifted polynomials f_i: + // Compute powers of batching challenge rho + let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); + let mut scalar = P::ScalarField::one(); + let (batched_eval, batched_commitment) = evals.iter().zip_eq(commitments.iter()).fold( + (P::ScalarField::zero(), P::G1::zero()), + |(mut batched_evaluation, mut batched_commitment), (opening, commitment)| { + batched_evaluation += scalar * opening; + batched_commitment += commitment.0 * scalar; + scalar *= rho; + (batched_evaluation, batched_commitment) + }, + ); + Zeromorph::
<P>
::verify( + &vk, + &ZeromorphCommitment(batched_commitment.into_affine()), + &point, + &batched_eval, + &batch_proof, + transcript, + ) } - assert_eq!(Self::commit(pp, poly).unwrap(), *comm); - assert_eq!(poly.evaluate(point), *eval); - - let (quotients, remainder): (Vec>, P::ScalarField) = compute_multilinear_quotients::
<P>
(poly, point); - assert_eq!(quotients.len(), poly.get_num_vars()); - assert_eq!(remainder, *eval); - - // Compute the multilinear quotients q_k = q_k(X_0, ..., X_{k-1}) - let q_k_com: Vec = quotients.par_iter().map(|q| UVKZGPCS::commit(&pp.commit_pp, q).unwrap()).collect(); - let q_comms: Vec = q_k_com.clone().into_iter().map(|c| c.into_group()).collect(); - //transcript.append_points(b"q_comms", &q_comms); - q_comms.iter().for_each(|c| transcript.append_point(b"quo", c)); - - // Sample challenge y - let y_challenge: P::ScalarField = transcript.challenge_scalar(b"y"); - - // Compute the batched, lifted-degree quotient `\hat{q}` - // qq_hat = โˆ‘_{i=0}^{num_vars-1} y^i * X^(2^num_vars - d_k - 1) * q_i(x) - let (q_hat, offset) = compute_batched_lifted_degree_quotient::
<P>
("ients, &y_challenge); - - // Compute and absorb the commitment C_q = [\hat{q}] - let q_hat_com = UVKZGPCS::commit_offset(&pp.commit_pp, &q_hat, offset)?; - transcript.append_point(b"q_hat", &q_hat_com.into_group()); - - // Get x and z challenges - let x_challenge = transcript.challenge_scalar(b"x"); - let z_challenge = transcript.challenge_scalar(b"z"); - - // Compute batched degree and ZM-identity quotient polynomial pi - let (eval_scalar, (degree_check_q_scalars, zmpoly_q_scalars)): (P::ScalarField, (Vec, Vec)) = eval_and_quotient_scalars::
<P>
(y_challenge, x_challenge, z_challenge, point); - // f = z * poly.Z + q_hat + (-z * ฮฆ_n(x) * e) + โˆ‘_k (q_scalars_k * q_k) - let mut f = UniPoly::from_coeff(poly.Z.clone()); - f *= &z_challenge; - f += &q_hat; - f[0] += eval_scalar * eval; - quotients - .into_iter() - .zip(degree_check_q_scalars) - .zip(zmpoly_q_scalars) - .for_each(|((mut q, degree_check_scalar), zm_poly_scalar)| { - q *= &(degree_check_scalar + zm_poly_scalar); - f += &q; - }); - debug_assert_eq!(f.evaluate(&x_challenge), P::ScalarField::zero()); - - // Compute and send proof commitment pi - let (pi, _) = UVKZGPCS::open(&pp.open_pp, &f, &x_challenge)?; - - Ok(ZeromorphProof { pi, q_hat_com, q_k_com }) - } - - pub fn verify(vk: &ZeromorphVerifierKey
<P>
, comm: &P::G1Affine, point: &[P::ScalarField], eval: &P::ScalarField, proof: &ZeromorphProof
<P>
, transcript: &mut ProofTranscript) -> Result { - transcript.append_protocol_name(Self::protocol_name()); - - // Receive commitments [q_k] - //TODO: remove clone - let q_comms: Vec = proof.q_k_com.clone().into_iter().map(|c| c.into_group()).collect(); - q_comms.iter().for_each(|c| transcript.append_point(b"quo", c)); - - // Challenge y - let y_challenge: P::ScalarField = transcript.challenge_scalar(b"y"); - - // Receive commitment C_q_hat - transcript.append_point(b"q_hat", &proof.q_hat_com.into_group()); - - // Get x and z challenges - let x_challenge = transcript.challenge_scalar(b"x"); - let z_challenge = transcript.challenge_scalar(b"z"); - - // Compute batched degree and ZM-identity quotient polynomial pi - let (eval_scalar, (mut q_scalars, zmpoly_q_scalars)): (P::ScalarField, (Vec, Vec)) = eval_and_quotient_scalars::
<P>
(y_challenge, x_challenge, z_challenge, point); - q_scalars - .iter_mut() - .zip_eq(zmpoly_q_scalars) - .for_each(|(scalar, zm_poly_q_scalar)| { - *scalar += zm_poly_q_scalar; - }); - let scalars = [vec![P::ScalarField::one(), z_challenge, eval_scalar * eval], q_scalars].concat(); - let bases = [ - vec![proof.q_hat_com, *comm, vk.kzg_vk.g1], - //TODO: eliminate - proof.q_k_com.clone() - ].concat(); - let c = ::msm(&bases, &scalars).unwrap().into_affine(); - - let pairing = P::multi_pairing( - &[c, proof.pi], - &[(-vk.tau_N_max_sub_2_N.into_group()).into_affine(), (vk.kzg_vk.beta_g2.into_group() - (vk.kzg_vk.g2 * x_challenge)).into()] - ); - Ok(pairing.0.is_one().into()) - - } + //Change api + pub fn verify( + vk: &ZeromorphVerifierKey
<P>
, + comm: &ZeromorphCommitment
<P>
, + point: &[P::ScalarField], + eval: &P::ScalarField, + proof: &ZeromorphProof
<P>
, + transcript: &mut ProofTranscript, + ) -> Result { + transcript.append_protocol_name(Self::protocol_name()); + + // Receive commitments [q_k] + //TODO: remove clone + let q_comms: Vec = proof + .q_k_com + .clone() + .into_iter() + .map(|c| c.into_group()) + .collect(); + q_comms + .iter() + .for_each(|c| transcript.append_point(b"quo", c)); + + // Challenge y + let y_challenge: P::ScalarField = transcript.challenge_scalar(b"y"); + + // Receive commitment C_q_hat + transcript.append_point(b"q_hat", &proof.q_hat_com.into_group()); + + // Get x and z challenges + let x_challenge = transcript.challenge_scalar(b"x"); + let z_challenge = transcript.challenge_scalar(b"z"); + + // Compute batched degree and ZM-identity quotient polynomial pi + let (eval_scalar, (mut q_scalars, zmpoly_q_scalars)): ( + P::ScalarField, + (Vec, Vec), + ) = eval_and_quotient_scalars::
<P>
(y_challenge, x_challenge, z_challenge, point); + q_scalars + .iter_mut() + .zip_eq(zmpoly_q_scalars) + .for_each(|(scalar, zm_poly_q_scalar)| { + *scalar += zm_poly_q_scalar; + }); + let scalars = [ + vec![P::ScalarField::one(), z_challenge, eval_scalar * eval], + q_scalars, + ] + .concat(); + let bases = [ + vec![proof.q_hat_com, comm.0, vk.kzg_vk.g1], + //TODO: eliminate + proof.q_k_com.clone(), + ] + .concat(); + let c = ::msm(&bases, &scalars) + .unwrap() + .into_affine(); + + let pairing = P::multi_pairing( + &[c, proof.pi], + &[ + (-vk.tau_N_max_sub_2_N.into_group()).into_affine(), + (vk.kzg_vk.beta_g2.into_group() - (vk.kzg_vk.g2 * x_challenge)).into(), + ], + ); + Ok(pairing.is_zero()) + } } +//TODO: have setup be a dummy if lazy_static isn't in use -> just pass in lazy static and select needed keys from for portions it is needed. +//TODO: find commitments in interface +//TODO: test batched commitments outside interface first thing tomorrow. +//TODO: implement multilinear poly * scalar + /* impl CommitmentScheme for Zeromorph
<P>
where
<P as Pairing>
::ScalarField: poly::field::JoltField, - ZeromorphCommitment
<P>
: CurveGroup + ZeromorphCommitment
<P>
: CurveGroup, { - type Field = P::ScalarField; - type Setup = Vec<(ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
)>; - type Commitment = ZeromorphCommitment
<P>
; + type Field = P::ScalarField; + type Setup = (ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
); + type Commitment = ZeromorphCommitment
<P>
; type Proof = ZeromorphProof
<P>
; type BatchedProof = ZeromorphProof
<P>
; - fn setup(shapes: &[CommitShape]) -> Self::Setup { - //TODO: Does using lazy_static! lead to large problems - todo!() + fn setup(_shapes: &[CommitShape]) -> Self::Setup { + todo!() } fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { - // TODO: assert lengths are valid - //ZeromorphCommitment(UnivariateKZG::
<P>
::commit(setup, &UniPoly::from_coeff(poly.Z.clone())).unwrap()) - todo!() + assert!(setup.0.commit_pp.g1_powers().len() < poly.Z.len()); + ZeromorphCommitment( + UVKZGPCS::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), + ) } fn batch_commit( evals: &[&[Self::Field]], gens: &Self::Setup, - batch_type: BatchType, + _batch_type: BatchType, ) -> Vec { - // TODO: assert lengths are valid - #[cfg(feature = "multicore")] - let iter = evals.par_iter(); - #[cfg(not(feature = "multicore"))] - let iter = evals.iter(); - iter - .map(|poly| ZeromorphCommitment(UnivariateKZG::
<P>
::commit(gens, &UniPoly::from_coeff(poly.Z.clone())).unwrap())) - .collect::>() + // TODO: assert lengths are valid + #[cfg(feature = "multicore")] + let iter = evals.par_iter(); + #[cfg(not(feature = "multicore"))] + let iter = evals.iter(); + iter.enumerate() + .map(|(i, evals)| { + assert!( + gens.0.commit_pp.g1_powers().len() < evals.len(), + "COMMIT KEY LENGTH ERROR" + ); + ZeromorphCommitment( + UVKZGPCS::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())) + .unwrap(), + ) + }) + .collect::>() } fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { - todo!() + ZeromorphCommitment( + UVKZGPCS::commit(&setup.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), + ) } fn prove( + setup: &Self::Setup, poly: &DensePolynomial, opening_point: &[Self::Field], // point at which the polynomial is evaluated + commitment: &Self::Commitment, + eval: &Self::Field, transcript: &mut ProofTranscript, ) -> Self::Proof { - todo!() + Zeromorph::
<P>
::open( + &setup.0, + &commitment, + &poly, + &opening_point, + &eval, + transcript, + ) + .unwrap() } fn batch_prove( @@ -573,7 +744,33 @@ where batch_type: BatchType, transcript: &mut ProofTranscript, ) -> Self::BatchedProof { - todo!() + // Generate batching challenge \rho and powers 1,...,\rho^{m-1} + let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); + let mut scalar = P::ScalarField::one(); + // Compute batching of unshifted polynomials f_i: + let mut scalar = P::ScalarField::one(); + let (f_batched, batched_evaluation) = (0..polynomials.len()).fold( + ( + DensePolynomial::new(vec![P::ScalarField::zero(); n]), + P::ScalarField::zero(), + ), + |(mut f_batched, mut batched_evaluation), i| { + f_batched += polynomials[i].clone() * scalar; + batched_evaluation += scalar * openings[i]; + scalar *= rho; + (f_batched, batched_evaluation) + }, + ); + let mut pi_poly = UniPoly::from_coeff(f_batched.Z.clone()); + Zeromorph::
<P>
::open( + &setup.0, + &commitment, + &pi_poly, + &opening_point, + &batched_evaluation, + transcript, + ) + .unwrap() } fn verify( @@ -584,7 +781,14 @@ where opening: &Self::Field, // evaluation \widetilde{Z}(r) commitment: &Self::Commitment, ) -> Result<(), ProofVerifyError> { - todo!() + Zeromorph::
<P>
::verify( + &setup.1, + &commitment, + &opening_point, + &opening, + &proof, + transcript, + ) } fn batch_verify( @@ -595,7 +799,28 @@ where commitments: &[&Self::Commitment], transcript: &mut ProofTranscript, ) -> Result<(), ProofVerifyError> { - todo!() + // Compute batching of unshifted polynomials f_i: + // Compute powers of batching challenge rho + let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); + let mut scalar = P::ScalarField::one(); + let (batched_opening, batched_commitment) = + openings.iter().zip_eq(commitments.iter()).fold( + (P::ScalarField::zero(), P::G1::zero()), + |(mut batched_evaluation, mut batched_commitment), (opening, commitment)| { + batched_evaluation += scalar * opening; + batched_commitment += commitment.0 * scalar; + scalar *= rho; + (batched_evaluation, batched_commitment) + }, + ); + Zeromorph::
<P>
::verify( + &setup.1, + &ZeromorphCommitment(batched_commitment.into_affine()), + &opening_point, + &batched_opening, + &batch_proof, + transcript, + ) } fn protocol_name() -> &'static [u8] { @@ -609,9 +834,9 @@ mod test { use super::*; use crate::utils::math::Math; use ark_bn254::{Bn254, Fr}; + use ark_ec::AffineRepr; use ark_ff::{BigInt, Zero}; use ark_std::{rand::Rng, test_rng, UniformRand}; - use ark_ec::AffineRepr; use rand_core::SeedableRng; // Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula @@ -627,56 +852,6 @@ mod test { } /* - #[test] - fn prove_verify_single() { - let max_vars = 8; - let mut rng = test_rng(); - let srs = ZEROMORPH_SRS.lock().unwrap(); - - for num_vars in 3..max_vars { - // Setup - let (pk, vk) = { - let poly_size = 1 << (num_vars + 1); - srs.trim(poly_size - 1).unwrap() - }; - let polys = DensePolynomial::new( - (0..(1 << num_vars)) - .map(|_| Fr::rand(&mut rng)) - .collect::>(), - ); - let challenges = (0..num_vars) - .map(|_| Fr::rand(&mut rng)) - .collect::>(); - let evals = polys.evaluate(&challenges); - - // Commit and open - let commitments = Zeromorph::::commit(&[polys.clone()], &pk.g1_powers).unwrap(); - - let mut prover_transcript = Transcript::new(b"example"); - let proof = Zeromorph::::prove( - &[polys], - &[evals], - &challenges, - &pk, - &mut prover_transcript, - ) - .unwrap(); - - let mut verifier_transcript = Transcript::new(b"example"); - Zeromorph::::verify( - &commitments, - &[evals], - &challenges, - &vk, - &mut verifier_transcript, - proof, - ) - .unwrap(); - - //TODO: check both random oracles are synced - } - } - #[test] fn prove_verify_batched() { let max_vars = 16; @@ -968,13 +1143,12 @@ mod test { } fn kzg_verify( - vk: &KZGVerifierKey
<P>
, - commitment: &P::G1Affine, - point: &P::ScalarField, - proof: &P::G1Affine, - evaluation: &P::ScalarField, - ) -> Result { - + vk: &KZGVerifierKey
<P>
, + commitment: &P::G1Affine, + point: &P::ScalarField, + proof: &P::G1Affine, + evaluation: &P::ScalarField, + ) -> Result { let lhs = P::pairing( commitment.into_group() - vk.g1.into_group() * evaluation, vk.g2, @@ -994,48 +1168,149 @@ mod test { } #[test] - fn kzg_commit_prove_verify() -> Result<(), ZeromorphError> { - let seed = b"11111111111111111111111111111111"; - for _ in 0..100 { - let mut rng = &mut ChaCha20Rng::from_seed(*seed); - let degree = rng.gen_range(2..20); - - let pp = Arc::new(SRS::::setup(&mut rng, degree)); - let (ck, vk) = SRS::trim(pp, degree); - let p = random::(degree, rng); - let comm = UVKZGPCS::::commit(&ck, &p)?; - let point = Fr::rand(rng); - let (proof, value) = UVKZGPCS::::open(&ck, &p, &point)?; - assert!( - kzg_verify(&vk, &comm, &point, &proof, &value)?, - "proof was incorrect for max_degree = {}, polynomial_degree = {}", - degree, - p.degree(), - ); - } - Ok(()) + fn kzg_commit_prove_verify() -> Result<(), ProofVerifyError> { + let seed = b"11111111111111111111111111111111"; + for _ in 0..100 { + let mut rng = &mut ChaCha20Rng::from_seed(*seed); + let degree = rng.gen_range(2..20); + + let pp = Arc::new(SRS::::setup(&mut rng, degree)); + let (ck, vk) = SRS::trim(pp, degree); + let p = random::(degree, rng); + let comm = UVKZGPCS::::commit(&ck, &p)?; + let point = Fr::rand(rng); + let (proof, value) = UVKZGPCS::::open(&ck, &p, &point)?; + assert!( + kzg_verify(&vk, &comm, &point, &proof, &value)?, + "proof was incorrect for max_degree = {}, polynomial_degree = {}", + degree, + p.degree(), + ); + } + Ok(()) } #[test] - fn zeromorph_commit_prove_verify() - { - for num_vars in [4, 5, 6] { - let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(num_vars as u64); - - let poly = DensePolynomial::random(num_vars, &mut rng); - let point: Vec<::ScalarField> = (0..num_vars).map(|_| ::ScalarField::rand(&mut rng)).collect(); - let eval = poly.evaluate(&point); - - let srs = ZeromorphSRS::::setup(&mut rng, 1 << num_vars); - let (pk, vk) = srs.trim(1 << num_vars); - let commitment = Zeromorph::::commit(&pk, &poly).unwrap(); - - let mut prover_transcript = ProofTranscript::new(b"TestEval"); - let proof = Zeromorph::::open(&pk, &commitment, &poly, &point, &eval, &mut prover_transcript).unwrap(); + fn zeromorph_commit_prove_verify() { + for num_vars in [4, 5, 6] { + let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(num_vars as u64); + + let poly = DensePolynomial::random(num_vars, &mut rng); + let point: Vec<::ScalarField> = (0..num_vars) + .map(|_| ::ScalarField::rand(&mut rng)) + .collect(); + let eval = poly.evaluate(&point); + + let srs = ZeromorphSRS::::setup(&mut rng, 1 << num_vars); + let (pk, vk) = srs.trim(1 << num_vars); + let commitment = Zeromorph::::commit(&pk, &poly).unwrap(); + + let mut prover_transcript = ProofTranscript::new(b"TestEval"); + let proof = Zeromorph::::open(&pk, &poly, &point, &eval, &mut prover_transcript) + .unwrap(); + let p_transcipt_squeeze: ::ScalarField = + prover_transcript.challenge_scalar(b"c"); + + // Verify proof. 
+ let mut verifier_transcript = ProofTranscript::new(b"TestEval"); + Zeromorph::::verify( + &vk, + &commitment, + &point, + &eval, + &proof, + &mut verifier_transcript, + ) + .unwrap(); + let v_transcipt_squeeze: ::ScalarField = + verifier_transcript.challenge_scalar(b"c"); + + assert_eq!(p_transcipt_squeeze, v_transcipt_squeeze); + + // evaluate bad proof for soundness + let altered_verifier_point = point + .iter() + .map(|s| *s + ::ScalarField::one()) + .collect::>(); + let altered_verifier_eval = poly.evaluate(&altered_verifier_point); + let mut verifier_transcript = ProofTranscript::new(b"TestEval"); + assert!(!Zeromorph::::verify( + &vk, + &commitment, + &altered_verifier_point, + &altered_verifier_eval, + &proof, + &mut verifier_transcript, + ) + .unwrap()); + } + } - // Verify proof. - let mut verifier_transcript = ProofTranscript::new(b"TestEval"); - assert!(Zeromorph::::verify(&vk, &commitment, &point, &eval, &proof, &mut verifier_transcript).unwrap()) - } + #[test] + fn batched_zeromorph_commit_prove_verify() { + for num_polys in [1, 2, 5, 7] { + for num_vars in [4, 5, 6] { + let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(num_vars as u64); + + let polys: Vec> = (0..num_polys) + .map(|_| DensePolynomial::random(num_vars, &mut rng)) + .collect(); + let point: Vec = (0..num_vars).map(|_| Fr::rand(&mut rng)).collect(); + let evals: Vec = polys.iter().map(|poly| poly.evaluate(&point)).collect(); + + let srs = ZeromorphSRS::::setup(&mut rng, 1 << num_vars); + let (pk, vk) = srs.trim(1 << num_vars); + let commitments: Vec> = polys + .iter() + .map(|poly| Zeromorph::::commit(&pk, &poly).unwrap()) + .collect(); + + let mut prover_transcript = ProofTranscript::new(b"TestEval"); + let proof = Zeromorph::::batch_open( + &pk, + &polys, + &point, + &evals, + &mut prover_transcript, + ); + let p_transcipt_squeeze: ::ScalarField = + prover_transcript.challenge_scalar(b"c"); + + // Verify proof. 
+ let mut verifier_transcript = ProofTranscript::new(b"TestEval"); + Zeromorph::::batch_verify( + &vk, + &commitments, + &point, + &evals, + &proof, + &mut verifier_transcript, + ) + .unwrap(); + let v_transcipt_squeeze: ::ScalarField = + verifier_transcript.challenge_scalar(b"c"); + + assert_eq!(p_transcipt_squeeze, v_transcipt_squeeze); + + // evaluate bad proof for completeness + let altered_verifier_point = point + .iter() + .map(|s| *s + ::ScalarField::one()) + .collect::>(); + let altered_verifier_evals: Vec = polys + .iter() + .map(|poly| poly.evaluate(&altered_verifier_point)) + .collect(); + let mut verifier_transcript = ProofTranscript::new(b"TestEval"); + assert!(!Zeromorph::::batch_verify( + &vk, + &commitments, + &altered_verifier_point, + &altered_verifier_evals, + &proof, + &mut verifier_transcript, + ).unwrap()); + } + } } } diff --git a/jolt-core/src/poly/dense_mlpoly.rs b/jolt-core/src/poly/dense_mlpoly.rs index 723dc1723..47f2fd04d 100644 --- a/jolt-core/src/poly/dense_mlpoly.rs +++ b/jolt-core/src/poly/dense_mlpoly.rs @@ -8,7 +8,7 @@ use crate::utils::math::Math; use core::ops::Index; use rand_core::{CryptoRng, RngCore}; use rayon::prelude::*; -use std::ops::AddAssign; +use std::ops::{AddAssign, Mul}; #[derive(Debug, PartialEq)] pub struct DensePolynomial { @@ -275,9 +275,11 @@ impl DensePolynomial { } pub fn random(num_vars: usize, mut rng: &mut R) -> Self { - Self::new(std::iter::from_fn(|| Some(F::random(&mut rng))).take(1 << num_vars).collect()) - - + Self::new( + std::iter::from_fn(|| Some(F::random(&mut rng))) + .take(1 << num_vars) + .collect(), + ) } } @@ -316,6 +318,19 @@ impl AddAssign<&DensePolynomial> for DensePolynomial { } } +impl Mul for DensePolynomial { + type Output = Self; + + fn mul(self, rhs: F) -> Self::Output { + let evals: Vec = self.Z.iter().map(|a| *a * rhs).collect(); + Self { + num_vars: self.num_vars, + len: self.len, + Z: evals, + } + } +} + #[cfg(test)] mod tests { use crate::poly::commitment::hyrax::matrix_dimensions; diff --git a/jolt-core/src/poly/unipoly.rs b/jolt-core/src/poly/unipoly.rs index 14d15098d..b6ef4194e 100644 --- a/jolt-core/src/poly/unipoly.rs +++ b/jolt-core/src/poly/unipoly.rs @@ -2,7 +2,6 @@ use std::cmp::Ordering; use std::ops::{AddAssign, Index, IndexMut, Mul, MulAssign}; -use crate::jolt::vm::Jolt; use crate::poly::field::JoltField; use crate::utils::gaussian_elimination::gaussian_elimination; use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; @@ -139,13 +138,17 @@ impl UniPoly { // Compute h(x) = f(x)/(x - u) let mut h = vec![F::zero(); d]; for i in (1..d).rev() { - h[i - 1] = self.coeffs[i] + h[i] * u; + h[i - 1] = self.coeffs[i] + h[i] * u; } Self::from_coeff(h) } pub fn random(num_vars: usize, mut rng: &mut R) -> Self { - Self::from_coeff(std::iter::from_fn(|| Some(F::random(&mut rng))).take(num_vars).collect()) + Self::from_coeff( + std::iter::from_fn(|| Some(F::random(&mut rng))) + .take(num_vars) + .collect(), + ) } } @@ -199,24 +202,24 @@ impl Mul<&F> for UniPoly { impl Index for UniPoly { type Output = F; - + fn index(&self, index: usize) -> &Self::Output { - &self.coeffs[index] + &self.coeffs[index] } } impl IndexMut for UniPoly { fn index_mut(&mut self, index: usize) -> &mut Self::Output { - &mut self.coeffs[index] + &mut self.coeffs[index] } - } +} impl MulAssign<&F> for UniPoly { fn mul_assign(&mut self, rhs: &F) { self.coeffs.par_iter_mut().for_each(|c| *c *= rhs); } } - + impl CompressedUniPoly { // we require eval(0) + eval(1) = hint, so we can solve for the linear term as: // 
linear_term = hint - 2 * constant_term - deg2 term - deg3 term @@ -316,33 +319,35 @@ mod tests { pub fn naive_mul(ours: &UniPoly, other: &UniPoly) -> UniPoly { if ours.is_zero() || other.is_zero() { - UniPoly::zero() + UniPoly::zero() } else { - let mut result = vec![F::zero(); ours.degree() + other.degree() + 1]; - for (i, self_coeff) in ours.coeffs.iter().enumerate() { - for (j, other_coeff) in other.coeffs.iter().enumerate() { - result[i + j] += &(*self_coeff * other_coeff); + let mut result = vec![F::zero(); ours.degree() + other.degree() + 1]; + for (i, self_coeff) in ours.coeffs.iter().enumerate() { + for (j, other_coeff) in other.coeffs.iter().enumerate() { + result[i + j] += &(*self_coeff * other_coeff); + } } - } - UniPoly::from_coeff(result) + UniPoly::from_coeff(result) } - } + } #[test] fn test_divide_poly() { let rng = &mut ChaCha20Rng::from_seed([0u8; 32]); for a_degree in 0..50 { - for b_degree in 0..50 { - let dividend = UniPoly::::random(a_degree, rng); - let divisor = UniPoly::::random(b_degree, rng); - - if let Some((quotient, remainder)) = UniPoly::divide_with_q_and_r(÷nd, &divisor) { - let mut prod = naive_mul(&divisor, "ient); - prod += &remainder; - assert_eq!(dividend, prod) + for b_degree in 0..50 { + let dividend = UniPoly::::random(a_degree, rng); + let divisor = UniPoly::::random(b_degree, rng); + + if let Some((quotient, remainder)) = + UniPoly::divide_with_q_and_r(÷nd, &divisor) + { + let mut prod = naive_mul(&divisor, "ient); + prod += &remainder; + assert_eq!(dividend, prod) + } } - } } } } diff --git a/jolt-core/src/utils/errors.rs b/jolt-core/src/utils/errors.rs index 52333f696..85cca8659 100644 --- a/jolt-core/src/utils/errors.rs +++ b/jolt-core/src/utils/errors.rs @@ -14,4 +14,6 @@ pub enum ProofVerifyError { DecompressionError([u8; 32]), #[error("R1CS proof verification failed: {0}")] SpartanError(String), + #[error("Length Error: SRS Length: {0}, Key Length: {0}")] + KeyLengthError(usize, usize), } From 59e97a0891914684c469cefb402439a97a89430b Mon Sep 17 00:00:00 2001 From: PatStiles Date: Fri, 17 May 2024 23:29:05 -0300 Subject: [PATCH 06/17] jolt-zeromorph works --- jolt-core/src/jolt/vm/bytecode.rs | 9 +- jolt-core/src/jolt/vm/instruction_lookups.rs | 10 +- jolt-core/src/jolt/vm/mod.rs | 5 +- jolt-core/src/jolt/vm/read_write_memory.rs | 12 +- jolt-core/src/jolt/vm/rv32i_vm.rs | 82 +++++++- .../src/jolt/vm/timestamp_range_check.rs | 4 + jolt-core/src/lasso/memory_checking.rs | 3 + jolt-core/src/lasso/surge.rs | 9 +- .../src/poly/commitment/commitment_scheme.rs | 2 + jolt-core/src/poly/commitment/hyrax.rs | 2 + jolt-core/src/poly/commitment/zeromorph.rs | 194 +++++------------- jolt-core/src/poly/structured_poly.rs | 1 + jolt-core/src/r1cs/snark.rs | 3 +- jolt-core/src/r1cs/spartan.rs | 2 + 14 files changed, 179 insertions(+), 159 deletions(-) diff --git a/jolt-core/src/jolt/vm/bytecode.rs b/jolt-core/src/jolt/vm/bytecode.rs index d270c1cac..d761aa17b 100644 --- a/jolt-core/src/jolt/vm/bytecode.rs +++ b/jolt-core/src/jolt/vm/bytecode.rs @@ -624,6 +624,7 @@ where #[tracing::instrument(skip_all, name = "BytecodeReadWriteOpenings::prove_openings")] fn prove_openings( + generators: &C::Setup, polynomials: &BytecodePolynomials, opening_point: &[F], openings: &Self, @@ -634,6 +635,7 @@ where combined_openings.extend(openings.v_read_write_openings.iter()); C::batch_prove( + generators, &[ &polynomials.a_read_write, &polynomials.t_read, @@ -705,12 +707,13 @@ where #[tracing::instrument(skip_all, name = "BytecodeInitFinalOpenings::prove_openings")] fn 
prove_openings( + generators: &C::Setup, polynomials: &BytecodePolynomials, opening_point: &[F], _openings: &Self, transcript: &mut ProofTranscript, ) -> Self::Proof { - C::prove(&polynomials.t_final, opening_point, transcript) + C::prove(generators, &polynomials.t_final, opening_point, transcript) } fn compute_verifier_openings( @@ -859,7 +862,7 @@ mod tests { let generators = HyraxScheme::::setup(&commitment_shapes); let commitments = polys.commit(&generators); - let proof = BytecodeProof::prove_memory_checking(&preprocessing, &polys, &mut transcript); + let proof = BytecodeProof::prove_memory_checking(&generators, &preprocessing, &polys, &mut transcript); let mut transcript = ProofTranscript::new(b"test_transcript"); BytecodeProof::verify_memory_checking( @@ -921,7 +924,7 @@ mod tests { let mut transcript = ProofTranscript::new(b"test_transcript"); - let proof = BytecodeProof::prove_memory_checking(&preprocessing, &polys, &mut transcript); + let proof = BytecodeProof::prove_memory_checking(&generators, &preprocessing, &polys, &mut transcript); let mut transcript = ProofTranscript::new(b"test_transcript"); BytecodeProof::verify_memory_checking( diff --git a/jolt-core/src/jolt/vm/instruction_lookups.rs b/jolt-core/src/jolt/vm/instruction_lookups.rs index b58d85cda..500800c1d 100644 --- a/jolt-core/src/jolt/vm/instruction_lookups.rs +++ b/jolt-core/src/jolt/vm/instruction_lookups.rs @@ -146,6 +146,7 @@ where #[tracing::instrument(skip_all, name = "PrimarySumcheckOpenings::prove_openings")] fn prove_openings( + generators: &C::Setup, polynomials: &InstructionPolynomials, opening_point: &[F], openings: &Self, @@ -165,6 +166,7 @@ where primary_sumcheck_openings.push(openings.lookup_outputs_opening); C::batch_prove( + generators, &primary_sumcheck_polys, opening_point, &primary_sumcheck_openings, @@ -262,6 +264,7 @@ where #[tracing::instrument(skip_all, name = "InstructionReadWriteOpenings::prove_openings")] fn prove_openings( + generators: &C::Setup, polynomials: &InstructionPolynomials, opening_point: &[F], openings: &Self, @@ -284,6 +287,7 @@ where .concat(); C::batch_prove( + generators, &read_write_polys, opening_point, &read_write_openings, @@ -364,12 +368,14 @@ where #[tracing::instrument(skip_all, name = "InstructionFinalOpenings::prove_openings")] fn prove_openings( + generators: &C::Setup, polynomials: &InstructionPolynomials, opening_point: &[F], openings: &Self, transcript: &mut ProofTranscript, ) -> Self::Proof { C::batch_prove( + generators, &polynomials.final_cts.iter().collect::>(), opening_point, &openings.final_openings, @@ -802,6 +808,7 @@ where #[tracing::instrument(skip_all, name = "InstructionLookups::prove")] pub fn prove( + generators: &CS::Setup, polynomials: &InstructionPolynomials, preprocessing: &InstructionLookupsPreprocessing, transcript: &mut ProofTranscript, @@ -835,6 +842,7 @@ where lookup_outputs_opening: outputs_eval, }; let sumcheck_opening_proof = PrimarySumcheckOpenings::prove_openings( + generators, polynomials, &r_primary_sumcheck, &sumcheck_openings, @@ -848,7 +856,7 @@ where opening_proof: sumcheck_opening_proof, }; - let memory_checking = Self::prove_memory_checking(preprocessing, polynomials, transcript); + let memory_checking = Self::prove_memory_checking(generators, preprocessing, polynomials, transcript); InstructionLookupsProof { _instructions: PhantomData, diff --git a/jolt-core/src/jolt/vm/mod.rs b/jolt-core/src/jolt/vm/mod.rs index fe6bcb5ef..5fd228116 100644 --- a/jolt-core/src/jolt/vm/mod.rs +++ b/jolt-core/src/jolt/vm/mod.rs @@ -376,18 
+376,21 @@ pub trait Jolt, const C: usize, c jolt_commitments.append_to_transcript(&mut transcript); let bytecode_proof = BytecodeProof::prove_memory_checking( + &preprocessing.generators, &preprocessing.bytecode, &jolt_polynomials.bytecode, &mut transcript, ); let instruction_proof = InstructionLookupsProof::prove( + &preprocessing.generators, &jolt_polynomials.instruction_lookups, &preprocessing.instruction_lookups, &mut transcript, ); let memory_proof = ReadWriteMemoryProof::prove( + &preprocessing.generators, &preprocessing.read_write_memory, &jolt_polynomials, &program_io, @@ -397,7 +400,7 @@ pub trait Jolt, const C: usize, c drop_in_background_thread(jolt_polynomials); let r1cs_proof = - R1CSProof::prove(spartan_key, witness_segments, &mut transcript).expect("proof failed"); + R1CSProof::prove(&preprocessing.generators, spartan_key, witness_segments, &mut transcript).expect("proof failed"); let jolt_proof = JoltProof { trace_length, diff --git a/jolt-core/src/jolt/vm/read_write_memory.rs b/jolt-core/src/jolt/vm/read_write_memory.rs index d0731b706..93f03434a 100644 --- a/jolt-core/src/jolt/vm/read_write_memory.rs +++ b/jolt-core/src/jolt/vm/read_write_memory.rs @@ -1040,6 +1040,7 @@ where #[tracing::instrument(skip_all, name = "MemoryReadWriteOpenings::prove_openings")] fn prove_openings( + generators: &C::Setup, polynomials: &JoltPolynomials, opening_point: &[F], openings: &Self, @@ -1067,6 +1068,7 @@ where .chain(openings.t_write_ram_opening.into_iter()) .collect::>(); C::batch_prove( + generators, &read_write_polys, opening_point, &read_write_openings, @@ -1160,12 +1162,14 @@ where #[tracing::instrument(skip_all, name = "MemoryInitFinalOpenings::prove_openings")] fn prove_openings( + generators: &C::Setup, polynomials: &JoltPolynomials, opening_point: &[F], openings: &Self, transcript: &mut ProofTranscript, ) -> Self::Proof { let v_t_opening_proof = C::batch_prove( + generators, &[ &polynomials.read_write_memory.v_final, &polynomials.read_write_memory.t_final, @@ -1500,6 +1504,7 @@ where C: CommitmentScheme, { fn prove_outputs( + generators: &C::Setup, polynomials: &ReadWriteMemory, program_io: &JoltDevice, transcript: &mut ProofTranscript, @@ -1565,7 +1570,7 @@ where transcript, ); - let sumcheck_opening_proof = C::prove(&polynomials.v_final, &r_sumcheck, transcript); + let sumcheck_opening_proof = C::prove(&generators, &polynomials.v_final, &r_sumcheck, transcript); Self { num_rounds, @@ -1684,21 +1689,24 @@ where { #[tracing::instrument(skip_all, name = "ReadWriteMemoryProof::prove")] pub fn prove( + generators: &C::Setup, preprocessing: &ReadWriteMemoryPreprocessing, polynomials: &JoltPolynomials, program_io: &JoltDevice, transcript: &mut ProofTranscript, ) -> Self { let memory_checking_proof = - ReadWriteMemoryProof::prove_memory_checking(preprocessing, polynomials, transcript); + ReadWriteMemoryProof::prove_memory_checking(generators, preprocessing, polynomials, transcript); let output_proof = OutputSumcheckProof::prove_outputs( + generators, &polynomials.read_write_memory, program_io, transcript, ); let timestamp_validity_proof = TimestampValidityProof::prove( + generators, &polynomials.timestamp_range_check, &polynomials.read_write_memory.t_read, transcript, diff --git a/jolt-core/src/jolt/vm/rv32i_vm.rs b/jolt-core/src/jolt/vm/rv32i_vm.rs index 93130a43f..18fc1d714 100644 --- a/jolt-core/src/jolt/vm/rv32i_vm.rs +++ b/jolt-core/src/jolt/vm/rv32i_vm.rs @@ -156,14 +156,16 @@ pub type RV32IJoltProof = JoltProof #[cfg(test)] mod tests { - use ark_bn254::{Fr, G1Projective}; + 
use ark_bn254::{Bn254, Fr, G1Projective}; use std::collections::HashSet; use crate::host; use crate::jolt::instruction::JoltInstruction; use crate::jolt::vm::rv32i_vm::{Jolt, RV32IJoltVM, C, M}; + use crate::poly::commitment::commitment_scheme::CommitmentScheme; use crate::poly::commitment::hyrax::HyraxScheme; + use crate::poly::commitment::zeromorph::Zeromorph; use std::sync::Mutex; use strum::{EnumCount, IntoEnumIterator}; @@ -173,15 +175,14 @@ mod tests { static ref SHA3_FILE_LOCK: Mutex<()> = Mutex::new(()); } - #[test] - fn instruction_set_subtables() { + fn test_instruction_set_subtables() { let mut subtable_set: HashSet<_> = HashSet::new(); for instruction in - , C, M>>::InstructionSet::iter() + >::InstructionSet::iter() { for (subtable, _) in instruction.subtables::(C, M) { // panics if subtable cannot be cast to enum variant - let _ = , C, M>>::Subtables::from( + let _ = >::Subtables::from( subtable.subtable_id(), ); subtable_set.insert(subtable.subtable_id()); @@ -189,13 +190,19 @@ mod tests { } assert_eq!( subtable_set.len(), - , C, M>>::Subtables::COUNT, + >::Subtables::COUNT, "Unused enum variants in Subtables" ); } #[test] - fn fib_e2e() { + fn instruction_set_subtables() { + test_instruction_set_subtables::>(); + test_instruction_set_subtables::>(); + } + + #[test] + fn fib_e2e_hyrax() { let _guard = FIB_FILE_LOCK.lock().unwrap(); let mut program = host::Program::new("fibonacci-guest"); @@ -221,7 +228,36 @@ mod tests { } #[test] - fn sha3_e2e() { + fn fib_e2e_zeromorph() { + let _guard = FIB_FILE_LOCK.lock().unwrap(); + + let mut program = host::Program::new("fibonacci-guest"); + program.set_input(&9u32); + let (bytecode, memory_init) = program.decode(); + let (io_device, bytecode_trace, instruction_trace, memory_trace, circuit_flags) = + program.trace(); + + let preprocessing = + RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 20); + let (proof, commitments) = + , C, M>>::prove( + io_device, + bytecode_trace, + memory_trace, + instruction_trace, + circuit_flags, + preprocessing.clone(), + ); + let verification_result = RV32IJoltVM::verify(preprocessing, proof, commitments); + assert!( + verification_result.is_ok(), + "Verification failed with error: {:?}", + verification_result.err() + ); + } + + #[test] + fn sha3_e2e_hyrax() { let _guard = SHA3_FILE_LOCK.lock().unwrap(); let mut program = host::Program::new("sha3-guest"); @@ -246,4 +282,34 @@ mod tests { verification_result.err() ); } + + #[test] + fn sha3_e2e_zeromorph() { + let _guard = SHA3_FILE_LOCK.lock().unwrap(); + + let mut program = host::Program::new("sha3-guest"); + program.set_input(&[5u8; 32]); + let (bytecode, memory_init) = program.decode(); + let (io_device, bytecode_trace, instruction_trace, memory_trace, circuit_flags) = + program.trace(); + + let preprocessing = + RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 20); + let (jolt_proof, jolt_commitments) = + , C, M>>::prove( + io_device, + bytecode_trace, + memory_trace, + instruction_trace, + circuit_flags, + preprocessing.clone(), + ); + + let verification_result = RV32IJoltVM::verify(preprocessing, jolt_proof, jolt_commitments); + assert!( + verification_result.is_ok(), + "Verification failed with error: {:?}", + verification_result.err() + ); + } } diff --git a/jolt-core/src/jolt/vm/timestamp_range_check.rs b/jolt-core/src/jolt/vm/timestamp_range_check.rs index a30079386..0ef2b9d4b 100644 --- a/jolt-core/src/jolt/vm/timestamp_range_check.rs +++ b/jolt-core/src/jolt/vm/timestamp_range_check.rs 
@@ -229,6 +229,7 @@ where } fn prove_openings( + _generators: &C::Setup, _polynomials: &RangeCheckPolynomials, _opening_point: &[F], _openings: &RangeCheckOpenings, @@ -266,6 +267,7 @@ where type InitFinalOpenings = RangeCheckOpenings; fn prove_memory_checking( + generators: &C::Setup, _: &NoPreprocessing, _polynomials: &RangeCheckPolynomials, _transcript: &mut ProofTranscript, @@ -611,6 +613,7 @@ where { #[tracing::instrument(skip_all, name = "TimestampValidityProof::prove")] pub fn prove( + generators: &C::Setup, range_check_polys: &RangeCheckPolynomials, t_read_polynomials: &[DensePolynomial; MEMORY_OPS_PER_INSTRUCTION], transcript: &mut ProofTranscript, @@ -635,6 +638,7 @@ where .collect::>(); let opening_proof = C::batch_prove( + generators, &polys, &r_grand_product, &openings, diff --git a/jolt-core/src/lasso/memory_checking.rs b/jolt-core/src/lasso/memory_checking.rs index 0bc0a6808..483321fc3 100644 --- a/jolt-core/src/lasso/memory_checking.rs +++ b/jolt-core/src/lasso/memory_checking.rs @@ -101,6 +101,7 @@ where #[tracing::instrument(skip_all, name = "MemoryCheckingProver::prove_memory_checking")] /// Generates a memory checking proof for the given committed polynomials. fn prove_memory_checking( + generators: &C::Setup, preprocessing: &Self::Preprocessing, polynomials: &Polynomials, transcript: &mut ProofTranscript, @@ -116,6 +117,7 @@ where let read_write_openings = Self::ReadWriteOpenings::open(polynomials, &r_read_write); let read_write_opening_proof = Self::ReadWriteOpenings::prove_openings( + generators, polynomials, &r_read_write, &read_write_openings, @@ -123,6 +125,7 @@ where ); let init_final_openings = Self::InitFinalOpenings::open(polynomials, &r_init_final); let init_final_opening_proof = Self::InitFinalOpenings::prove_openings( + generators, polynomials, &r_init_final, &init_final_openings, diff --git a/jolt-core/src/lasso/surge.rs b/jolt-core/src/lasso/surge.rs index ec6d3c866..ff8d71d64 100644 --- a/jolt-core/src/lasso/surge.rs +++ b/jolt-core/src/lasso/surge.rs @@ -89,12 +89,14 @@ where #[tracing::instrument(skip_all, name = "PrimarySumcheckOpenings::prove_openings")] fn prove_openings( + generators: &PCS::Setup, polynomials: &SurgePolys, opening_point: &[F], E_poly_openings: &Vec, transcript: &mut ProofTranscript, ) -> Self::Proof { PCS::batch_prove( + generators, &polynomials.E_polys.iter().collect::>(), opening_point, E_poly_openings, @@ -152,6 +154,7 @@ where #[tracing::instrument(skip_all, name = "SurgeReadWriteOpenings::prove_openings")] fn prove_openings( + generators: &PCS::Setup, polynomials: &SurgePolys, opening_point: &[F], openings: &Self, @@ -171,6 +174,7 @@ where .concat(); PCS::batch_prove( + generators, &read_write_polys, opening_point, &read_write_openings, @@ -248,12 +252,14 @@ where #[tracing::instrument(skip_all, name = "SurgeFinalOpenings::prove_openings")] fn prove_openings( + generators: &PCS::Setup, polynomials: &SurgePolys, opening_point: &[F], openings: &Self, transcript: &mut ProofTranscript, ) -> Self::Proof { PCS::batch_prove( + generators, &polynomials.final_cts.iter().collect::>(), opening_point, &openings.final_openings, @@ -597,6 +603,7 @@ where let sumcheck_openings = PrimarySumcheckOpenings::open(&polynomials, &r_z); // TODO: use return value from prove_arbitrary? 
let sumcheck_opening_proof = PrimarySumcheckOpenings::prove_openings( + generators, &polynomials, &r_z, &sumcheck_openings, @@ -612,7 +619,7 @@ where }; let memory_checking = - SurgeProof::prove_memory_checking(preprocessing, &polynomials, transcript); + SurgeProof::prove_memory_checking(generators, preprocessing, &polynomials, transcript); SurgeProof { commitment, diff --git a/jolt-core/src/poly/commitment/commitment_scheme.rs b/jolt-core/src/poly/commitment/commitment_scheme.rs index 7cd623b28..a4bb5da63 100644 --- a/jolt-core/src/poly/commitment/commitment_scheme.rs +++ b/jolt-core/src/poly/commitment/commitment_scheme.rs @@ -63,11 +63,13 @@ pub trait CommitmentScheme: Clone + Sync + Send + 'static { Self::batch_commit(&slices, setup, batch_type) } fn prove( + setup: &Self::Setup, poly: &DensePolynomial, opening_point: &[Self::Field], // point at which the polynomial is evaluated transcript: &mut ProofTranscript, ) -> Self::Proof; fn batch_prove( + setup: &Self::Setup, polynomials: &[&DensePolynomial], opening_point: &[Self::Field], openings: &[Self::Field], diff --git a/jolt-core/src/poly/commitment/hyrax.rs b/jolt-core/src/poly/commitment/hyrax.rs index 8106bf4c7..6de77f7e4 100644 --- a/jolt-core/src/poly/commitment/hyrax.rs +++ b/jolt-core/src/poly/commitment/hyrax.rs @@ -82,6 +82,7 @@ impl> CommitmentScheme for HyraxSch HyraxCommitment::commit_slice(eval_slice, generators) } fn prove( + _setup: &Self::Setup, poly: &DensePolynomial, opening_point: &[Self::Field], transcript: &mut ProofTranscript, @@ -90,6 +91,7 @@ impl> CommitmentScheme for HyraxSch HyraxOpeningProof::prove(poly, opening_point, 1, transcript) } fn batch_prove( + _setup: &Self::Setup, polynomials: &[&DensePolynomial], opening_point: &[Self::Field], openings: &[Self::Field], diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index c0fca7f35..c9323d00b 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -240,15 +240,13 @@ pub struct ZeromorphVerifierKey { pub tau_N_max_sub_2_N: P::G2Affine, } -#[derive(Debug, PartialEq)] +#[derive(Debug, PartialEq, CanonicalSerialize, CanonicalDeserialize)] pub struct ZeromorphCommitment(P::G1Affine); impl AppendToTranscript for ZeromorphCommitment
<P>
-where - Self: CurveGroup, { fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut ProofTranscript) { - transcript.append_point(b"poly_commitment_share", self); + transcript.append_point(b"poly_commitment_share", &self.0.into_group()); } } @@ -284,8 +282,8 @@ where let quotient_iter = quotient.iter_mut(); quotient_iter - .zip_eq(&*remainder_lo) - .zip_eq(&*remainder_hi) + .zip(&*remainder_lo) + .zip(&*remainder_hi) .for_each(|((q, r_lo), r_hi)| { *q = *r_hi - *r_lo; }); @@ -296,7 +294,7 @@ where #[cfg(not(feature = "multicore"))] let remainder_lo_iter = remainder_lo.iter_mut(); remainder_lo_iter - .zip_eq(remainder_hi) + .zip(remainder_hi) .for_each(|(r_lo, r_hi)| { *r_lo += (*r_hi - r_lo as &_) * x_i; }); @@ -390,11 +388,11 @@ where let q_scalars = iter::successors(Some(P::ScalarField::one()), |acc| Some(*acc * y_challenge)) .take(num_vars) - .zip_eq(offsets_of_x) + .zip(offsets_of_x) .zip(squares_of_x) .zip(&vs) - .zip_eq(&vs[1..]) - .zip_eq(challenges.iter().rev()) + .zip(&vs[1..]) + .zip(challenges.iter().rev()) .map( |(((((power_of_y, offset_of_x), square_of_x), v_i), v_j), u_i)| { ( @@ -422,9 +420,6 @@ where b"Zeromorph" } - //IDEAS; - // - extra sampling from transcript??? -> no adding randomness has to due with information leakage not verification - pub fn commit( pp: &ZeromorphProverKey
<P>
, poly: &DensePolynomial, @@ -532,7 +527,7 @@ where // evals[0..m] fn batch_open( pk: &ZeromorphProverKey
<P>
, - polynomials: &[DensePolynomial], + polynomials: &[&DensePolynomial], point: &[P::ScalarField], evals: &[P::ScalarField], transcript: &mut ProofTranscript, @@ -561,17 +556,17 @@ where fn batch_verify( vk: &ZeromorphVerifierKey
<P>
, - commitments: &[ZeromorphCommitment
<P>
], + commitments: &[&ZeromorphCommitment
<P>
], point: &[P::ScalarField], evals: &[P::ScalarField], batch_proof: &ZeromorphProof
<P>
, transcript: &mut ProofTranscript, - ) -> Result { + ) -> Result<(), ProofVerifyError> { // Compute batching of unshifted polynomials f_i: // Compute powers of batching challenge rho let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); let mut scalar = P::ScalarField::one(); - let (batched_eval, batched_commitment) = evals.iter().zip_eq(commitments.iter()).fold( + let (batched_eval, batched_commitment) = evals.iter().zip(commitments.iter()).fold( (P::ScalarField::zero(), P::G1::zero()), |(mut batched_evaluation, mut batched_commitment), (opening, commitment)| { batched_evaluation += scalar * opening; @@ -598,7 +593,7 @@ where eval: &P::ScalarField, proof: &ZeromorphProof
<P>
, transcript: &mut ProofTranscript, - ) -> Result { + ) -> Result<(), ProofVerifyError> { transcript.append_protocol_name(Self::protocol_name()); // Receive commitments [q_k] @@ -630,7 +625,7 @@ where ) = eval_and_quotient_scalars::
<P>
(y_challenge, x_challenge, z_challenge, point); q_scalars .iter_mut() - .zip_eq(zmpoly_q_scalars) + .zip(zmpoly_q_scalars) .for_each(|(scalar, zm_poly_q_scalar)| { *scalar += zm_poly_q_scalar; }); @@ -656,20 +651,17 @@ where (vk.kzg_vk.beta_g2.into_group() - (vk.kzg_vk.g2 * x_challenge)).into(), ], ); - Ok(pairing.is_zero()) + if pairing.is_zero() { + Ok(()) + } else { + Err(ProofVerifyError::InternalError) + } } } -//TODO: have setup be a dummy if lazy_static isn't in use -> just pass in lazy static and select needed keys from for portions it is needed. -//TODO: find commitments in interface -//TODO: test batched commitments outside interface first thing tomorrow. -//TODO: implement multilinear poly * scalar - -/* impl CommitmentScheme for Zeromorph
<P>
where
<P as Pairing>
::ScalarField: poly::field::JoltField, - ZeromorphCommitment
<P>
: CurveGroup, { type Field = P::ScalarField; type Setup = (ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
); @@ -678,11 +670,17 @@ where type BatchedProof = ZeromorphProof
<P>
; fn setup(_shapes: &[CommitShape]) -> Self::Setup { - todo!() + ZeromorphSRS(Arc::new(SRS::setup( + &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), + 65536 + 1 + ))).trim(65536) } fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { - assert!(setup.0.commit_pp.g1_powers().len() < poly.Z.len()); + assert!( + setup.0.commit_pp.g1_powers().len() > poly.Z.len(), + "COMMIT KEY LENGTH ERROR {}, {}", setup.0.commit_pp.g1_powers().len(), poly.Z.len() + ); ZeromorphCommitment( UVKZGPCS::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), ) @@ -701,8 +699,8 @@ where iter.enumerate() .map(|(i, evals)| { assert!( - gens.0.commit_pp.g1_powers().len() < evals.len(), - "COMMIT KEY LENGTH ERROR" + gens.0.commit_pp.g1_powers().len() > evals.len(), + "COMMIT KEY LENGTH ERROR {}, {}", gens.0.commit_pp.g1_powers().len(), evals.len() ); ZeromorphCommitment( UVKZGPCS::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())) @@ -722,13 +720,12 @@ where setup: &Self::Setup, poly: &DensePolynomial, opening_point: &[Self::Field], // point at which the polynomial is evaluated - commitment: &Self::Commitment, - eval: &Self::Field, transcript: &mut ProofTranscript, ) -> Self::Proof { + //TODO: setup + let eval = poly.evaluate(&opening_point); Zeromorph::
<P>
::open( &setup.0, - &commitment, &poly, &opening_point, &eval, @@ -738,39 +735,21 @@ where } fn batch_prove( + setup: &Self::Setup, polynomials: &[&DensePolynomial], opening_point: &[Self::Field], openings: &[Self::Field], batch_type: BatchType, transcript: &mut ProofTranscript, ) -> Self::BatchedProof { - // Generate batching challenge \rho and powers 1,...,\rho^{m-1} - let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); - let mut scalar = P::ScalarField::one(); - // Compute batching of unshifted polynomials f_i: - let mut scalar = P::ScalarField::one(); - let (f_batched, batched_evaluation) = (0..polynomials.len()).fold( - ( - DensePolynomial::new(vec![P::ScalarField::zero(); n]), - P::ScalarField::zero(), - ), - |(mut f_batched, mut batched_evaluation), i| { - f_batched += polynomials[i].clone() * scalar; - batched_evaluation += scalar * openings[i]; - scalar *= rho; - (f_batched, batched_evaluation) - }, - ); - let mut pi_poly = UniPoly::from_coeff(f_batched.Z.clone()); - Zeromorph::
<P>
::open( + //TODO: setup + Zeromorph::
<P>
::batch_open( &setup.0, - &commitment, - &pi_poly, + polynomials, &opening_point, - &batched_evaluation, + &openings, transcript, ) - .unwrap() } fn verify( @@ -799,25 +778,11 @@ where commitments: &[&Self::Commitment], transcript: &mut ProofTranscript, ) -> Result<(), ProofVerifyError> { - // Compute batching of unshifted polynomials f_i: - // Compute powers of batching challenge rho - let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); - let mut scalar = P::ScalarField::one(); - let (batched_opening, batched_commitment) = - openings.iter().zip_eq(commitments.iter()).fold( - (P::ScalarField::zero(), P::G1::zero()), - |(mut batched_evaluation, mut batched_commitment), (opening, commitment)| { - batched_evaluation += scalar * opening; - batched_commitment += commitment.0 * scalar; - scalar *= rho; - (batched_evaluation, batched_commitment) - }, - ); - Zeromorph::
<P>
::verify( + Zeromorph::
<P>
::batch_verify( &setup.1, - &ZeromorphCommitment(batched_commitment.into_affine()), - &opening_point, - &batched_opening, + commitments, + opening_point, + openings, &batch_proof, transcript, ) @@ -827,7 +792,6 @@ where b"zeromorph" } } -*/ #[cfg(test)] mod test { @@ -851,62 +815,6 @@ mod test { }) } - /* - #[test] - fn prove_verify_batched() { - let max_vars = 16; - let mut rng = test_rng(); - let num_polys = 8; - let srs = ZEROMORPH_SRS.lock().unwrap(); - - for num_vars in 3..max_vars { - // Setup - let (pk, vk) = { - let poly_size = 1 << (num_vars + 1); - srs.trim(poly_size - 1).unwrap() - }; - let polys: Vec> = (0..num_polys) - .map(|_| { - DensePolynomial::new( - (0..(1 << num_vars)) - .map(|_| Fr::rand(&mut rng)) - .collect::>(), - ) - }) - .collect::>(); - let challenges = (0..num_vars) - .into_iter() - .map(|_| Fr::rand(&mut rng)) - .collect::>(); - let evals = polys - .clone() - .into_iter() - .map(|poly| poly.evaluate(&challenges)) - .collect::>(); - - // Commit and open - let commitments = Zeromorph::::commit(&polys, &pk.g1_powers).unwrap(); - - let mut prover_transcript = Transcript::new(b"example"); - let proof = - Zeromorph::::prove(&polys, &evals, &challenges, &pk, &mut prover_transcript).unwrap(); - - let mut verifier_transcript = Transcript::new(b"example"); - Zeromorph::::verify( - &commitments, - &evals, - &challenges, - &vk, - &mut verifier_transcript, - proof, - ) - .unwrap(); - - //TODO: check both random oracles are synced - } - } - */ - /// Test for computing qk given multilinear f /// Given ๐‘“(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚™โ‚‹โ‚), and `(๐‘ข, ๐‘ฃ)` such that \f(\u) = \v, compute `qโ‚–(๐‘‹โ‚€, โ€ฆ, ๐‘‹โ‚–โ‚‹โ‚)` /// such that the following identity holds: @@ -1234,15 +1142,14 @@ mod test { .collect::>(); let altered_verifier_eval = poly.evaluate(&altered_verifier_point); let mut verifier_transcript = ProofTranscript::new(b"TestEval"); - assert!(!Zeromorph::::verify( + assert!(Zeromorph::::verify( &vk, &commitment, &altered_verifier_point, &altered_verifier_eval, &proof, &mut verifier_transcript, - ) - .unwrap()); + ).is_err()) } } @@ -1252,9 +1159,9 @@ mod test { for num_vars in [4, 5, 6] { let mut rng = rand_chacha::ChaCha20Rng::seed_from_u64(num_vars as u64); - let polys: Vec> = (0..num_polys) + let polys: Vec<_> = (0..num_polys) .map(|_| DensePolynomial::random(num_vars, &mut rng)) - .collect(); + .collect::>(); let point: Vec = (0..num_vars).map(|_| Fr::rand(&mut rng)).collect(); let evals: Vec = polys.iter().map(|poly| poly.evaluate(&point)).collect(); @@ -1265,10 +1172,13 @@ mod test { .map(|poly| Zeromorph::::commit(&pk, &poly).unwrap()) .collect(); + let commitments_refs: Vec<_> = commitments.iter().map(|x| x).collect(); + let polys_refs: Vec<_> = polys.iter().map(|x| x).collect(); + let mut prover_transcript = ProofTranscript::new(b"TestEval"); let proof = Zeromorph::::batch_open( &pk, - &polys, + &polys_refs, &point, &evals, &mut prover_transcript, @@ -1280,7 +1190,7 @@ mod test { let mut verifier_transcript = ProofTranscript::new(b"TestEval"); Zeromorph::::batch_verify( &vk, - &commitments, + &commitments_refs, &point, &evals, &proof, @@ -1302,14 +1212,14 @@ mod test { .map(|poly| poly.evaluate(&altered_verifier_point)) .collect(); let mut verifier_transcript = ProofTranscript::new(b"TestEval"); - assert!(!Zeromorph::::batch_verify( + assert!(Zeromorph::::batch_verify( &vk, - &commitments, + &commitments_refs, &altered_verifier_point, &altered_verifier_evals, &proof, &mut verifier_transcript, - ).unwrap()); + ).is_err()) } } } diff --git 
a/jolt-core/src/poly/structured_poly.rs b/jolt-core/src/poly/structured_poly.rs index 94179c431..626200499 100644 --- a/jolt-core/src/poly/structured_poly.rs +++ b/jolt-core/src/poly/structured_poly.rs @@ -38,6 +38,7 @@ where /// Proves that the `polynomials`, evaluated at `opening_point`, output the values given /// by `openings`. The polynomials should already be committed by the prover. fn prove_openings( + generators: &C::Setup, polynomials: &Polynomials, opening_point: &[F], openings: &Self, diff --git a/jolt-core/src/r1cs/snark.rs b/jolt-core/src/r1cs/snark.rs index 64ba00440..1690f8a36 100644 --- a/jolt-core/src/r1cs/snark.rs +++ b/jolt-core/src/r1cs/snark.rs @@ -399,12 +399,13 @@ impl> R1CSProof { #[tracing::instrument(skip_all, name = "R1CSProof::prove")] pub fn prove( + generators: &C::Setup, key: UniformSpartanKey, witness_segments: Vec>, transcript: &mut ProofTranscript, ) -> Result { // TODO(sragss): Fiat shamir (relevant) commitments - let proof = UniformSpartanProof::prove_precommitted(&key, witness_segments, transcript)?; + let proof = UniformSpartanProof::prove_precommitted(generators, &key, witness_segments, transcript)?; Ok(R1CSProof:: { proof, key }) } diff --git a/jolt-core/src/r1cs/spartan.rs b/jolt-core/src/r1cs/spartan.rs index 222f4765c..4dad60bb8 100644 --- a/jolt-core/src/r1cs/spartan.rs +++ b/jolt-core/src/r1cs/spartan.rs @@ -222,6 +222,7 @@ impl> UniformSpartanProof { /// produces a succinct proof of satisfiability of a `RelaxedR1CS` instance #[tracing::instrument(skip_all, name = "UniformSpartanProof::prove_precommitted")] pub fn prove_precommitted( + generators: &C::Setup, key: &UniformSpartanKey, witness_segments: Vec>, transcript: &mut ProofTranscript, @@ -447,6 +448,7 @@ impl> UniformSpartanProof { let witness_segment_polys_ref: Vec<&DensePolynomial> = witness_segment_polys.iter().collect(); let opening_proof = C::batch_prove( + generators, &witness_segment_polys_ref, r_y_point, &witness_evals, From e9d5530039ff953b719a089616cd71577a4745bb Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 23 May 2024 00:04:15 -0300 Subject: [PATCH 07/17] add e2e pcs benches + nit --- jolt-core/src/benches/bench.rs | 84 ++++++++++++++----- .../src/jolt/vm/timestamp_range_check.rs | 2 +- jolt-core/src/main.rs | 7 +- 3 files changed, 68 insertions(+), 25 deletions(-) diff --git a/jolt-core/src/benches/bench.rs b/jolt-core/src/benches/bench.rs index 9a76fc5ab..494916604 100644 --- a/jolt-core/src/benches/bench.rs +++ b/jolt-core/src/benches/bench.rs @@ -1,10 +1,19 @@ use crate::host; use crate::jolt::vm::rv32i_vm::{RV32IJoltVM, C, M}; use crate::jolt::vm::Jolt; +use crate::poly::commitment::commitment_scheme::CommitmentScheme; use crate::poly::commitment::hyrax::HyraxScheme; -use ark_bn254::G1Projective; +use crate::poly::commitment::zeromorph::Zeromorph; +use crate::poly::field::JoltField; +use ark_bn254::{Bn254, Fr, G1Projective}; use serde::Serialize; +#[derive(Debug, Copy, Clone, clap::ValueEnum)] +pub enum PCSType { + Hyrax, + Zeromorph +} + #[derive(Debug, Copy, Clone, clap::ValueEnum)] pub enum BenchType { Fibonacci, @@ -15,30 +24,53 @@ pub enum BenchType { #[allow(unreachable_patterns)] // good errors on new BenchTypes pub fn benchmarks( + pcs_type: PCSType, bench_type: BenchType, _num_cycles: Option, _memory_size: Option, _bytecode_size: Option, ) -> Vec<(tracing::Span, Box)> { - match bench_type { - BenchType::Sha2 => sha2(), - BenchType::Sha3 => sha3(), - BenchType::Sha2Chain => sha2chain(), - BenchType::Fibonacci => fibonacci(), - _ => panic!("BenchType does not 
have a mapping"), + match pcs_type { + PCSType::Hyrax => match bench_type { + BenchType::Sha2 => sha2::>(), + BenchType::Sha3 => sha3::>(), + BenchType::Sha2Chain => sha2chain::>(), + BenchType::Fibonacci => fibonacci::>(), + _ => panic!("BenchType does not have a mapping"), + }, + PCSType::Zeromorph => match bench_type { + BenchType::Sha2 => sha2::>(), + BenchType::Sha3 => sha3::>(), + BenchType::Sha2Chain => sha2chain::>(), + BenchType::Fibonacci => fibonacci::>(), + _ => panic!("BenchType does not have a mapping"), + }, + _ => panic!("PCS Type does not have a mapping"), } } -fn fibonacci() -> Vec<(tracing::Span, Box)> { - prove_example("fibonacci-guest", &9u32) +fn fibonacci() -> Vec<(tracing::Span, Box)> +where + F: JoltField, + PCS: CommitmentScheme, +{ + prove_example::("fibonacci-guest", &9u32) } -fn sha2() -> Vec<(tracing::Span, Box)> { - prove_example("sha2-guest", &vec![5u8; 2048]) +fn sha2() -> Vec<(tracing::Span, Box)> +where + F: JoltField, + PCS: CommitmentScheme, +{ + prove_example::, PCS, F>("sha2-guest", &vec![5u8; 2048]) } -fn sha3() -> Vec<(tracing::Span, Box)> { - prove_example("sha3-guest", &vec![5u8; 2048]) +fn sha3() -> Vec<(tracing::Span, Box)> +where + F: JoltField, + PCS: CommitmentScheme, +{ + prove_example::, PCS, F>("sha3-guest", &vec![5u8; 2048]) } #[allow(dead_code)] @@ -52,10 +84,14 @@ fn serialize_and_print_size(name: &str, item: &impl ark_serialize::CanonicalSeri println!("{:<30} : {:.3} MB", name, file_size_mb); } -fn prove_example( +fn prove_example( example_name: &str, input: &T, -) -> Vec<(tracing::Span, Box)> { +) -> Vec<(tracing::Span, Box)> +where + F: JoltField, + PCS: CommitmentScheme, +{ let mut tasks = Vec::new(); let mut program = host::Program::new(example_name); program.set_input(input); @@ -65,12 +101,12 @@ fn prove_example( let (io_device, trace, circuit_flags) = program.trace(); let preprocessing: crate::jolt::vm::JoltPreprocessing< - ark_ff::Fp, 4>, - HyraxScheme>, + F, + PCS, > = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); let (jolt_proof, jolt_commitments) = - , C, M>>::prove( + >::prove( io_device, trace, circuit_flags, @@ -101,7 +137,11 @@ fn prove_example( tasks } -fn sha2chain() -> Vec<(tracing::Span, Box)> { +fn sha2chain() -> Vec<(tracing::Span, Box)> +where + F: JoltField, + PCS: CommitmentScheme, +{ let mut tasks = Vec::new(); let mut program = host::Program::new("sha2-chain-guest"); program.set_input(&[5u8; 32]); @@ -112,12 +152,12 @@ fn sha2chain() -> Vec<(tracing::Span, Box)> { let (io_device, trace, circuit_flags) = program.trace(); let preprocessing: crate::jolt::vm::JoltPreprocessing< - ark_ff::Fp, 4>, - HyraxScheme>, + F, + PCS, > = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); let (jolt_proof, jolt_commitments) = - , C, M>>::prove( + >::prove( io_device, trace, circuit_flags, diff --git a/jolt-core/src/jolt/vm/timestamp_range_check.rs b/jolt-core/src/jolt/vm/timestamp_range_check.rs index 0ef2b9d4b..611343280 100644 --- a/jolt-core/src/jolt/vm/timestamp_range_check.rs +++ b/jolt-core/src/jolt/vm/timestamp_range_check.rs @@ -267,7 +267,7 @@ where type InitFinalOpenings = RangeCheckOpenings; fn prove_memory_checking( - generators: &C::Setup, + _generators: &C::Setup, _: &NoPreprocessing, _polynomials: &RangeCheckPolynomials, _transcript: &mut ProofTranscript, diff --git a/jolt-core/src/main.rs b/jolt-core/src/main.rs index 798948d79..b4317a820 100644 --- a/jolt-core/src/main.rs +++ b/jolt-core/src/main.rs @@ -1,7 +1,7 @@ use clap::{Args, Parser, 
Subcommand, ValueEnum}; use jolt_core::benches::{ - bench::{benchmarks, BenchType}, + bench::{benchmarks, BenchType, PCSType}, sum_timer::CumulativeTimingLayer, }; @@ -30,6 +30,9 @@ struct TraceArgs { #[clap(short, long, value_enum)] format: Option>, + #[clap(long, value_enum)] + pcs: PCSType, + /// Type of benchmark to run #[clap(long, value_enum)] name: BenchType, @@ -113,7 +116,7 @@ fn trace(args: TraceArgs) { } tracing_subscriber::registry().with(layers).init(); - for (span, bench) in benchmarks(args.name, args.num_cycles, None, None).into_iter() { + for (span, bench) in benchmarks(args.pcs, args.name, args.num_cycles, None, None).into_iter() { span.to_owned().in_scope(|| { bench(); tracing::info!("Bench Complete"); From f14e9aa1d67a666888d97550f3b1755099b25768 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 23 May 2024 01:25:04 -0300 Subject: [PATCH 08/17] split zm and kzg rm old kzg --- jolt-core/src/poly/commitment/kzg.rs | 290 ++++++++++---------- jolt-core/src/poly/commitment/mod.rs | 1 + jolt-core/src/poly/commitment/zeromorph.rs | 291 ++------------------- 3 files changed, 164 insertions(+), 418 deletions(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index 6045d10bb..5c7e9e4f6 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -1,59 +1,33 @@ -use ark_ec::scalar_mul::fixed_base::FixedBase; -use std::{borrow::Borrow, marker::PhantomData}; - use crate::msm::VariableBaseMSM; -use crate::poly; -use crate::poly::unipoly::UniPoly; -use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; +use crate::poly::{unipoly::UniPoly, field::JoltField}; +use crate::utils::errors::ProofVerifyError; +use ark_ec::scalar_mul::fixed_base::FixedBase; +use ark_ec::{pairing::Pairing, CurveGroup}; use ark_ff::PrimeField; -use ark_std::One; use ark_std::UniformRand; -use rand_chacha::rand_core::RngCore; -use thiserror::Error; - -#[derive(Error, Debug)] -pub enum KZGError { - #[error("Length Error: SRS Length: {0}, Key Length: {0}")] - KeyLengthError(usize, usize), - #[error("Length Error: Commitment Key Length: {0}, Polynomial Degree {0}")] - CommitLengthError(usize, usize), - #[error("Failed to compute quotient polynomial due to polynomial division")] - PolyDivisionError, -} +use rand_core::{CryptoRng, RngCore}; +use std::marker::PhantomData; +use std::sync::Arc; -#[derive(Debug, Clone, Default)] -pub struct UniversalKzgSrs { - pub g1_powers: Vec, - pub g2_powers: Vec, -} +#[cfg(feature = "ark-msm")] +use ark_ec::VariableBaseMSM; #[derive(Clone, Debug)] -pub struct KZGProverKey { - /// generators +pub struct SRS { pub g1_powers: Vec, + pub g2_powers: Vec, } -#[derive(Clone, Debug)] -pub struct KZGCommitment(P::G1Affine); -pub struct KZGVerifierKey { - /// The generator of G1. - pub g1: P::G1Affine, - /// The generator of G2. - pub g2: P::G2Affine, - /// tau times the above generator of G2. - pub tau_2: P::G2Affine, -} - -impl UniversalKzgSrs
<P>
{ - pub fn setup(max_degree: usize, rng: &mut R) -> UniversalKzgSrs
<P>
{ - let tau = P::ScalarField::rand(rng); - let g1 = P::G1::rand(rng); - let g2 = P::G2::rand(rng); +impl SRS
<P>
{ + pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { + let beta = P::ScalarField::rand(&mut rng); + let g1 = P::G1::rand(&mut rng); + let g2 = P::G2::rand(&mut rng); - let tau_powers: Vec<_> = (0..=max_degree) - .scan(tau, |state, _| { - let val = *state; - *state *= τ + let beta_powers: Vec = (0..=max_degree) + .scan(beta, |acc, _| { + let val = *acc; + *acc *= beta; Some(val) }) .collect(); @@ -61,174 +35,186 @@ impl UniversalKzgSrs
<P>
{ let window_size = FixedBase::get_mul_window_size(max_degree); let scalar_bits = P::ScalarField::MODULUS_BIT_SIZE as usize; - //TODO: gate with rayon - let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); - let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); - let g1_powers_projective = FixedBase::msm(scalar_bits, window_size, &g1_table, &tau_powers); - let g2_powers_projective = FixedBase::msm(scalar_bits, window_size, &g2_table, &tau_powers); - let g1_powers = P::G1::normalize_batch(&g1_powers_projective); - let g2_powers = P::G2::normalize_batch(&g2_powers_projective); + let (g1_powers_projective, g2_powers_projective) = rayon::join( + || { + let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); + FixedBase::msm(scalar_bits, window_size, &g1_table, &beta_powers) + }, + || { + let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); + FixedBase::msm(scalar_bits, window_size, &g2_table, &beta_powers) + }, + ); - UniversalKzgSrs { + let (g1_powers, g2_powers) = rayon::join( + || P::G1::normalize_batch(&g1_powers_projective), + || P::G2::normalize_batch(&g2_powers_projective), + ); + + Self { g1_powers, g2_powers, } } - pub fn get_prover_key(&self, key_size: usize) -> Result, KZGError> { - if self.g1_powers.len() < key_size { - return Err(KZGError::KeyLengthError(self.g1_powers.len(), key_size)); - } - Ok(self.g1_powers[..=key_size].to_vec()) + pub fn trim(params: Arc, max_degree: usize) -> (KZGProverKey
<P>
, KZGVerifierKey
<P>
) { + assert!(params.g1_powers.len() > 0, "max_degree is 0"); + assert!(max_degree < params.g1_powers.len(), "SRS length is less than size"); + let g1 = params.g1_powers[0]; + let g2 = params.g2_powers[0]; + let beta_g2 = params.g2_powers[1]; + let pk = KZGProverKey::new(params, 0, max_degree + 1); + let vk = KZGVerifierKey { g1, g2, beta_g2 }; + (pk, vk) } +} - pub fn get_verifier_key(&self, key_size: usize) -> Result, KZGError> { - if self.g1_powers.len() < key_size { - return Err(KZGError::KeyLengthError(self.g1_powers.len(), key_size)); +#[derive(Clone, Debug)] +pub struct KZGProverKey { + srs: Arc>, + // offset to read into SRS + offset: usize, + // max size of srs + supported_size: usize, +} + +impl KZGProverKey
<P>
{ + pub fn new(srs: Arc>, offset: usize, supported_size: usize) -> Self { + assert!( + srs.g1_powers.len() >= offset + supported_size, + "not enough powers (req: {} from offset {}) in the SRS (length: {})", + supported_size, + offset, + srs.g1_powers.len() + ); + Self { + srs, + offset, + supported_size, } - Ok(KZGVerifierKey { - g1: self.g1_powers[0], - g2: self.g2_powers[0], - tau_2: self.g2_powers[1], - }) } - pub fn trim(&self, key_size: usize) -> Result<(Vec, KZGVerifierKey
<P>
), KZGError> { - if self.g1_powers.len() < key_size { - return Err(KZGError::KeyLengthError(self.g1_powers.len(), key_size)); - } - let g1_powers = self.g1_powers[..=key_size].to_vec(); - - let pk = g1_powers; - let vk = KZGVerifierKey { - g1: self.g1_powers[0], - g2: self.g2_powers[0], - tau_2: self.g2_powers[1], - }; - Ok((pk, vk)) + pub fn g1_powers(&self) -> &[P::G1Affine] { + &self.srs.g1_powers[self.offset..self.offset + self.supported_size] } } -pub struct UnivariateKZG
<P: Pairing>
{ - phantom: PhantomData
<P>
, +#[derive(Clone, Copy, Debug)] +pub struct KZGVerifierKey { + pub g1: P::G1Affine, + pub g2: P::G2Affine, + pub beta_g2: P::G2Affine, } -impl UnivariateKZG
<P>
+#[derive(Debug, Clone, Eq, PartialEq, Default)] +pub struct UVKZGPCS { + _phantom: PhantomData
<P>
, +} + +impl UVKZGPCS
<P>
where -
<P as Pairing>
::ScalarField: poly::field::JoltField, +
<P as Pairing>
::ScalarField: JoltField, { pub fn commit_offset( - g1_powers: &Vec, + pk: &KZGProverKey
<P>
, poly: &UniPoly, offset: usize, - ) -> Result { - if poly.degree() > g1_powers.len() { - return Err(KZGError::CommitLengthError(poly.degree(), g1_powers.len())); + ) -> Result { + if poly.degree() > pk.g1_powers().len() { + return Err(ProofVerifyError::KeyLengthError( + poly.degree(), + pk.g1_powers().len(), + )); } - let scalars = &poly.as_vec(); - let bases = g1_powers.as_slice(); - - let com = ::msm( + let scalars = poly.as_vec(); + let bases = pk.g1_powers(); + let c = ::msm( &bases[offset..scalars.len()], &poly.as_vec()[offset..], ) .unwrap(); - Ok(com.into_affine()) + Ok(c.into_affine()) } pub fn commit( - g1_powers: &Vec, + pk: &KZGProverKey
<P>
, poly: &UniPoly, - ) -> Result, KZGError> { - if poly.degree() > g1_powers.len() { - return Err(KZGError::CommitLengthError(poly.degree(), g1_powers.len())); + ) -> Result { + if poly.degree() > pk.g1_powers().len() { + return Err(ProofVerifyError::KeyLengthError( + poly.degree(), + pk.g1_powers().len(), + )); } - let com = ::msm( - &g1_powers.as_slice()[..poly.as_vec().len()], - &poly.as_vec(), + let c = ::msm( + &pk.g1_powers()[..poly.as_vec().len()], + &poly.as_vec().as_slice(), ) .unwrap(); - Ok(KZGCommitment(com.into_affine())) + Ok(c.into_affine()) } pub fn open( - g1_powers: impl Borrow>, - polynomial: &UniPoly, + pk: &KZGProverKey
<P>
, + poly: &UniPoly, point: &P::ScalarField, - ) -> Result<(P::G1Affine, P::ScalarField), KZGError> { - let g1_powers = g1_powers.borrow(); + ) -> Result<(P::G1Affine, P::ScalarField), ProofVerifyError> + where +
<P as Pairing>
::ScalarField: JoltField, + { let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); - let witness_polynomial = polynomial - .divide_with_q_and_r(&divisor) - .map(|(q, _r)| q) - .ok_or(KZGError::PolyDivisionError)?; + let (witness_poly, _) = poly.divide_with_q_and_r(&divisor).unwrap(); let proof = ::msm( - &g1_powers.as_slice()[..witness_polynomial.as_vec().len()], - &witness_polynomial.as_vec(), + &pk.g1_powers()[..witness_poly.as_vec().len()], + &witness_poly.as_vec().as_slice(), ) .unwrap(); - let evaluation = polynomial.evaluate(point); - + let evaluation = poly.evaluate(point); Ok((proof.into_affine(), evaluation)) } +} - fn _verify( - vk: impl Borrow>, - commitment: &KZGCommitment
<P>
, +#[cfg(test)] +mod test { + use super::*; + use ark_bn254::{Bn254, Fr}; + use ark_ec::AffineRepr; + use ark_std::{rand::Rng, UniformRand}; + use rand_chacha::ChaCha20Rng; + use rand_core::SeedableRng; + + fn kzg_verify( + vk: &KZGVerifierKey
<P>
, + commitment: &P::G1Affine, point: &P::ScalarField, proof: &P::G1Affine, evaluation: &P::ScalarField, - ) -> Result { - let vk = vk.borrow(); - + ) -> Result { let lhs = P::pairing( - commitment.0.into_group() - vk.g1.into_group() * evaluation, + commitment.into_group() - vk.g1.into_group() * evaluation, vk.g2, ); - let rhs = P::pairing(proof, vk.tau_2.into_group() - (vk.g2 * point)); + let rhs = P::pairing(proof, vk.beta_g2.into_group() - (vk.g2 * point)); Ok(lhs == rhs) } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::poly::unipoly::UniPoly; - use ark_bn254::{Bn254, Fr}; - - use ark_std::{ - rand::{Rng, SeedableRng}, - UniformRand, - }; - use rand_chacha::{rand_core::RngCore, ChaCha20Rng}; - - fn random(degree: usize, mut rng: &mut R) -> UniPoly - where -
<P as Pairing>
::ScalarField: poly::field::JoltField, - { - let coeffs = (0..=degree) - .map(|_| P::ScalarField::rand(&mut rng)) - .collect::>(); - UniPoly::from_coeff(coeffs) - } #[test] - fn commit_prove_verify() -> Result<(), KZGError> { + fn kzg_commit_prove_verify() -> Result<(), ProofVerifyError> { let seed = b"11111111111111111111111111111111"; for _ in 0..100 { let mut rng = &mut ChaCha20Rng::from_seed(*seed); let degree = rng.gen_range(2..20); - let pp = UniversalKzgSrs::::setup(degree, &mut rng); - let (ck, vk) = pp.trim(degree).unwrap(); - let p = random::(degree, rng); - let comm = UnivariateKZG::::commit(&ck, &p)?; + let pp = Arc::new(SRS::::setup(&mut rng, degree)); + let (ck, vk) = SRS::trim(pp, degree); + let p = UniPoly::random::(degree, rng); + let comm = UVKZGPCS::::commit(&ck, &p)?; let point = Fr::rand(rng); - let (proof, value) = UnivariateKZG::::open(&ck, &p, &point)?; + let (proof, value) = UVKZGPCS::::open(&ck, &p, &point)?; assert!( - UnivariateKZG::::_verify(&vk, &comm, &point, &proof, &value)?, + kzg_verify(&vk, &comm, &point, &proof, &value)?, "proof was incorrect for max_degree = {}, polynomial_degree = {}", degree, p.degree(), @@ -236,4 +222,4 @@ mod tests { } Ok(()) } -} +} \ No newline at end of file diff --git a/jolt-core/src/poly/commitment/mod.rs b/jolt-core/src/poly/commitment/mod.rs index eed9b953e..c53ec21a6 100644 --- a/jolt-core/src/poly/commitment/mod.rs +++ b/jolt-core/src/poly/commitment/mod.rs @@ -2,3 +2,4 @@ pub mod commitment_scheme; pub mod hyrax; pub mod pedersen; pub mod zeromorph; +pub mod kzg; \ No newline at end of file diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index c9323d00b..bcad4d2dd 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -4,18 +4,12 @@ use std::{iter, marker::PhantomData}; use crate::msm::VariableBaseMSM; -use crate::poly::unipoly::UniPoly; -use crate::poly::{self, dense_mlpoly::DensePolynomial}; -use crate::utils::errors::ProofVerifyError; -use crate::utils::transcript::{AppendToTranscript, ProofTranscript}; -use ark_bn254::Bn254; -use ark_ec::scalar_mul::fixed_base::FixedBase; +use crate::poly::{self, unipoly::UniPoly, dense_mlpoly::DensePolynomial}; +use crate::utils::{errors::ProofVerifyError, transcript::{AppendToTranscript, ProofTranscript}}; use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; -use ark_ff::{batch_inversion, Field, PrimeField}; +use ark_ff::{batch_inversion, Field}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; -use ark_std::{One, UniformRand, Zero}; -use itertools::Itertools; -use lazy_static::lazy_static; +use ark_std::{One, Zero}; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use rand_core::{CryptoRng, RngCore}; use std::sync::Arc; @@ -25,184 +19,7 @@ use ark_ec::VariableBaseMSM; use rayon::prelude::*; -use super::commitment_scheme::{BatchType, CommitShape, CommitmentScheme}; - -//use super::commitment_scheme::{ BatchType, CommitShape, CommitmentScheme}; - -#[derive(Clone, Debug)] -pub struct SRS { - pub g1_powers: Vec, - pub g2_powers: Vec, -} - -impl SRS
<P>
{ - pub fn setup(mut rng: &mut R, max_degree: usize) -> Self { - let beta = P::ScalarField::rand(&mut rng); - let g1 = P::G1::rand(&mut rng); - let g2 = P::G2::rand(&mut rng); - - let beta_powers: Vec = (0..=max_degree) - .scan(beta, |acc, _| { - let val = *acc; - *acc *= beta; - Some(val) - }) - .collect(); - - let window_size = FixedBase::get_mul_window_size(max_degree); - let scalar_bits = P::ScalarField::MODULUS_BIT_SIZE as usize; - - //TODO: gate with rayon - let (g1_powers_projective, g2_powers_projective) = rayon::join( - || { - let g1_table = FixedBase::get_window_table(scalar_bits, window_size, g1); - FixedBase::msm(scalar_bits, window_size, &g1_table, &beta_powers) - }, - || { - let g2_table = FixedBase::get_window_table(scalar_bits, window_size, g2); - FixedBase::msm(scalar_bits, window_size, &g2_table, &beta_powers) - }, - ); - - let (g1_powers, g2_powers) = rayon::join( - || P::G1::normalize_batch(&g1_powers_projective), - || P::G2::normalize_batch(&g2_powers_projective), - ); - - Self { - g1_powers, - g2_powers, - } - } - - pub fn trim(params: Arc, supported_size: usize) -> (KZGProverKey
<P>
, KZGVerifierKey
<P>
) { - assert!(params.g1_powers.len() > 0, "max_degree is 0"); - let g1 = params.g1_powers[0]; - let g2 = params.g2_powers[0]; - let beta_g2 = params.g2_powers[1]; - let pk = KZGProverKey::new(params, 0, supported_size + 1); - let vk = KZGVerifierKey { g1, g2, beta_g2 }; - (pk, vk) - } -} - -// Abstraction around SRS preventing copying. Arc of SRS -#[derive(Clone, Debug)] -pub struct KZGProverKey { - srs: Arc>, - // offset to read into SRS - offset: usize, - // max size of srs - supported_size: usize, -} - -impl KZGProverKey
<P>
{ - pub fn new(srs: Arc>, offset: usize, supported_size: usize) -> Self { - assert!( - srs.g1_powers.len() >= offset + supported_size, - "not enough powers (req: {} from offset {}) in the SRS (length: {})", - supported_size, - offset, - srs.g1_powers.len() - ); - Self { - srs, - offset, - supported_size, - } - } - - pub fn g1_powers(&self) -> &[P::G1Affine] { - &self.srs.g1_powers[self.offset..self.offset + self.supported_size] - } -} - -// Abstraction around SRS preventing copying. Arc of SRS -#[derive(Clone, Copy, Debug)] -pub struct KZGVerifierKey { - pub g1: P::G1Affine, - pub g2: P::G2Affine, - pub beta_g2: P::G2Affine, -} - -#[derive(Debug, Clone, Eq, PartialEq, Default)] -pub struct UVKZGPCS { - _phantom: PhantomData
<P>
, -} - -impl UVKZGPCS
<P>
-where -
<P as Pairing>
::ScalarField: poly::field::JoltField, -{ - fn commit_offset( - pk: &KZGProverKey
<P>
, - poly: &UniPoly, - offset: usize, - ) -> Result { - if poly.degree() > pk.g1_powers().len() { - return Err(ProofVerifyError::KeyLengthError( - poly.degree(), - pk.g1_powers().len(), - )); - } - - let scalars = poly.as_vec(); - let bases = pk.g1_powers(); - let c = ::msm( - &bases[offset..scalars.len()], - &poly.as_vec()[offset..], - ) - .unwrap(); - - Ok(c.into_affine()) - } - - pub fn commit( - pk: &KZGProverKey
<P>
, - poly: &UniPoly, - ) -> Result { - if poly.degree() > pk.g1_powers().len() { - return Err(ProofVerifyError::KeyLengthError( - poly.degree(), - pk.g1_powers().len(), - )); - } - let c = ::msm( - &pk.g1_powers()[..poly.as_vec().len()], - &poly.as_vec().as_slice(), - ) - .unwrap(); - Ok(c.into_affine()) - } - - fn open( - pk: &KZGProverKey
<P>
, - poly: &UniPoly, - point: &P::ScalarField, - ) -> Result<(P::G1Affine, P::ScalarField), ProofVerifyError> - where -
<P as Pairing>
::ScalarField: poly::field::JoltField, - { - let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); - let (witness_poly, _) = poly.divide_with_q_and_r(&divisor).unwrap(); - let proof = ::msm( - &pk.g1_powers()[..witness_poly.as_vec().len()], - &witness_poly.as_vec().as_slice(), - ) - .unwrap(); - let evaluation = poly.evaluate(point); - Ok((proof.into_affine(), evaluation)) - } -} - -const MAX_VARS: usize = 17; - -lazy_static! { - pub static ref ZEROMORPH_SRS: ZeromorphSRS = ZeromorphSRS(Arc::new(SRS::setup( - &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), - 1 << (MAX_VARS + 1) - ))); -} +use super::{commitment_scheme::{BatchType, CommitShape, CommitmentScheme}, kzg::{SRS, UVKZGPCS, KZGProverKey, KZGVerifierKey}}; pub struct ZeromorphSRS(Arc>); @@ -212,7 +29,6 @@ impl ZeromorphSRS
<P>
{ } pub fn trim(self, max_degree: usize) -> (ZeromorphProverKey
<P>
, ZeromorphVerifierKey
<P>
) { - //TODO: remove into() let (commit_pp, kzg_vk) = SRS::trim(self.0.clone(), max_degree); let offset = self.0.g1_powers.len() - max_degree; let tau_N_max_sub_2_N = self.0.g2_powers[offset]; @@ -435,7 +251,6 @@ where )) } - //TODO: change interface to create commitment for poly??? pub fn open( pp: &ZeromorphProverKey
<P>
, poly: &DensePolynomial, @@ -453,7 +268,6 @@ where )); } - //assert_eq!(Self::commit(pp, poly).unwrap(), *comm); assert_eq!(poly.evaluate(point), *eval); let (quotients, remainder): (Vec>, P::ScalarField) = @@ -462,16 +276,15 @@ where assert_eq!(remainder, *eval); // Compute the multilinear quotients q_k = q_k(X_0, ..., X_{k-1}) + // TODO: multicore gate let q_k_com: Vec = quotients .par_iter() .map(|q| UVKZGPCS::commit(&pp.commit_pp, q).unwrap()) .collect(); let q_comms: Vec = q_k_com - .clone() - .into_iter() + .par_iter() .map(|c| c.into_group()) .collect(); - //transcript.append_points(b"q_comms", &q_comms); q_comms .iter() .for_each(|c| transcript.append_point(b"quo", c)); @@ -521,10 +334,6 @@ where }) } - //Batch together polynomials -> Then commit - // polys[0..m] - // commitments[0..m] - // evals[0..m] fn batch_open( pk: &ZeromorphProverKey
<P>
, polynomials: &[&DensePolynomial], @@ -534,6 +343,8 @@ where ) -> ZeromorphProof
<P>
{ let num_vars = point.len(); let n = 1 << num_vars; + + //TODO(pat): produce powers in parallel // Generate batching challenge \rho and powers 1,...,\rho^{m-1} let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); // Compute batching of unshifted polynomials f_i, and batched eval v_i: @@ -550,8 +361,8 @@ where (f_batched, batched_evaluation) }, ); - let pi_poly = DensePolynomial::new(f_batched.Z.clone()); - Zeromorph::
<P>
::open(&pk, &pi_poly, &point, &batched_evaluation, transcript).unwrap() + let poly = DensePolynomial::new(f_batched.Z.clone()); + Zeromorph::
<P>
::open(&pk, &poly, &point, &batched_evaluation, transcript).unwrap() } fn batch_verify( @@ -562,6 +373,8 @@ where batch_proof: &ZeromorphProof
<P>
, transcript: &mut ProofTranscript, ) -> Result<(), ProofVerifyError> { + + //TODO(pat): produce powers in parallel using window method // Compute batching of unshifted polynomials f_i: // Compute powers of batching challenge rho let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); @@ -585,7 +398,6 @@ where ) } - //Change api pub fn verify( vk: &ZeromorphVerifierKey
<P>
, comm: &ZeromorphCommitment
<P>
, @@ -596,12 +408,9 @@ where ) -> Result<(), ProofVerifyError> { transcript.append_protocol_name(Self::protocol_name()); - // Receive commitments [q_k] - //TODO: remove clone let q_comms: Vec = proof .q_k_com - .clone() - .into_iter() + .iter() .map(|c| c.into_group()) .collect(); q_comms @@ -636,7 +445,6 @@ where .concat(); let bases = [ vec![proof.q_hat_com, comm.0, vk.kzg_vk.g1], - //TODO: eliminate proof.q_k_com.clone(), ] .concat(); @@ -669,11 +477,13 @@ where type Proof = ZeromorphProof
<P>
; type BatchedProof = ZeromorphProof
<P>
; - fn setup(_shapes: &[CommitShape]) -> Self::Setup { + fn setup(shapes: &[CommitShape]) -> Self::Setup { + let max_len = shapes.iter().map(|shape| shape.input_length).max().unwrap(); + ZeromorphSRS(Arc::new(SRS::setup( &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), - 65536 + 1 - ))).trim(65536) + max_len + ))).trim(max_len) } fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { @@ -696,8 +506,8 @@ where let iter = evals.par_iter(); #[cfg(not(feature = "multicore"))] let iter = evals.iter(); - iter.enumerate() - .map(|(i, evals)| { + iter + .map(|evals| { assert!( gens.0.commit_pp.g1_powers().len() > evals.len(), "COMMIT KEY LENGTH ERROR {}, {}", gens.0.commit_pp.g1_powers().len(), evals.len() @@ -722,7 +532,6 @@ where opening_point: &[Self::Field], // point at which the polynomial is evaluated transcript: &mut ProofTranscript, ) -> Self::Proof { - //TODO: setup let eval = poly.evaluate(&opening_point); Zeromorph::
<P>
::open( &setup.0, @@ -739,10 +548,9 @@ where polynomials: &[&DensePolynomial], opening_point: &[Self::Field], openings: &[Self::Field], - batch_type: BatchType, + _batch_type: BatchType, transcript: &mut ProofTranscript, ) -> Self::BatchedProof { - //TODO: setup Zeromorph::
<P>
::batch_open( &setup.0, polynomials, @@ -798,9 +606,8 @@ mod test { use super::*; use crate::utils::math::Math; use ark_bn254::{Bn254, Fr}; - use ark_ec::AffineRepr; use ark_ff::{BigInt, Zero}; - use ark_std::{rand::Rng, test_rng, UniformRand}; + use ark_std::{test_rng, UniformRand}; use rand_core::SeedableRng; // Evaluate Phi_k(x) = \sum_{i=0}^k x^i using the direct inefficent formula @@ -1050,54 +857,6 @@ mod test { } } - fn kzg_verify( - vk: &KZGVerifierKey
<P>
, - commitment: &P::G1Affine, - point: &P::ScalarField, - proof: &P::G1Affine, - evaluation: &P::ScalarField, - ) -> Result { - let lhs = P::pairing( - commitment.into_group() - vk.g1.into_group() * evaluation, - vk.g2, - ); - let rhs = P::pairing(proof, vk.beta_g2.into_group() - (vk.g2 * point)); - Ok(lhs == rhs) - } - - fn random(degree: usize, mut rng: &mut R) -> UniPoly - where -
<P as Pairing>
::ScalarField: poly::field::JoltField, - { - let coeffs = (0..=degree) - .map(|_| P::ScalarField::rand(&mut rng)) - .collect::>(); - UniPoly::from_coeff(coeffs) - } - - #[test] - fn kzg_commit_prove_verify() -> Result<(), ProofVerifyError> { - let seed = b"11111111111111111111111111111111"; - for _ in 0..100 { - let mut rng = &mut ChaCha20Rng::from_seed(*seed); - let degree = rng.gen_range(2..20); - - let pp = Arc::new(SRS::::setup(&mut rng, degree)); - let (ck, vk) = SRS::trim(pp, degree); - let p = random::(degree, rng); - let comm = UVKZGPCS::::commit(&ck, &p)?; - let point = Fr::rand(rng); - let (proof, value) = UVKZGPCS::::open(&ck, &p, &point)?; - assert!( - kzg_verify(&vk, &comm, &point, &proof, &value)?, - "proof was incorrect for max_degree = {}, polynomial_degree = {}", - degree, - p.degree(), - ); - } - Ok(()) - } - #[test] fn zeromorph_commit_prove_verify() { for num_vars in [4, 5, 6] { @@ -1167,11 +926,11 @@ mod test { let srs = ZeromorphSRS::::setup(&mut rng, 1 << num_vars); let (pk, vk) = srs.trim(1 << num_vars); - let commitments: Vec> = polys + let commitments: Vec<_> = polys .iter() .map(|poly| Zeromorph::::commit(&pk, &poly).unwrap()) .collect(); - + let commitments_refs: Vec<_> = commitments.iter().map(|x| x).collect(); let polys_refs: Vec<_> = polys.iter().map(|x| x).collect(); From 34d58e0b76f26ac54a58e8f7a8356af4348f82a6 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 23 May 2024 01:46:45 -0300 Subject: [PATCH 09/17] fmt --- jolt-core/src/benches/bench.rs | 24 +++--- jolt-core/src/jolt/vm/bytecode.rs | 14 ++- jolt-core/src/jolt/vm/instruction_lookups.rs | 3 +- jolt-core/src/jolt/vm/mod.rs | 9 +- jolt-core/src/jolt/vm/read_write_memory.rs | 11 ++- jolt-core/src/jolt/vm/rv32i_vm.rs | 26 +++--- jolt-core/src/poly/commitment/kzg.rs | 9 +- jolt-core/src/poly/commitment/mod.rs | 2 +- jolt-core/src/poly/commitment/zeromorph.rs | 89 +++++++++----------- jolt-core/src/poly/unipoly.rs | 11 --- jolt-core/src/r1cs/snark.rs | 7 +- 11 files changed, 102 insertions(+), 103 deletions(-) diff --git a/jolt-core/src/benches/bench.rs b/jolt-core/src/benches/bench.rs index 494916604..2b2a8bc33 100644 --- a/jolt-core/src/benches/bench.rs +++ b/jolt-core/src/benches/bench.rs @@ -11,7 +11,7 @@ use serde::Serialize; #[derive(Debug, Copy, Clone, clap::ValueEnum)] pub enum PCSType { Hyrax, - Zeromorph + Zeromorph, } #[derive(Debug, Copy, Clone, clap::ValueEnum)] @@ -49,7 +49,7 @@ pub fn benchmarks( } } -fn fibonacci() -> Vec<(tracing::Span, Box)> +fn fibonacci() -> Vec<(tracing::Span, Box)> where F: JoltField, PCS: CommitmentScheme, @@ -57,7 +57,7 @@ where prove_example::("fibonacci-guest", &9u32) } -fn sha2() -> Vec<(tracing::Span, Box)> +fn sha2() -> Vec<(tracing::Span, Box)> where F: JoltField, PCS: CommitmentScheme, @@ -65,7 +65,7 @@ where prove_example::, PCS, F>("sha2-guest", &vec![5u8; 2048]) } -fn sha3() -> Vec<(tracing::Span, Box)> +fn sha3() -> Vec<(tracing::Span, Box)> where F: JoltField, PCS: CommitmentScheme, @@ -87,7 +87,7 @@ fn serialize_and_print_size(name: &str, item: &impl ark_serialize::CanonicalSeri fn prove_example( example_name: &str, input: &T, -) -> Vec<(tracing::Span, Box)> +) -> Vec<(tracing::Span, Box)> where F: JoltField, PCS: CommitmentScheme, @@ -100,10 +100,8 @@ where let (bytecode, memory_init) = program.decode(); let (io_device, trace, circuit_flags) = program.trace(); - let preprocessing: crate::jolt::vm::JoltPreprocessing< - F, - PCS, - > = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); + let preprocessing: 
crate::jolt::vm::JoltPreprocessing = + RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); let (jolt_proof, jolt_commitments) = >::prove( @@ -137,7 +135,7 @@ where tasks } -fn sha2chain() -> Vec<(tracing::Span, Box)> +fn sha2chain() -> Vec<(tracing::Span, Box)> where F: JoltField, PCS: CommitmentScheme, @@ -151,10 +149,8 @@ where let (bytecode, memory_init) = program.decode(); let (io_device, trace, circuit_flags) = program.trace(); - let preprocessing: crate::jolt::vm::JoltPreprocessing< - F, - PCS, - > = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); + let preprocessing: crate::jolt::vm::JoltPreprocessing = + RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); let (jolt_proof, jolt_commitments) = >::prove( diff --git a/jolt-core/src/jolt/vm/bytecode.rs b/jolt-core/src/jolt/vm/bytecode.rs index d761aa17b..b986626a5 100644 --- a/jolt-core/src/jolt/vm/bytecode.rs +++ b/jolt-core/src/jolt/vm/bytecode.rs @@ -862,7 +862,12 @@ mod tests { let generators = HyraxScheme::::setup(&commitment_shapes); let commitments = polys.commit(&generators); - let proof = BytecodeProof::prove_memory_checking(&generators, &preprocessing, &polys, &mut transcript); + let proof = BytecodeProof::prove_memory_checking( + &generators, + &preprocessing, + &polys, + &mut transcript, + ); let mut transcript = ProofTranscript::new(b"test_transcript"); BytecodeProof::verify_memory_checking( @@ -924,7 +929,12 @@ mod tests { let mut transcript = ProofTranscript::new(b"test_transcript"); - let proof = BytecodeProof::prove_memory_checking(&generators, &preprocessing, &polys, &mut transcript); + let proof = BytecodeProof::prove_memory_checking( + &generators, + &preprocessing, + &polys, + &mut transcript, + ); let mut transcript = ProofTranscript::new(b"test_transcript"); BytecodeProof::verify_memory_checking( diff --git a/jolt-core/src/jolt/vm/instruction_lookups.rs b/jolt-core/src/jolt/vm/instruction_lookups.rs index 500800c1d..e5829cb05 100644 --- a/jolt-core/src/jolt/vm/instruction_lookups.rs +++ b/jolt-core/src/jolt/vm/instruction_lookups.rs @@ -856,7 +856,8 @@ where opening_proof: sumcheck_opening_proof, }; - let memory_checking = Self::prove_memory_checking(generators, preprocessing, polynomials, transcript); + let memory_checking = + Self::prove_memory_checking(generators, preprocessing, polynomials, transcript); InstructionLookupsProof { _instructions: PhantomData, diff --git a/jolt-core/src/jolt/vm/mod.rs b/jolt-core/src/jolt/vm/mod.rs index 5fd228116..590cc9cc7 100644 --- a/jolt-core/src/jolt/vm/mod.rs +++ b/jolt-core/src/jolt/vm/mod.rs @@ -399,8 +399,13 @@ pub trait Jolt, const C: usize, c drop_in_background_thread(jolt_polynomials); - let r1cs_proof = - R1CSProof::prove(&preprocessing.generators, spartan_key, witness_segments, &mut transcript).expect("proof failed"); + let r1cs_proof = R1CSProof::prove( + &preprocessing.generators, + spartan_key, + witness_segments, + &mut transcript, + ) + .expect("proof failed"); let jolt_proof = JoltProof { trace_length, diff --git a/jolt-core/src/jolt/vm/read_write_memory.rs b/jolt-core/src/jolt/vm/read_write_memory.rs index 93f03434a..c39cd0044 100644 --- a/jolt-core/src/jolt/vm/read_write_memory.rs +++ b/jolt-core/src/jolt/vm/read_write_memory.rs @@ -1570,7 +1570,8 @@ where transcript, ); - let sumcheck_opening_proof = C::prove(&generators, &polynomials.v_final, &r_sumcheck, transcript); + let sumcheck_opening_proof = + C::prove(&generators, &polynomials.v_final, &r_sumcheck, 
transcript); Self { num_rounds, @@ -1695,8 +1696,12 @@ where program_io: &JoltDevice, transcript: &mut ProofTranscript, ) -> Self { - let memory_checking_proof = - ReadWriteMemoryProof::prove_memory_checking(generators, preprocessing, polynomials, transcript); + let memory_checking_proof = ReadWriteMemoryProof::prove_memory_checking( + generators, + preprocessing, + polynomials, + transcript, + ); let output_proof = OutputSumcheckProof::prove_outputs( generators, diff --git a/jolt-core/src/jolt/vm/rv32i_vm.rs b/jolt-core/src/jolt/vm/rv32i_vm.rs index 18fc1d714..db19be260 100644 --- a/jolt-core/src/jolt/vm/rv32i_vm.rs +++ b/jolt-core/src/jolt/vm/rv32i_vm.rs @@ -177,14 +177,11 @@ mod tests { fn test_instruction_set_subtables() { let mut subtable_set: HashSet<_> = HashSet::new(); - for instruction in - >::InstructionSet::iter() - { + for instruction in >::InstructionSet::iter() { for (subtable, _) in instruction.subtables::(C, M) { // panics if subtable cannot be cast to enum variant - let _ = >::Subtables::from( - subtable.subtable_id(), - ); + let _ = + >::Subtables::from(subtable.subtable_id()); subtable_set.insert(subtable.subtable_id()); } } @@ -239,15 +236,14 @@ mod tests { let preprocessing = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 20); - let (proof, commitments) = - , C, M>>::prove( - io_device, - bytecode_trace, - memory_trace, - instruction_trace, - circuit_flags, - preprocessing.clone(), - ); + let (proof, commitments) = , C, M>>::prove( + io_device, + bytecode_trace, + memory_trace, + instruction_trace, + circuit_flags, + preprocessing.clone(), + ); let verification_result = RV32IJoltVM::verify(preprocessing, proof, commitments); assert!( verification_result.is_ok(), diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index 5c7e9e4f6..380922e62 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -1,5 +1,5 @@ use crate::msm::VariableBaseMSM; -use crate::poly::{unipoly::UniPoly, field::JoltField}; +use crate::poly::{field::JoltField, unipoly::UniPoly}; use crate::utils::errors::ProofVerifyError; use ark_ec::scalar_mul::fixed_base::FixedBase; use ark_ec::{pairing::Pairing, CurveGroup}; @@ -59,7 +59,10 @@ impl SRS

<P> { pub fn trim(params: Arc<Self>, max_degree: usize) -> (KZGProverKey<P>
, KZGVerifierKey<P>
) { assert!(params.g1_powers.len() > 0, "max_degree is 0"); - assert!(max_degree < params.g1_powers.len(), "SRS length is less than size"); + assert!( + max_degree < params.g1_powers.len(), + "SRS length is less than size" + ); let g1 = params.g1_powers[0]; let g2 = params.g2_powers[0]; let beta_g2 = params.g2_powers[1]; @@ -222,4 +225,4 @@ mod test { } Ok(()) } -} \ No newline at end of file +} diff --git a/jolt-core/src/poly/commitment/mod.rs b/jolt-core/src/poly/commitment/mod.rs index c53ec21a6..61a62504e 100644 --- a/jolt-core/src/poly/commitment/mod.rs +++ b/jolt-core/src/poly/commitment/mod.rs @@ -1,5 +1,5 @@ pub mod commitment_scheme; pub mod hyrax; +pub mod kzg; pub mod pedersen; pub mod zeromorph; -pub mod kzg; \ No newline at end of file diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index bcad4d2dd..4b2f69147 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -4,8 +4,11 @@ use std::{iter, marker::PhantomData}; use crate::msm::VariableBaseMSM; -use crate::poly::{self, unipoly::UniPoly, dense_mlpoly::DensePolynomial}; -use crate::utils::{errors::ProofVerifyError, transcript::{AppendToTranscript, ProofTranscript}}; +use crate::poly::{self, dense_mlpoly::DensePolynomial, unipoly::UniPoly}; +use crate::utils::{ + errors::ProofVerifyError, + transcript::{AppendToTranscript, ProofTranscript}, +}; use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::{batch_inversion, Field}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; @@ -19,7 +22,10 @@ use ark_ec::VariableBaseMSM; use rayon::prelude::*; -use super::{commitment_scheme::{BatchType, CommitShape, CommitmentScheme}, kzg::{SRS, UVKZGPCS, KZGProverKey, KZGVerifierKey}}; +use super::{ + commitment_scheme::{BatchType, CommitShape, CommitmentScheme}, + kzg::{KZGProverKey, KZGVerifierKey, SRS, UVKZGPCS}, +}; pub struct ZeromorphSRS(Arc>); @@ -59,8 +65,7 @@ pub struct ZeromorphVerifierKey { #[derive(Debug, PartialEq, CanonicalSerialize, CanonicalDeserialize)] pub struct ZeromorphCommitment(P::G1Affine); -impl AppendToTranscript for ZeromorphCommitment

<P>
-{ +impl<P: Pairing> AppendToTranscript for ZeromorphCommitment<P>

{ fn append_to_transcript(&self, _label: &'static [u8], transcript: &mut ProofTranscript) { transcript.append_point(b"poly_commitment_share", &self.0.into_group()); } @@ -281,10 +286,7 @@ where .par_iter() .map(|q| UVKZGPCS::commit(&pp.commit_pp, q).unwrap()) .collect(); - let q_comms: Vec = q_k_com - .par_iter() - .map(|c| c.into_group()) - .collect(); + let q_comms: Vec = q_k_com.par_iter().map(|c| c.into_group()).collect(); q_comms .iter() .for_each(|c| transcript.append_point(b"quo", c)); @@ -344,7 +346,7 @@ where let num_vars = point.len(); let n = 1 << num_vars; - //TODO(pat): produce powers in parallel + //TODO(pat): produce powers in parallel // Generate batching challenge \rho and powers 1,...,\rho^{m-1} let rho: P::ScalarField = transcript.challenge_scalar(b"rho"); // Compute batching of unshifted polynomials f_i, and batched eval v_i: @@ -373,7 +375,6 @@ where batch_proof: &ZeromorphProof
<P>
, transcript: &mut ProofTranscript, ) -> Result<(), ProofVerifyError> { - //TODO(pat): produce powers in parallel using window method // Compute batching of unshifted polynomials f_i: // Compute powers of batching challenge rho @@ -408,11 +409,7 @@ where ) -> Result<(), ProofVerifyError> { transcript.append_protocol_name(Self::protocol_name()); - let q_comms: Vec = proof - .q_k_com - .iter() - .map(|c| c.into_group()) - .collect(); + let q_comms: Vec = proof.q_k_com.iter().map(|c| c.into_group()).collect(); q_comms .iter() .for_each(|c| transcript.append_point(b"quo", c)); @@ -482,15 +479,18 @@ where ZeromorphSRS(Arc::new(SRS::setup( &mut ChaCha20Rng::from_seed(*b"ZEROMORPH_POLY_COMMITMENT_SCHEME"), - max_len - ))).trim(max_len) + max_len, + ))) + .trim(max_len) } fn commit(poly: &DensePolynomial, setup: &Self::Setup) -> Self::Commitment { assert!( setup.0.commit_pp.g1_powers().len() > poly.Z.len(), - "COMMIT KEY LENGTH ERROR {}, {}", setup.0.commit_pp.g1_powers().len(), poly.Z.len() - ); + "COMMIT KEY LENGTH ERROR {}, {}", + setup.0.commit_pp.g1_powers().len(), + poly.Z.len() + ); ZeromorphCommitment( UVKZGPCS::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), ) @@ -506,18 +506,18 @@ where let iter = evals.par_iter(); #[cfg(not(feature = "multicore"))] let iter = evals.iter(); - iter - .map(|evals| { - assert!( - gens.0.commit_pp.g1_powers().len() > evals.len(), - "COMMIT KEY LENGTH ERROR {}, {}", gens.0.commit_pp.g1_powers().len(), evals.len() - ); - ZeromorphCommitment( - UVKZGPCS::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())) - .unwrap(), - ) - }) - .collect::>() + iter.map(|evals| { + assert!( + gens.0.commit_pp.g1_powers().len() > evals.len(), + "COMMIT KEY LENGTH ERROR {}, {}", + gens.0.commit_pp.g1_powers().len(), + evals.len() + ); + ZeromorphCommitment( + UVKZGPCS::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), + ) + }) + .collect::>() } fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { @@ -533,14 +533,7 @@ where transcript: &mut ProofTranscript, ) -> Self::Proof { let eval = poly.evaluate(&opening_point); - Zeromorph::
<P>
::open( - &setup.0, - &poly, - &opening_point, - &eval, - transcript, - ) - .unwrap() + Zeromorph::
<P>
::open(&setup.0, &poly, &opening_point, &eval, transcript).unwrap() } fn batch_prove( @@ -551,13 +544,7 @@ where _batch_type: BatchType, transcript: &mut ProofTranscript, ) -> Self::BatchedProof { - Zeromorph::
<P>
::batch_open( - &setup.0, - polynomials, - &opening_point, - &openings, - transcript, - ) + Zeromorph::
<P>
::batch_open(&setup.0, polynomials, &opening_point, &openings, transcript) } fn verify( @@ -908,7 +895,8 @@ mod test { &altered_verifier_eval, &proof, &mut verifier_transcript, - ).is_err()) + ) + .is_err()) } } @@ -930,7 +918,7 @@ mod test { .iter() .map(|poly| Zeromorph::::commit(&pk, &poly).unwrap()) .collect(); - + let commitments_refs: Vec<_> = commitments.iter().map(|x| x).collect(); let polys_refs: Vec<_> = polys.iter().map(|x| x).collect(); @@ -978,7 +966,8 @@ mod test { &altered_verifier_evals, &proof, &mut verifier_transcript, - ).is_err()) + ) + .is_err()) } } } diff --git a/jolt-core/src/poly/unipoly.rs b/jolt-core/src/poly/unipoly.rs index b6ef4194e..d1cc61cdf 100644 --- a/jolt-core/src/poly/unipoly.rs +++ b/jolt-core/src/poly/unipoly.rs @@ -132,17 +132,6 @@ impl UniPoly { } } - pub fn divide_minus_u(&self, u: F) -> Self { - let d = self.coeffs.len(); - - // Compute h(x) = f(x)/(x - u) - let mut h = vec![F::zero(); d]; - for i in (1..d).rev() { - h[i - 1] = self.coeffs[i] + h[i] * u; - } - Self::from_coeff(h) - } - pub fn random(num_vars: usize, mut rng: &mut R) -> Self { Self::from_coeff( std::iter::from_fn(|| Some(F::random(&mut rng))) diff --git a/jolt-core/src/r1cs/snark.rs b/jolt-core/src/r1cs/snark.rs index 1690f8a36..920a6e162 100644 --- a/jolt-core/src/r1cs/snark.rs +++ b/jolt-core/src/r1cs/snark.rs @@ -405,7 +405,12 @@ impl> R1CSProof { transcript: &mut ProofTranscript, ) -> Result { // TODO(sragss): Fiat shamir (relevant) commitments - let proof = UniformSpartanProof::prove_precommitted(generators, &key, witness_segments, transcript)?; + let proof = UniformSpartanProof::prove_precommitted( + generators, + &key, + witness_segments, + transcript, + )?; Ok(R1CSProof:: { proof, key }) } From ff5e564a451a18e7b78c1c8a8d5d97ab5173cf3d Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 23 May 2024 01:53:54 -0300 Subject: [PATCH 10/17] nit --- jolt-core/src/poly/commitment/zeromorph.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index 4b2f69147..e2f321936 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -445,12 +445,13 @@ where proof.q_k_com.clone(), ] .concat(); - let c = ::msm(&bases, &scalars) + let zeta_z_com = ::msm(&bases, &scalars) .unwrap() .into_affine(); + // e(pi, [tau]_2 - x * [1]_2) == e(C_{\zeta,Z}, -[X^(N_max - 2^n - 1)]_2) <==> e(C_{\zeta,Z} - x * pi, [X^{N_max - 2^n - 1}]_2) * e(-pi, [tau_2]) == 1 let pairing = P::multi_pairing( - &[c, proof.pi], + &[zeta_z_com, proof.pi], &[ (-vk.tau_N_max_sub_2_N.into_group()).into_affine(), (vk.kzg_vk.beta_g2.into_group() - (vk.kzg_vk.g2 * x_challenge)).into(), From f50f0715720acb523af5a3452d786ae73c3fb42a Mon Sep 17 00:00:00 2001 From: PatStiles Date: Thu, 30 May 2024 20:40:47 -0700 Subject: [PATCH 11/17] nits --- jolt-core/src/poly/commitment/kzg.rs | 49 +++++++++++----------- jolt-core/src/poly/commitment/zeromorph.rs | 39 ++++++----------- jolt-core/src/poly/unipoly.rs | 18 ++++---- 3 files changed, 46 insertions(+), 60 deletions(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index 380922e62..8e7baf87b 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -2,7 +2,7 @@ use crate::msm::VariableBaseMSM; use crate::poly::{field::JoltField, unipoly::UniPoly}; use crate::utils::errors::ProofVerifyError; use ark_ec::scalar_mul::fixed_base::FixedBase; -use 
ark_ec::{pairing::Pairing, CurveGroup}; +use ark_ec::{AffineRepr, pairing::Pairing, CurveGroup}; use ark_ff::PrimeField; use ark_std::UniformRand; use rand_core::{CryptoRng, RngCore}; @@ -110,11 +110,11 @@ pub struct KZGVerifierKey { } #[derive(Debug, Clone, Eq, PartialEq, Default)] -pub struct UVKZGPCS { +pub struct UnivariateKZG { _phantom: PhantomData
<P>
, } -impl UVKZGPCS
<P>
+impl UnivariateKZG
<P>
where
<P as Pairing>
::ScalarField: JoltField, { @@ -130,11 +130,10 @@ where )); } - let scalars = poly.as_vec(); let bases = pk.g1_powers(); let c = ::msm( - &bases[offset..scalars.len()], - &poly.as_vec()[offset..], + &bases[offset..poly.coeffs.len()], + &poly.coeffs[offset..], ) .unwrap(); @@ -152,8 +151,8 @@ where )); } let c = ::msm( - &pk.g1_powers()[..poly.as_vec().len()], - &poly.as_vec().as_slice(), + &pk.g1_powers()[..poly.coeffs.len()], + &poly.coeffs.as_slice(), ) .unwrap(); Ok(c.into_affine()) @@ -168,27 +167,17 @@ where
<P as Pairing>
::ScalarField: JoltField, { let divisor = UniPoly::from_coeff(vec![-*point, P::ScalarField::one()]); - let (witness_poly, _) = poly.divide_with_q_and_r(&divisor).unwrap(); + let (witness_poly, _) = poly.divide_with_remainder(&divisor).unwrap(); let proof = ::msm( - &pk.g1_powers()[..witness_poly.as_vec().len()], - &witness_poly.as_vec().as_slice(), + &pk.g1_powers()[..witness_poly.coeffs.len()], + &witness_poly.coeffs.as_slice(), ) .unwrap(); let evaluation = poly.evaluate(point); Ok((proof.into_affine(), evaluation)) } -} - -#[cfg(test)] -mod test { - use super::*; - use ark_bn254::{Bn254, Fr}; - use ark_ec::AffineRepr; - use ark_std::{rand::Rng, UniformRand}; - use rand_chacha::ChaCha20Rng; - use rand_core::SeedableRng; - fn kzg_verify( + pub fn verify( vk: &KZGVerifierKey
<P>
, commitment: &P::G1Affine, point: &P::ScalarField, @@ -203,6 +192,16 @@ mod test { Ok(lhs == rhs) } +} + +#[cfg(test)] +mod test { + use super::*; + use ark_bn254::{Bn254, Fr}; + use ark_std::{rand::Rng, UniformRand}; + use rand_chacha::ChaCha20Rng; + use rand_core::SeedableRng; + #[test] fn kzg_commit_prove_verify() -> Result<(), ProofVerifyError> { let seed = b"11111111111111111111111111111111"; @@ -213,11 +212,11 @@ mod test { let pp = Arc::new(SRS::::setup(&mut rng, degree)); let (ck, vk) = SRS::trim(pp, degree); let p = UniPoly::random::(degree, rng); - let comm = UVKZGPCS::::commit(&ck, &p)?; + let comm = UnivariateKZG::::commit(&ck, &p)?; let point = Fr::rand(rng); - let (proof, value) = UVKZGPCS::::open(&ck, &p, &point)?; + let (proof, value) = UnivariateKZG::::open(&ck, &p, &point)?; assert!( - kzg_verify(&vk, &comm, &point, &proof, &value)?, + UnivariateKZG::verify(&vk, &comm, &point, &proof, &value)?, "proof was incorrect for max_degree = {}, polynomial_degree = {}", degree, p.degree(), diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index e2f321936..a153571a6 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -17,14 +17,11 @@ use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use rand_core::{CryptoRng, RngCore}; use std::sync::Arc; -#[cfg(feature = "ark-msm")] -use ark_ec::VariableBaseMSM; - use rayon::prelude::*; use super::{ commitment_scheme::{BatchType, CommitShape, CommitmentScheme}, - kzg::{KZGProverKey, KZGVerifierKey, SRS, UVKZGPCS}, + kzg::{KZGProverKey, KZGVerifierKey, SRS, UnivariateKZG}, }; pub struct ZeromorphSRS(Arc>); @@ -96,25 +93,14 @@ where let (remainder_lo, remainder_hi) = remainder.split_at_mut(1 << (num_var - 1 - i)); let mut quotient = vec![P::ScalarField::zero(); remainder_lo.len()]; - #[cfg(feature = "multicore")] - let quotient_iter = quotient.par_iter_mut(); - - #[cfg(not(feature = "multicore"))] - let quotient_iter = quotient.iter_mut(); - - quotient_iter + quotient.par_iter_mut() .zip(&*remainder_lo) .zip(&*remainder_hi) .for_each(|((q, r_lo), r_hi)| { *q = *r_hi - *r_lo; }); - #[cfg(feature = "multicore")] - let remainder_lo_iter = remainder_lo.par_iter_mut(); - - #[cfg(not(feature = "multicore"))] - let remainder_lo_iter = remainder_lo.iter_mut(); - remainder_lo_iter + remainder_lo.par_iter_mut() .zip(remainder_hi) .for_each(|(r_lo, r_hi)| { *r_lo += (*r_hi - r_lo as &_) * x_i; @@ -152,7 +138,7 @@ where #[cfg(not(feature = "multicore"))] let q_hat_iter = q_hat[(1 << num_vars) - (1 << idx)..].iter_mut(); - q_hat_iter.zip(&q.as_vec()).for_each(|(q_hat, q)| { + q_hat_iter.zip(&q.coeffs).for_each(|(q_hat, q)| { *q_hat += scalar * q; }); scalar *= y_challenge; @@ -179,7 +165,6 @@ where .take(num_vars + 1) .collect(); - // offsets of x = let offsets_of_x = { let mut offsets_of_x = squares_of_x .iter() @@ -252,7 +237,7 @@ where )); } Ok(ZeromorphCommitment( - UVKZGPCS::commit(&pp.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), + UnivariateKZG::commit(&pp.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), )) } @@ -284,7 +269,7 @@ where // TODO: multicore gate let q_k_com: Vec = quotients .par_iter() - .map(|q| UVKZGPCS::commit(&pp.commit_pp, q).unwrap()) + .map(|q| UnivariateKZG::commit(&pp.commit_pp, q).unwrap()) .collect(); let q_comms: Vec = q_k_com.par_iter().map(|c| c.into_group()).collect(); q_comms @@ -299,7 +284,7 @@ where let (q_hat, offset) = compute_batched_lifted_degree_quotient::
<P>
("ients, &y_challenge); // Compute and absorb the commitment C_q = [\hat{q}] - let q_hat_com = UVKZGPCS::commit_offset(&pp.commit_pp, &q_hat, offset)?; + let q_hat_com = UnivariateKZG::commit_offset(&pp.commit_pp, &q_hat, offset)?; transcript.append_point(b"q_hat", &q_hat_com.into_group()); // Get x and z challenges @@ -327,7 +312,7 @@ where debug_assert_eq!(f.evaluate(&x_challenge), P::ScalarField::zero()); // Compute and send proof commitment pi - let (pi, _) = UVKZGPCS::open(&pp.open_pp, &f, &x_challenge)?; + let (pi, _) = UnivariateKZG::open(&pp.open_pp, &f, &x_challenge)?; Ok(ZeromorphProof { pi, @@ -493,7 +478,7 @@ where poly.Z.len() ); ZeromorphCommitment( - UVKZGPCS::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), + UnivariateKZG::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), ) } @@ -515,7 +500,7 @@ where evals.len() ); ZeromorphCommitment( - UVKZGPCS::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), + UnivariateKZG::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), ) }) .collect::>() @@ -523,7 +508,7 @@ where fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { ZeromorphCommitment( - UVKZGPCS::commit(&setup.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), + UnivariateKZG::commit(&setup.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), ) } @@ -653,7 +638,7 @@ mod test { for (k, q_k_uni) in quotients.iter().enumerate() { let z_partial = &z_challenge[&z_challenge.len() - k..]; //This is a weird consequence of how things are done.. the univariate polys are of the multilinear commitment in lagrange basis. Therefore we evaluate as multilinear - let q_k = DensePolynomial::new(q_k_uni.as_vec()); + let q_k = DensePolynomial::new(q_k_uni.coeffs.clone()); let q_k_eval = q_k.evaluate(z_partial); res -= (z_challenge[z_challenge.len() - k - 1] diff --git a/jolt-core/src/poly/unipoly.rs b/jolt-core/src/poly/unipoly.rs index d1cc61cdf..c593e52e3 100644 --- a/jolt-core/src/poly/unipoly.rs +++ b/jolt-core/src/poly/unipoly.rs @@ -13,7 +13,7 @@ use rayon::iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIter // ax^3 + bx^2 + cx + d stored as vec![d,c,b,a] #[derive(Debug, Clone, PartialEq)] pub struct UniPoly { - coeffs: Vec, + pub coeffs: Vec, } // ax^2 + bx + c stored as vec![c,a] @@ -57,7 +57,7 @@ impl UniPoly { /// Divide self by another polynomial, and returns the /// quotient and remainder. 
- pub fn divide_with_q_and_r(&self, divisor: &Self) -> Option<(Self, Self)> { + pub fn divide_with_remainder(&self, divisor: &Self) -> Option<(Self, Self)> { if self.is_zero() { Some((Self::zero(), Self::zero())) } else if divisor.is_zero() { @@ -139,17 +139,19 @@ impl UniPoly { .collect(), ) } + + pub fn shift_coefficients(&mut self, rhs: &F) { + self.coeffs.par_iter_mut().for_each(|c| *c += rhs); + } } +/* impl AddAssign<&F> for UniPoly { fn add_assign(&mut self, rhs: &F) { - #[cfg(feature = "multicore")] - let iter = self.coeffs.par_iter_mut(); - #[cfg(not(feature = "multicore"))] - let iter = self.coeffs.iter_mut(); - iter.for_each(|c| *c += rhs); + self.coeffs.par_iter_mut().for_each(|c| *c += rhs); } } +*/ impl AddAssign<&Self> for UniPoly { fn add_assign(&mut self, rhs: &Self) { @@ -330,7 +332,7 @@ mod tests { let divisor = UniPoly::::random(b_degree, rng); if let Some((quotient, remainder)) = - UniPoly::divide_with_q_and_r(÷nd, &divisor) + UniPoly::divide_with_remainder(÷nd, &divisor) { let mut prod = naive_mul(&divisor, "ient); prod += &remainder; From f4ae5b319efa0981de4599796ea0c9bf486af18d Mon Sep 17 00:00:00 2001 From: PatStiles Date: Fri, 31 May 2024 13:52:39 -0600 Subject: [PATCH 12/17] use izip --- jolt-core/src/poly/commitment/zeromorph.rs | 26 ++++++++-------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index a153571a6..53cc51595 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -13,6 +13,7 @@ use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::{batch_inversion, Field}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_std::{One, Zero}; +use itertools::izip; use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; use rand_core::{CryptoRng, RngCore}; use std::sync::Arc; @@ -192,23 +193,14 @@ where .collect::>() }; - let q_scalars = iter::successors(Some(P::ScalarField::one()), |acc| Some(*acc * y_challenge)) - .take(num_vars) - .zip(offsets_of_x) - .zip(squares_of_x) - .zip(&vs) - .zip(&vs[1..]) - .zip(challenges.iter().rev()) - .map( - |(((((power_of_y, offset_of_x), square_of_x), v_i), v_j), u_i)| { - ( - -(power_of_y * offset_of_x), - -(z_challenge * (square_of_x * v_j - *u_i * v_i)), - ) - }, - ) - .unzip(); - + let q_scalars = izip!(iter::successors(Some(P::ScalarField::one()), |acc| Some(*acc * y_challenge)).take(num_vars), offsets_of_x, squares_of_x, &vs, &vs[1..], challenges.iter().rev()).map( + |(power_of_y, offset_of_x, square_of_x, v_i, v_j, u_i)| { + ( + -(power_of_y * offset_of_x), + -(z_challenge * (square_of_x * v_j - *u_i * v_i)), + ) + } + ).unzip(); // -vs[0] * z = -z * (x^(2^num_vars) - 1) / (x - 1) = -z ฮฆ_n(x) (-vs[0] * z_challenge, q_scalars) } From 8ece39b6d6e94bb1c45b95b2a1348d5108fbdf33 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Fri, 31 May 2024 14:26:34 -0600 Subject: [PATCH 13/17] fix incorrect jolt vm interface --- jolt-core/src/jolt/vm/rv32i_vm.rs | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/jolt-core/src/jolt/vm/rv32i_vm.rs b/jolt-core/src/jolt/vm/rv32i_vm.rs index db19be260..edc045615 100644 --- a/jolt-core/src/jolt/vm/rv32i_vm.rs +++ b/jolt-core/src/jolt/vm/rv32i_vm.rs @@ -231,16 +231,14 @@ mod tests { let mut program = host::Program::new("fibonacci-guest"); program.set_input(&9u32); let (bytecode, memory_init) = program.decode(); - let (io_device, bytecode_trace, instruction_trace, memory_trace, 
circuit_flags) = + let (io_device, trace, circuit_flags) = program.trace(); let preprocessing = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 20); let (proof, commitments) = , C, M>>::prove( io_device, - bytecode_trace, - memory_trace, - instruction_trace, + trace, circuit_flags, preprocessing.clone(), ); @@ -286,7 +284,7 @@ mod tests { let mut program = host::Program::new("sha3-guest"); program.set_input(&[5u8; 32]); let (bytecode, memory_init) = program.decode(); - let (io_device, bytecode_trace, instruction_trace, memory_trace, circuit_flags) = + let (io_device, trace, circuit_flags) = program.trace(); let preprocessing = @@ -294,9 +292,7 @@ mod tests { let (jolt_proof, jolt_commitments) = , C, M>>::prove( io_device, - bytecode_trace, - memory_trace, - instruction_trace, + trace, circuit_flags, preprocessing.clone(), ); From 34a380af2c7821e8dd0d57db5997d058fbcdd769 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Fri, 31 May 2024 14:59:17 -0600 Subject: [PATCH 14/17] fmt --- jolt-core/src/benches/bench.rs | 26 +++++++------- jolt-core/src/jolt/vm/rv32i_vm.rs | 6 ++-- jolt-core/src/poly/commitment/kzg.rs | 3 +- jolt-core/src/poly/commitment/zeromorph.rs | 41 ++++++++++++++-------- 4 files changed, 42 insertions(+), 34 deletions(-) diff --git a/jolt-core/src/benches/bench.rs b/jolt-core/src/benches/bench.rs index 2b2a8bc33..4ecbf95b8 100644 --- a/jolt-core/src/benches/bench.rs +++ b/jolt-core/src/benches/bench.rs @@ -103,13 +103,12 @@ where let preprocessing: crate::jolt::vm::JoltPreprocessing = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); - let (jolt_proof, jolt_commitments) = - >::prove( - io_device, - trace, - circuit_flags, - preprocessing.clone(), - ); + let (jolt_proof, jolt_commitments) = >::prove( + io_device, + trace, + circuit_flags, + preprocessing.clone(), + ); // println!("Proof sizing:"); // serialize_and_print_size("jolt_commitments", &jolt_commitments); @@ -152,13 +151,12 @@ where let preprocessing: crate::jolt::vm::JoltPreprocessing = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 22); - let (jolt_proof, jolt_commitments) = - >::prove( - io_device, - trace, - circuit_flags, - preprocessing.clone(), - ); + let (jolt_proof, jolt_commitments) = >::prove( + io_device, + trace, + circuit_flags, + preprocessing.clone(), + ); let verification_result = RV32IJoltVM::verify(preprocessing, jolt_proof, jolt_commitments); assert!( verification_result.is_ok(), diff --git a/jolt-core/src/jolt/vm/rv32i_vm.rs b/jolt-core/src/jolt/vm/rv32i_vm.rs index edc045615..35f99d552 100644 --- a/jolt-core/src/jolt/vm/rv32i_vm.rs +++ b/jolt-core/src/jolt/vm/rv32i_vm.rs @@ -231,8 +231,7 @@ mod tests { let mut program = host::Program::new("fibonacci-guest"); program.set_input(&9u32); let (bytecode, memory_init) = program.decode(); - let (io_device, trace, circuit_flags) = - program.trace(); + let (io_device, trace, circuit_flags) = program.trace(); let preprocessing = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 20); @@ -284,8 +283,7 @@ mod tests { let mut program = host::Program::new("sha3-guest"); program.set_input(&[5u8; 32]); let (bytecode, memory_init) = program.decode(); - let (io_device, trace, circuit_flags) = - program.trace(); + let (io_device, trace, circuit_flags) = program.trace(); let preprocessing = RV32IJoltVM::preprocess(bytecode.clone(), memory_init, 1 << 20, 1 << 20, 1 << 20); diff --git a/jolt-core/src/poly/commitment/kzg.rs 
b/jolt-core/src/poly/commitment/kzg.rs index 8e7baf87b..ec4f13ef6 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -2,7 +2,7 @@ use crate::msm::VariableBaseMSM; use crate::poly::{field::JoltField, unipoly::UniPoly}; use crate::utils::errors::ProofVerifyError; use ark_ec::scalar_mul::fixed_base::FixedBase; -use ark_ec::{AffineRepr, pairing::Pairing, CurveGroup}; +use ark_ec::{pairing::Pairing, AffineRepr, CurveGroup}; use ark_ff::PrimeField; use ark_std::UniformRand; use rand_core::{CryptoRng, RngCore}; @@ -191,7 +191,6 @@ where let rhs = P::pairing(proof, vk.beta_g2.into_group() - (vk.g2 * point)); Ok(lhs == rhs) } - } #[cfg(test)] diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index 53cc51595..37abafad2 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -22,7 +22,7 @@ use rayon::prelude::*; use super::{ commitment_scheme::{BatchType, CommitShape, CommitmentScheme}, - kzg::{KZGProverKey, KZGVerifierKey, SRS, UnivariateKZG}, + kzg::{KZGProverKey, KZGVerifierKey, UnivariateKZG, SRS}, }; pub struct ZeromorphSRS(Arc>); @@ -94,14 +94,16 @@ where let (remainder_lo, remainder_hi) = remainder.split_at_mut(1 << (num_var - 1 - i)); let mut quotient = vec![P::ScalarField::zero(); remainder_lo.len()]; - quotient.par_iter_mut() + quotient + .par_iter_mut() .zip(&*remainder_lo) .zip(&*remainder_hi) .for_each(|((q, r_lo), r_hi)| { *q = *r_hi - *r_lo; }); - remainder_lo.par_iter_mut() + remainder_lo + .par_iter_mut() .zip(remainder_hi) .for_each(|(r_lo, r_hi)| { *r_lo += (*r_hi - r_lo as &_) * x_i; @@ -193,14 +195,22 @@ where .collect::>() }; - let q_scalars = izip!(iter::successors(Some(P::ScalarField::one()), |acc| Some(*acc * y_challenge)).take(num_vars), offsets_of_x, squares_of_x, &vs, &vs[1..], challenges.iter().rev()).map( - |(power_of_y, offset_of_x, square_of_x, v_i, v_j, u_i)| { - ( - -(power_of_y * offset_of_x), - -(z_challenge * (square_of_x * v_j - *u_i * v_i)), - ) - } - ).unzip(); + let q_scalars = izip!( + iter::successors(Some(P::ScalarField::one()), |acc| Some(*acc * y_challenge)) + .take(num_vars), + offsets_of_x, + squares_of_x, + &vs, + &vs[1..], + challenges.iter().rev() + ) + .map(|(power_of_y, offset_of_x, square_of_x, v_i, v_j, u_i)| { + ( + -(power_of_y * offset_of_x), + -(z_challenge * (square_of_x * v_j - *u_i * v_i)), + ) + }) + .unzip(); // -vs[0] * z = -z * (x^(2^num_vars) - 1) / (x - 1) = -z ฮฆ_n(x) (-vs[0] * z_challenge, q_scalars) } @@ -470,7 +480,8 @@ where poly.Z.len() ); ZeromorphCommitment( - UnivariateKZG::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())).unwrap(), + UnivariateKZG::commit(&setup.0.commit_pp, &UniPoly::from_coeff(poly.Z.clone())) + .unwrap(), ) } @@ -492,7 +503,8 @@ where evals.len() ); ZeromorphCommitment( - UnivariateKZG::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), + UnivariateKZG::commit(&gens.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())) + .unwrap(), ) }) .collect::>() @@ -500,7 +512,8 @@ where fn commit_slice(evals: &[Self::Field], setup: &Self::Setup) -> Self::Commitment { ZeromorphCommitment( - UnivariateKZG::commit(&setup.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())).unwrap(), + UnivariateKZG::commit(&setup.0.commit_pp, &UniPoly::from_coeff(evals.to_vec())) + .unwrap(), ) } From 8d6dccd69d91e9a97723b2da5dddaaabf1e4fc23 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Sat, 1 Jun 2024 10:11:29 -0600 Subject: [PATCH 15/17] nits --- 
jolt-core/src/poly/commitment/kzg.rs | 6 ++--- jolt-core/src/poly/commitment/zeromorph.rs | 28 +++++++++++----------- jolt-core/src/poly/field.rs | 2 +- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index ec4f13ef6..f1f46367c 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -58,7 +58,7 @@ impl SRS

<P> { } pub fn trim(params: Arc<Self>, max_degree: usize) -> (KZGProverKey<P>
, KZGVerifierKey<P>
) { - assert!(params.g1_powers.len() > 0, "max_degree is 0"); + assert!(params.g1_powers.is_empty(), "max_degree is 0"); assert!( max_degree < params.g1_powers.len(), "SRS length is less than size" @@ -152,7 +152,7 @@ where } let c = ::msm( &pk.g1_powers()[..poly.coeffs.len()], - &poly.coeffs.as_slice(), + poly.coeffs.as_slice(), ) .unwrap(); Ok(c.into_affine()) @@ -170,7 +170,7 @@ where let (witness_poly, _) = poly.divide_with_remainder(&divisor).unwrap(); let proof = ::msm( &pk.g1_powers()[..witness_poly.coeffs.len()], - &witness_poly.coeffs.as_slice(), + witness_poly.coeffs.as_slice(), ) .unwrap(); let evaluation = poly.evaluate(point); diff --git a/jolt-core/src/poly/commitment/zeromorph.rs b/jolt-core/src/poly/commitment/zeromorph.rs index 37abafad2..f2fd50c98 100644 --- a/jolt-core/src/poly/commitment/zeromorph.rs +++ b/jolt-core/src/poly/commitment/zeromorph.rs @@ -351,7 +351,7 @@ where }, ); let poly = DensePolynomial::new(f_batched.Z.clone()); - Zeromorph::
<P>
::open(&pk, &poly, &point, &batched_evaluation, transcript).unwrap() + Zeromorph::
<P>
::open(pk, &poly, point, &batched_evaluation, transcript).unwrap() } fn batch_verify( @@ -377,11 +377,11 @@ where }, ); Zeromorph::
<P>
::verify( - &vk, + vk, &ZeromorphCommitment(batched_commitment.into_affine()), - &point, + point, &batched_eval, - &batch_proof, + batch_proof, transcript, ) } @@ -438,8 +438,8 @@ where // e(pi, [tau]_2 - x * [1]_2) == e(C_{\zeta,Z}, -[X^(N_max - 2^n - 1)]_2) <==> e(C_{\zeta,Z} - x * pi, [X^{N_max - 2^n - 1}]_2) * e(-pi, [tau_2]) == 1 let pairing = P::multi_pairing( - &[zeta_z_com, proof.pi], - &[ + [zeta_z_com, proof.pi], + [ (-vk.tau_N_max_sub_2_N.into_group()).into_affine(), (vk.kzg_vk.beta_g2.into_group() - (vk.kzg_vk.g2 * x_challenge)).into(), ], @@ -523,8 +523,8 @@ where opening_point: &[Self::Field], // point at which the polynomial is evaluated transcript: &mut ProofTranscript, ) -> Self::Proof { - let eval = poly.evaluate(&opening_point); - Zeromorph::
<P>
::open(&setup.0, &poly, &opening_point, &eval, transcript).unwrap() + let eval = poly.evaluate(opening_point); + Zeromorph::
<P>
::open(&setup.0, poly, opening_point, &eval, transcript).unwrap() } fn batch_prove( @@ -535,7 +535,7 @@ where _batch_type: BatchType, transcript: &mut ProofTranscript, ) -> Self::BatchedProof { - Zeromorph::
<P>
::batch_open(&setup.0, polynomials, &opening_point, &openings, transcript) + Zeromorph::
<P>
::batch_open(&setup.0, polynomials, opening_point, openings, transcript) } fn verify( @@ -548,10 +548,10 @@ where ) -> Result<(), ProofVerifyError> { Zeromorph::
<P>
::verify( &setup.1, - &commitment, - &opening_point, - &opening, - &proof, + commitment, + opening_point, + opening, + proof, transcript, ) } @@ -569,7 +569,7 @@ where commitments, opening_point, openings, - &batch_proof, + batch_proof, transcript, ) } diff --git a/jolt-core/src/poly/field.rs b/jolt-core/src/poly/field.rs index 94dba6338..ade46e8d7 100644 --- a/jolt-core/src/poly/field.rs +++ b/jolt-core/src/poly/field.rs @@ -121,7 +121,7 @@ impl JoltField for ark_bn254::Fr { } fn inverse(&self) -> Option { - ::inverse(&self) + ::inverse(self) } fn from_bytes(bytes: &[u8]) -> Self { From 5b672587e63fbacce2424badc9558b92c5894d0b Mon Sep 17 00:00:00 2001 From: PatStiles Date: Sat, 1 Jun 2024 10:13:47 -0600 Subject: [PATCH 16/17] nit --- jolt-core/src/jolt/vm/read_write_memory.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jolt-core/src/jolt/vm/read_write_memory.rs b/jolt-core/src/jolt/vm/read_write_memory.rs index c39cd0044..14869776f 100644 --- a/jolt-core/src/jolt/vm/read_write_memory.rs +++ b/jolt-core/src/jolt/vm/read_write_memory.rs @@ -1571,7 +1571,7 @@ where ); let sumcheck_opening_proof = - C::prove(&generators, &polynomials.v_final, &r_sumcheck, transcript); + C::prove(generators, &polynomials.v_final, &r_sumcheck, transcript); Self { num_rounds, From 802df0a327767ecec7fbbc1dc82d0bd2bb96af37 Mon Sep 17 00:00:00 2001 From: PatStiles Date: Sun, 2 Jun 2024 20:36:08 -0600 Subject: [PATCH 17/17] ci --- jolt-core/src/poly/commitment/kzg.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/jolt-core/src/poly/commitment/kzg.rs b/jolt-core/src/poly/commitment/kzg.rs index f1f46367c..cd0062650 100644 --- a/jolt-core/src/poly/commitment/kzg.rs +++ b/jolt-core/src/poly/commitment/kzg.rs @@ -58,7 +58,7 @@ impl SRS

<P> { } pub fn trim(params: Arc<Self>, max_degree: usize) -> (KZGProverKey<P>
, KZGVerifierKey<P>
) { - assert!(params.g1_powers.is_empty(), "max_degree is 0"); + assert!(!params.g1_powers.is_empty(), "max_degree is 0"); assert!( max_degree < params.g1_powers.len(), "SRS length is less than size"
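
Reviewer note on the final shape of the KZG API (not part of any patch above): the series renames UVKZGPCS to UnivariateKZG and promotes the test-only kzg_verify helper to UnivariateKZG::verify. The sketch below mirrors the kzg_commit_prove_verify test from the series end to end; the `use` paths, the Bn254 curve, the ChaCha20 seed, and the helper name kzg_roundtrip_sketch are assumptions taken from the surrounding diffs for illustration, not new API.

    use crate::poly::commitment::kzg::{UnivariateKZG, SRS};
    use crate::poly::unipoly::UniPoly;
    use crate::utils::errors::ProofVerifyError;
    use ark_bn254::{Bn254, Fr};
    use ark_std::UniformRand;
    use rand_chacha::ChaCha20Rng;
    use rand_core::SeedableRng;
    use std::sync::Arc;

    fn kzg_roundtrip_sketch() -> Result<(), ProofVerifyError> {
        // Toy SRS: powers of tau in G1/G2, split by `trim` into a prover key
        // (the G1 powers) and a verifier key (g1, g2, beta_g2).
        let mut rng = ChaCha20Rng::from_seed(*b"11111111111111111111111111111111");
        let degree = 8;
        let srs = Arc::new(SRS::<Bn254>::setup(&mut rng, degree));
        let (ck, vk) = SRS::trim(srs, degree);

        // Commit: an MSM of the G1 powers against the coefficient vector.
        let p = UniPoly::<Fr>::random(degree, &mut rng);
        let comm = UnivariateKZG::<Bn254>::commit(&ck, &p)?;

        // Open at z: divide p(X) by (X - z); the quotient is the witness
        // polynomial and the discarded remainder equals p(z).
        let z = Fr::rand(&mut rng);
        let (pi, eval) = UnivariateKZG::<Bn254>::open(&ck, &p, &z)?;

        // Verify the pairing check e(C - [eval]_1, [1]_2) == e(pi, [tau]_2 - [z]_2),
        // which is exactly what UnivariateKZG::verify computes.
        assert!(UnivariateKZG::verify(&vk, &comm, &z, &pi, &eval)?);
        Ok(())
    }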